column            type      stats
id                int32     0 to 252k
repo              string    lengths 7 to 55
path              string    lengths 4 to 127
func_name         string    lengths 1 to 88
original_string   string    lengths 75 to 19.8k
language          string    1 class
code              string    lengths 75 to 19.8k
code_tokens       sequence
docstring         string    lengths 3 to 17.3k
docstring_tokens  sequence
sha               string    lengths 40 to 40
url               string    lengths 87 to 242
2,200
esheldon/fitsio
fitsio/header.py
FITSHDR.next
def next(self):
    """
    for iteration over the header entries
    """
    if self._current < len(self._record_list):
        rec = self._record_list[self._current]
        key = rec['name']
        self._current += 1
        return key
    else:
        raise StopIteration
python
def next(self):
    """
    for iteration over the header entries
    """
    if self._current < len(self._record_list):
        rec = self._record_list[self._current]
        key = rec['name']
        self._current += 1
        return key
    else:
        raise StopIteration
[ "def", "next", "(", "self", ")", ":", "if", "self", ".", "_current", "<", "len", "(", "self", ".", "_record_list", ")", ":", "rec", "=", "self", ".", "_record_list", "[", "self", ".", "_current", "]", "key", "=", "rec", "[", "'name'", "]", "self", ".", "_current", "+=", "1", "return", "key", "else", ":", "raise", "StopIteration" ]
for iteration over the header entries
[ "for", "iteration", "over", "the", "header", "entries" ]
a6f07919f457a282fe240adad9d2c30906b71a15
https://github.com/esheldon/fitsio/blob/a6f07919f457a282fe240adad9d2c30906b71a15/fitsio/header.py#L359-L369
2,201
esheldon/fitsio
fitsio/header.py
FITSRecord.set_record
def set_record(self, record, **kw):
    """
    check the record is valid and set keys in the dict

    parameters
    ----------
    record: string
        Dict representing a record or a string representing a FITS header card
    """
    if isstring(record):
        card = FITSCard(record)
        self.update(card)
        self.verify()
    else:
        if isinstance(record, FITSRecord):
            self.update(record)
        elif isinstance(record, dict):
            if 'name' in record and 'value' in record:
                self.update(record)
            elif 'card_string' in record:
                self.set_record(record['card_string'])
            else:
                raise ValueError('record must have name,value fields '
                                 'or a card_string field')
        else:
            raise ValueError("record must be a string card or "
                             "dictionary or FITSRecord")
python
def set_record(self, record, **kw):
    """
    check the record is valid and set keys in the dict

    parameters
    ----------
    record: string
        Dict representing a record or a string representing a FITS header card
    """
    if isstring(record):
        card = FITSCard(record)
        self.update(card)
        self.verify()
    else:
        if isinstance(record, FITSRecord):
            self.update(record)
        elif isinstance(record, dict):
            if 'name' in record and 'value' in record:
                self.update(record)
            elif 'card_string' in record:
                self.set_record(record['card_string'])
            else:
                raise ValueError('record must have name,value fields '
                                 'or a card_string field')
        else:
            raise ValueError("record must be a string card or "
                             "dictionary or FITSRecord")
[ "def", "set_record", "(", "self", ",", "record", ",", "*", "*", "kw", ")", ":", "if", "isstring", "(", "record", ")", ":", "card", "=", "FITSCard", "(", "record", ")", "self", ".", "update", "(", "card", ")", "self", ".", "verify", "(", ")", "else", ":", "if", "isinstance", "(", "record", ",", "FITSRecord", ")", ":", "self", ".", "update", "(", "record", ")", "elif", "isinstance", "(", "record", ",", "dict", ")", ":", "if", "'name'", "in", "record", "and", "'value'", "in", "record", ":", "self", ".", "update", "(", "record", ")", "elif", "'card_string'", "in", "record", ":", "self", ".", "set_record", "(", "record", "[", "'card_string'", "]", ")", "else", ":", "raise", "ValueError", "(", "'record must have name,value fields '", "'or a card_string field'", ")", "else", ":", "raise", "ValueError", "(", "\"record must be a string card or \"", "\"dictionary or FITSRecord\"", ")" ]
check the record is valid and set keys in the dict

parameters
----------
record: string
    Dict representing a record or a string representing a FITS header card
[ "check", "the", "record", "is", "valid", "and", "set", "keys", "in", "the", "dict" ]
a6f07919f457a282fe240adad9d2c30906b71a15
https://github.com/esheldon/fitsio/blob/a6f07919f457a282fe240adad9d2c30906b71a15/fitsio/header.py#L477-L510
2,202
esheldon/fitsio
fitsio/header.py
FITSCard._check_equals
def _check_equals(self):
    """
    check for = in position 8, set attribute _has_equals
    """
    card_string = self['card_string']
    if len(card_string) < 9:
        self._has_equals = False
    elif card_string[8] == '=':
        self._has_equals = True
    else:
        self._has_equals = False
python
def _check_equals(self):
    """
    check for = in position 8, set attribute _has_equals
    """
    card_string = self['card_string']
    if len(card_string) < 9:
        self._has_equals = False
    elif card_string[8] == '=':
        self._has_equals = True
    else:
        self._has_equals = False
[ "def", "_check_equals", "(", "self", ")", ":", "card_string", "=", "self", "[", "'card_string'", "]", "if", "len", "(", "card_string", ")", "<", "9", ":", "self", ".", "_has_equals", "=", "False", "elif", "card_string", "[", "8", "]", "==", "'='", ":", "self", ".", "_has_equals", "=", "True", "else", ":", "self", ".", "_has_equals", "=", "False" ]
check for = in position 8, set attribute _has_equals
[ "check", "for", "=", "in", "position", "8", "set", "attribute", "_has_equals" ]
a6f07919f457a282fe240adad9d2c30906b71a15
https://github.com/esheldon/fitsio/blob/a6f07919f457a282fe240adad9d2c30906b71a15/fitsio/header.py#L588-L598
2,203
esheldon/fitsio
fitsio/header.py
FITSCard._convert_value
def _convert_value(self, value_orig):
    """
    things like 6 and 1.25 are converted with ast.literal_eval

    Things like 'hello' are stripped of quotes
    """
    import ast
    if value_orig is None:
        return value_orig

    if value_orig.startswith("'") and value_orig.endswith("'"):
        value = value_orig[1:-1]
    else:
        try:
            avalue = ast.parse(value_orig).body[0].value
            if isinstance(avalue, ast.BinOp):
                # this is probably a string that happens to look like
                # a binary operation, e.g. '25-3'
                value = value_orig
            else:
                value = ast.literal_eval(value_orig)
        except Exception:
            value = self._convert_string(value_orig)

        if isinstance(value, int) and '_' in value_orig:
            value = value_orig

    return value
python
def _convert_value(self, value_orig):
    """
    things like 6 and 1.25 are converted with ast.literal_eval

    Things like 'hello' are stripped of quotes
    """
    import ast
    if value_orig is None:
        return value_orig

    if value_orig.startswith("'") and value_orig.endswith("'"):
        value = value_orig[1:-1]
    else:
        try:
            avalue = ast.parse(value_orig).body[0].value
            if isinstance(avalue, ast.BinOp):
                # this is probably a string that happens to look like
                # a binary operation, e.g. '25-3'
                value = value_orig
            else:
                value = ast.literal_eval(value_orig)
        except Exception:
            value = self._convert_string(value_orig)

        if isinstance(value, int) and '_' in value_orig:
            value = value_orig

    return value
[ "def", "_convert_value", "(", "self", ",", "value_orig", ")", ":", "import", "ast", "if", "value_orig", "is", "None", ":", "return", "value_orig", "if", "value_orig", ".", "startswith", "(", "\"'\"", ")", "and", "value_orig", ".", "endswith", "(", "\"'\"", ")", ":", "value", "=", "value_orig", "[", "1", ":", "-", "1", "]", "else", ":", "try", ":", "avalue", "=", "ast", ".", "parse", "(", "value_orig", ")", ".", "body", "[", "0", "]", ".", "value", "if", "isinstance", "(", "avalue", ",", "ast", ".", "BinOp", ")", ":", "# this is probably a string that happens to look like", "# a binary operation, e.g. '25-3'", "value", "=", "value_orig", "else", ":", "value", "=", "ast", ".", "literal_eval", "(", "value_orig", ")", "except", "Exception", ":", "value", "=", "self", ".", "_convert_string", "(", "value_orig", ")", "if", "isinstance", "(", "value", ",", "int", ")", "and", "'_'", "in", "value_orig", ":", "value", "=", "value_orig", "return", "value" ]
things like 6 and 1.25 are converted with ast.literal_eval

Things like 'hello' are stripped of quotes
[ "things", "like", "6", "and", "1", ".", "25", "are", "converted", "with", "ast", ".", "literal_value" ]
a6f07919f457a282fe240adad9d2c30906b71a15
https://github.com/esheldon/fitsio/blob/a6f07919f457a282fe240adad9d2c30906b71a15/fitsio/header.py#L641-L669
2,204
sanger-pathogens/ariba
ariba/cluster.py
Cluster._make_reads_for_assembly
def _make_reads_for_assembly(number_of_wanted_reads, total_reads, reads_in1, reads_in2, reads_out1, reads_out2, random_seed=None):
    '''Makes fastq files that are random subset of input files. Returns total
    number of reads in output files. If the number of wanted reads is >= total
    reads, then just makes symlinks instead of making new copies of the input
    files.'''
    random.seed(random_seed)

    if number_of_wanted_reads < total_reads:
        reads_written = 0
        percent_wanted = 100 * number_of_wanted_reads / total_reads
        file_reader1 = pyfastaq.sequences.file_reader(reads_in1)
        file_reader2 = pyfastaq.sequences.file_reader(reads_in2)
        out1 = pyfastaq.utils.open_file_write(reads_out1)
        out2 = pyfastaq.utils.open_file_write(reads_out2)

        for read1 in file_reader1:
            try:
                read2 = next(file_reader2)
            except StopIteration:
                pyfastaq.utils.close(out1)
                pyfastaq.utils.close(out2)
                raise Error('Error subsetting reads. No mate found for read ' + read1.id)

            if random.randint(0, 100) <= percent_wanted:
                print(read1, file=out1)
                print(read2, file=out2)
                reads_written += 2

        pyfastaq.utils.close(out1)
        pyfastaq.utils.close(out2)
        return reads_written
    else:
        os.symlink(reads_in1, reads_out1)
        os.symlink(reads_in2, reads_out2)
        return total_reads
python
def _make_reads_for_assembly(number_of_wanted_reads, total_reads, reads_in1, reads_in2, reads_out1, reads_out2, random_seed=None):
    '''Makes fastq files that are random subset of input files. Returns total
    number of reads in output files. If the number of wanted reads is >= total
    reads, then just makes symlinks instead of making new copies of the input
    files.'''
    random.seed(random_seed)

    if number_of_wanted_reads < total_reads:
        reads_written = 0
        percent_wanted = 100 * number_of_wanted_reads / total_reads
        file_reader1 = pyfastaq.sequences.file_reader(reads_in1)
        file_reader2 = pyfastaq.sequences.file_reader(reads_in2)
        out1 = pyfastaq.utils.open_file_write(reads_out1)
        out2 = pyfastaq.utils.open_file_write(reads_out2)

        for read1 in file_reader1:
            try:
                read2 = next(file_reader2)
            except StopIteration:
                pyfastaq.utils.close(out1)
                pyfastaq.utils.close(out2)
                raise Error('Error subsetting reads. No mate found for read ' + read1.id)

            if random.randint(0, 100) <= percent_wanted:
                print(read1, file=out1)
                print(read2, file=out2)
                reads_written += 2

        pyfastaq.utils.close(out1)
        pyfastaq.utils.close(out2)
        return reads_written
    else:
        os.symlink(reads_in1, reads_out1)
        os.symlink(reads_in2, reads_out2)
        return total_reads
[ "def", "_make_reads_for_assembly", "(", "number_of_wanted_reads", ",", "total_reads", ",", "reads_in1", ",", "reads_in2", ",", "reads_out1", ",", "reads_out2", ",", "random_seed", "=", "None", ")", ":", "random", ".", "seed", "(", "random_seed", ")", "if", "number_of_wanted_reads", "<", "total_reads", ":", "reads_written", "=", "0", "percent_wanted", "=", "100", "*", "number_of_wanted_reads", "/", "total_reads", "file_reader1", "=", "pyfastaq", ".", "sequences", ".", "file_reader", "(", "reads_in1", ")", "file_reader2", "=", "pyfastaq", ".", "sequences", ".", "file_reader", "(", "reads_in2", ")", "out1", "=", "pyfastaq", ".", "utils", ".", "open_file_write", "(", "reads_out1", ")", "out2", "=", "pyfastaq", ".", "utils", ".", "open_file_write", "(", "reads_out2", ")", "for", "read1", "in", "file_reader1", ":", "try", ":", "read2", "=", "next", "(", "file_reader2", ")", "except", "StopIteration", ":", "pyfastaq", ".", "utils", ".", "close", "(", "out1", ")", "pyfastaq", ".", "utils", ".", "close", "(", "out2", ")", "raise", "Error", "(", "'Error subsetting reads. No mate found for read '", "+", "read1", ".", "id", ")", "if", "random", ".", "randint", "(", "0", ",", "100", ")", "<=", "percent_wanted", ":", "print", "(", "read1", ",", "file", "=", "out1", ")", "print", "(", "read2", ",", "file", "=", "out2", ")", "reads_written", "+=", "2", "pyfastaq", ".", "utils", ".", "close", "(", "out1", ")", "pyfastaq", ".", "utils", ".", "close", "(", "out2", ")", "return", "reads_written", "else", ":", "os", ".", "symlink", "(", "reads_in1", ",", "reads_out1", ")", "os", ".", "symlink", "(", "reads_in2", ",", "reads_out2", ")", "return", "total_reads" ]
Makes fastq files that are random subset of input files. Returns total number of reads in output files. If the number of wanted reads is >= total reads, then just makes symlinks instead of making new copies of the input files.
[ "Makes", "fastq", "files", "that", "are", "random", "subset", "of", "input", "files", ".", "Returns", "total", "number", "of", "reads", "in", "output", "files", ".", "If", "the", "number", "of", "wanted", "reads", "is", ">", "=", "total", "reads", "then", "just", "makes", "symlinks", "instead", "of", "making", "new", "copies", "of", "the", "input", "files", "." ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/cluster.py#L262-L295
2,205
sanger-pathogens/ariba
ariba/tb.py
load_mutations
def load_mutations(gene_coords, mutation_to_drug_json, variants_txt, upstream_before=100):
    '''Load mutations from "mykrobe-style" files.
    mutation_to_drug_json is json file of mutation -> list of drugs.
    variants_txt is text file of variants used by mykrobe's make probes.
    gene_coords should be dict of gene coords made by the function
    genbank_to_gene_coords'''
    with open(mutation_to_drug_json) as f:
        drug_data = json.load(f)

    mutations = []
    genes_with_indels = set()
    genes_need_upstream = set()
    genes_non_upstream = set()

    with open(variants_txt) as f:
        for line in f:
            gene, variant, d_or_p = line.rstrip().split('\t')
            coding = 0 if gene == 'rrs' else 1
            d = {'gene': gene, 'var': variant, 'coding': coding, 'upstream': False}
            drug_data_key = d['gene'] + '_' + d['var']
            if drug_data_key not in drug_data:
                print('KEY', drug_data_key, 'NOT FOUND', file=sys.stderr)
            else:
                d['drugs'] = ','.join(sorted(drug_data[drug_data_key]))

            if d_or_p == 'DNA' and gene != 'rrs':
                assert gene != 'rrs'
                re_match = re.match('([ACGT]+)(-?[0-9]+)([ACGTX]+)', d['var'])
                try:
                    ref, pos, alt = re_match.groups()
                except:
                    print('regex error:', d['var'], file=sys.stderr)
                    continue

                pos = int(pos)
                if len(ref) != len(alt):
                    genes_with_indels.add(d['gene'])
                    continue
                elif pos > 0:
                    #print('ignoring synonymous change (not implemented):', d['gene'], d['var'], d['drugs'], file=sys.stderr)
                    continue
                elif pos < 0:
                    this_gene_coords = gene_coords[d['gene']]
                    d['upstream'] = True
                    if this_gene_coords['start'] < this_gene_coords['end']:
                        variant_pos_in_output_seq = upstream_before + pos + 1
                    else:
                        variant_pos_in_output_seq = upstream_before + pos + 1
                    assert variant_pos_in_output_seq > 0
                    d['var'] = ref + str(variant_pos_in_output_seq) + alt
                    d['original_mutation'] = variant
                    genes_need_upstream.add(d['gene'])
                elif pos == 0:
                    print('Zero coord!', d, file=sys.stderr)
                    continue
                else:
                    print('deal with?', d, file=sys.stderr)
                    continue

            mutations.append(d)
            if not d['upstream']:
                genes_non_upstream.add(d['gene'])

    return mutations, genes_with_indels, genes_need_upstream, genes_non_upstream
python
def load_mutations(gene_coords, mutation_to_drug_json, variants_txt, upstream_before=100):
    '''Load mutations from "mykrobe-style" files.
    mutation_to_drug_json is json file of mutation -> list of drugs.
    variants_txt is text file of variants used by mykrobe's make probes.
    gene_coords should be dict of gene coords made by the function
    genbank_to_gene_coords'''
    with open(mutation_to_drug_json) as f:
        drug_data = json.load(f)

    mutations = []
    genes_with_indels = set()
    genes_need_upstream = set()
    genes_non_upstream = set()

    with open(variants_txt) as f:
        for line in f:
            gene, variant, d_or_p = line.rstrip().split('\t')
            coding = 0 if gene == 'rrs' else 1
            d = {'gene': gene, 'var': variant, 'coding': coding, 'upstream': False}
            drug_data_key = d['gene'] + '_' + d['var']
            if drug_data_key not in drug_data:
                print('KEY', drug_data_key, 'NOT FOUND', file=sys.stderr)
            else:
                d['drugs'] = ','.join(sorted(drug_data[drug_data_key]))

            if d_or_p == 'DNA' and gene != 'rrs':
                assert gene != 'rrs'
                re_match = re.match('([ACGT]+)(-?[0-9]+)([ACGTX]+)', d['var'])
                try:
                    ref, pos, alt = re_match.groups()
                except:
                    print('regex error:', d['var'], file=sys.stderr)
                    continue

                pos = int(pos)
                if len(ref) != len(alt):
                    genes_with_indels.add(d['gene'])
                    continue
                elif pos > 0:
                    #print('ignoring synonymous change (not implemented):', d['gene'], d['var'], d['drugs'], file=sys.stderr)
                    continue
                elif pos < 0:
                    this_gene_coords = gene_coords[d['gene']]
                    d['upstream'] = True
                    if this_gene_coords['start'] < this_gene_coords['end']:
                        variant_pos_in_output_seq = upstream_before + pos + 1
                    else:
                        variant_pos_in_output_seq = upstream_before + pos + 1
                    assert variant_pos_in_output_seq > 0
                    d['var'] = ref + str(variant_pos_in_output_seq) + alt
                    d['original_mutation'] = variant
                    genes_need_upstream.add(d['gene'])
                elif pos == 0:
                    print('Zero coord!', d, file=sys.stderr)
                    continue
                else:
                    print('deal with?', d, file=sys.stderr)
                    continue

            mutations.append(d)
            if not d['upstream']:
                genes_non_upstream.add(d['gene'])

    return mutations, genes_with_indels, genes_need_upstream, genes_non_upstream
[ "def", "load_mutations", "(", "gene_coords", ",", "mutation_to_drug_json", ",", "variants_txt", ",", "upstream_before", "=", "100", ")", ":", "with", "open", "(", "mutation_to_drug_json", ")", "as", "f", ":", "drug_data", "=", "json", ".", "load", "(", "f", ")", "mutations", "=", "[", "]", "genes_with_indels", "=", "set", "(", ")", "genes_need_upstream", "=", "set", "(", ")", "genes_non_upstream", "=", "set", "(", ")", "with", "open", "(", "variants_txt", ")", "as", "f", ":", "for", "line", "in", "f", ":", "gene", ",", "variant", ",", "d_or_p", "=", "line", ".", "rstrip", "(", ")", ".", "split", "(", "'\\t'", ")", "coding", "=", "0", "if", "gene", "==", "'rrs'", "else", "1", "d", "=", "{", "'gene'", ":", "gene", ",", "'var'", ":", "variant", ",", "'coding'", ":", "coding", ",", "'upstream'", ":", "False", "}", "drug_data_key", "=", "d", "[", "'gene'", "]", "+", "'_'", "+", "d", "[", "'var'", "]", "if", "drug_data_key", "not", "in", "drug_data", ":", "print", "(", "'KEY'", ",", "drug_data_key", ",", "'NOT FOUND'", ",", "file", "=", "sys", ".", "stderr", ")", "else", ":", "d", "[", "'drugs'", "]", "=", "','", ".", "join", "(", "sorted", "(", "drug_data", "[", "drug_data_key", "]", ")", ")", "if", "d_or_p", "==", "'DNA'", "and", "gene", "!=", "'rrs'", ":", "assert", "gene", "!=", "'rrs'", "re_match", "=", "re", ".", "match", "(", "'([ACGT]+)(-?[0-9]+)([ACGTX]+)'", ",", "d", "[", "'var'", "]", ")", "try", ":", "ref", ",", "pos", ",", "alt", "=", "re_match", ".", "groups", "(", ")", "except", ":", "print", "(", "'regex error:'", ",", "d", "[", "'var'", "]", ",", "file", "=", "sys", ".", "stderr", ")", "continue", "pos", "=", "int", "(", "pos", ")", "if", "len", "(", "ref", ")", "!=", "len", "(", "alt", ")", ":", "genes_with_indels", ".", "add", "(", "d", "[", "'gene'", "]", ")", "continue", "elif", "pos", ">", "0", ":", "#print('ignoring synonymous change (not implemented):', d['gene'], d['var'], d['drugs'], file=sys.stderr)", "continue", "elif", "pos", "<", "0", ":", "this_gene_coords", "=", "gene_coords", "[", "d", "[", "'gene'", "]", "]", "d", "[", "'upstream'", "]", "=", "True", "if", "this_gene_coords", "[", "'start'", "]", "<", "this_gene_coords", "[", "'end'", "]", ":", "variant_pos_in_output_seq", "=", "upstream_before", "+", "pos", "+", "1", "else", ":", "variant_pos_in_output_seq", "=", "upstream_before", "+", "pos", "+", "1", "assert", "variant_pos_in_output_seq", ">", "0", "d", "[", "'var'", "]", "=", "ref", "+", "str", "(", "variant_pos_in_output_seq", ")", "+", "alt", "d", "[", "'original_mutation'", "]", "=", "variant", "genes_need_upstream", ".", "add", "(", "d", "[", "'gene'", "]", ")", "elif", "pos", "==", "0", ":", "print", "(", "'Zero coord!'", ",", "d", ",", "file", "=", "sys", ".", "stderr", ")", "continue", "else", ":", "print", "(", "'deal with?'", ",", "d", ",", "file", "=", "sys", ".", "stderr", ")", "continue", "mutations", ".", "append", "(", "d", ")", "if", "not", "d", "[", "'upstream'", "]", ":", "genes_non_upstream", ".", "add", "(", "d", "[", "'gene'", "]", ")", "return", "mutations", ",", "genes_with_indels", ",", "genes_need_upstream", ",", "genes_non_upstream" ]
Load mutations from "mykrobe-style" files. mutation_to_drug_json is json file of mutation -> list of drugs. variants_txt is text file of variants used by mykrobe's make probes. gene_coords should be dict of gene coords made by the function genbank_to_gene_coords
[ "Load", "mutations", "from", "mykrobe", "-", "style", "files", ".", "mutation_to_drug_json", "is", "json", "file", "of", "mutation", "-", ">", "list", "of", "drugs", ".", "variants_txt", "is", "text", "file", "of", "variants", "used", "my", "mykrobe", "s", "make", "probes", ".", "gene_coords", "should", "be", "dict", "of", "gene", "coords", "made", "by", "the", "function", "genbank_to_gene_coords" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/tb.py#L80-L142
2,206
sanger-pathogens/ariba
ariba/tb.py
write_prepareref_fasta_file
def write_prepareref_fasta_file(outfile, gene_coords, genes_need_upstream, genes_non_upstream, upstream_before=100, upstream_after=100):
    '''Writes fasta file to be used with -f option of prepareref'''
    tmp_dict = {}
    fasta_in = os.path.join(data_dir, 'NC_000962.3.fa.gz')
    pyfastaq.tasks.file_to_dict(fasta_in, tmp_dict)
    ref_seq = tmp_dict['NC_000962.3']

    with open(outfile, 'w') as f:
        for gene in genes_non_upstream:
            start = gene_coords[gene]['start']
            end = gene_coords[gene]['end']
            if start < end:
                gene_fa = pyfastaq.sequences.Fasta(gene, ref_seq[start:end+1])
            else:
                gene_fa = pyfastaq.sequences.Fasta(gene, ref_seq[end:start+1])
                gene_fa.revcomp()

            print(gene_fa, file=f)

        for gene in genes_need_upstream:
            start = gene_coords[gene]['start']
            end = gene_coords[gene]['end']
            if start < end:
                gene_fa = pyfastaq.sequences.Fasta(gene, ref_seq[start - upstream_before:start + upstream_after])
            else:
                gene_fa = pyfastaq.sequences.Fasta(gene, ref_seq[start - upstream_after + 1:start + upstream_before + 1])
                gene_fa.revcomp()

            gene_fa.id += '_upstream'
            print(gene_fa, file=f)
python
def write_prepareref_fasta_file(outfile, gene_coords, genes_need_upstream, genes_non_upstream, upstream_before=100, upstream_after=100):
    '''Writes fasta file to be used with -f option of prepareref'''
    tmp_dict = {}
    fasta_in = os.path.join(data_dir, 'NC_000962.3.fa.gz')
    pyfastaq.tasks.file_to_dict(fasta_in, tmp_dict)
    ref_seq = tmp_dict['NC_000962.3']

    with open(outfile, 'w') as f:
        for gene in genes_non_upstream:
            start = gene_coords[gene]['start']
            end = gene_coords[gene]['end']
            if start < end:
                gene_fa = pyfastaq.sequences.Fasta(gene, ref_seq[start:end+1])
            else:
                gene_fa = pyfastaq.sequences.Fasta(gene, ref_seq[end:start+1])
                gene_fa.revcomp()

            print(gene_fa, file=f)

        for gene in genes_need_upstream:
            start = gene_coords[gene]['start']
            end = gene_coords[gene]['end']
            if start < end:
                gene_fa = pyfastaq.sequences.Fasta(gene, ref_seq[start - upstream_before:start + upstream_after])
            else:
                gene_fa = pyfastaq.sequences.Fasta(gene, ref_seq[start - upstream_after + 1:start + upstream_before + 1])
                gene_fa.revcomp()

            gene_fa.id += '_upstream'
            print(gene_fa, file=f)
[ "def", "write_prepareref_fasta_file", "(", "outfile", ",", "gene_coords", ",", "genes_need_upstream", ",", "genes_non_upstream", ",", "upstream_before", "=", "100", ",", "upstream_after", "=", "100", ")", ":", "tmp_dict", "=", "{", "}", "fasta_in", "=", "os", ".", "path", ".", "join", "(", "data_dir", ",", "'NC_000962.3.fa.gz'", ")", "pyfastaq", ".", "tasks", ".", "file_to_dict", "(", "fasta_in", ",", "tmp_dict", ")", "ref_seq", "=", "tmp_dict", "[", "'NC_000962.3'", "]", "with", "open", "(", "outfile", ",", "'w'", ")", "as", "f", ":", "for", "gene", "in", "genes_non_upstream", ":", "start", "=", "gene_coords", "[", "gene", "]", "[", "'start'", "]", "end", "=", "gene_coords", "[", "gene", "]", "[", "'end'", "]", "if", "start", "<", "end", ":", "gene_fa", "=", "pyfastaq", ".", "sequences", ".", "Fasta", "(", "gene", ",", "ref_seq", "[", "start", ":", "end", "+", "1", "]", ")", "else", ":", "gene_fa", "=", "pyfastaq", ".", "sequences", ".", "Fasta", "(", "gene", ",", "ref_seq", "[", "end", ":", "start", "+", "1", "]", ")", "gene_fa", ".", "revcomp", "(", ")", "print", "(", "gene_fa", ",", "file", "=", "f", ")", "for", "gene", "in", "genes_need_upstream", ":", "start", "=", "gene_coords", "[", "gene", "]", "[", "'start'", "]", "end", "=", "gene_coords", "[", "gene", "]", "[", "'end'", "]", "if", "start", "<", "end", ":", "gene_fa", "=", "pyfastaq", ".", "sequences", ".", "Fasta", "(", "gene", ",", "ref_seq", "[", "start", "-", "upstream_before", ":", "start", "+", "upstream_after", "]", ")", "else", ":", "gene_fa", "=", "pyfastaq", ".", "sequences", ".", "Fasta", "(", "gene", ",", "ref_seq", "[", "start", "-", "upstream_after", "+", "1", ":", "start", "+", "upstream_before", "+", "1", "]", ")", "gene_fa", ".", "revcomp", "(", ")", "gene_fa", ".", "id", "+=", "'_upstream'", "print", "(", "gene_fa", ",", "file", "=", "f", ")" ]
Writes fasta file to be used with -f option of prepareref
[ "Writes", "fasta", "file", "to", "be", "used", "with", "-", "f", "option", "of", "prepareref" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/tb.py#L145-L174
2,207
sanger-pathogens/ariba
ariba/summary_cluster.py
SummaryCluster._get_known_noncoding_het_snp
def _get_known_noncoding_het_snp(data_dict):
    '''If ref is coding, return None. If the data dict has a known snp, and
    samtools made a call, then return the string ref_name_change and the %
    of reads supporting the variant type. If noncoding, but no samtools call,
    then return None'''
    if data_dict['gene'] == '1':
        return None

    if data_dict['known_var'] == '1' and data_dict['ref_ctg_effect'] == 'SNP' \
      and data_dict['smtls_nts'] != '.' and ';' not in data_dict['smtls_nts']:
        nucleotides = data_dict['smtls_nts'].split(',')
        depths = data_dict['smtls_nts_depth'].split(',')

        if len(nucleotides) != len(depths):
            raise Error('Mismatch in number of inferred nucleotides from ctg_nt, smtls_nts, smtls_nts_depth columns. Cannot continue\n' + str(data_dict))

        try:
            var_nucleotide = data_dict['known_var_change'][-1]
            depths = [int(x) for x in depths]
            nuc_to_depth = dict(zip(nucleotides, depths))
            total_depth = sum(depths)
            var_depth = nuc_to_depth.get(var_nucleotide, 0)
            percent_depth = round(100 * var_depth / total_depth, 1)
        except:
            return None

        return data_dict['known_var_change'], percent_depth
    else:
        return None
python
def _get_known_noncoding_het_snp(data_dict):
    '''If ref is coding, return None. If the data dict has a known snp, and
    samtools made a call, then return the string ref_name_change and the %
    of reads supporting the variant type. If noncoding, but no samtools call,
    then return None'''
    if data_dict['gene'] == '1':
        return None

    if data_dict['known_var'] == '1' and data_dict['ref_ctg_effect'] == 'SNP' \
      and data_dict['smtls_nts'] != '.' and ';' not in data_dict['smtls_nts']:
        nucleotides = data_dict['smtls_nts'].split(',')
        depths = data_dict['smtls_nts_depth'].split(',')

        if len(nucleotides) != len(depths):
            raise Error('Mismatch in number of inferred nucleotides from ctg_nt, smtls_nts, smtls_nts_depth columns. Cannot continue\n' + str(data_dict))

        try:
            var_nucleotide = data_dict['known_var_change'][-1]
            depths = [int(x) for x in depths]
            nuc_to_depth = dict(zip(nucleotides, depths))
            total_depth = sum(depths)
            var_depth = nuc_to_depth.get(var_nucleotide, 0)
            percent_depth = round(100 * var_depth / total_depth, 1)
        except:
            return None

        return data_dict['known_var_change'], percent_depth
    else:
        return None
[ "def", "_get_known_noncoding_het_snp", "(", "data_dict", ")", ":", "if", "data_dict", "[", "'gene'", "]", "==", "'1'", ":", "return", "None", "if", "data_dict", "[", "'known_var'", "]", "==", "'1'", "and", "data_dict", "[", "'ref_ctg_effect'", "]", "==", "'SNP'", "and", "data_dict", "[", "'smtls_nts'", "]", "!=", "'.'", "and", "';'", "not", "in", "data_dict", "[", "'smtls_nts'", "]", ":", "nucleotides", "=", "data_dict", "[", "'smtls_nts'", "]", ".", "split", "(", "','", ")", "depths", "=", "data_dict", "[", "'smtls_nts_depth'", "]", ".", "split", "(", "','", ")", "if", "len", "(", "nucleotides", ")", "!=", "len", "(", "depths", ")", ":", "raise", "Error", "(", "'Mismatch in number of inferred nucleotides from ctg_nt, smtls_nts, smtls_nts_depth columns. Cannot continue\\n'", "+", "str", "(", "data_dict", ")", ")", "try", ":", "var_nucleotide", "=", "data_dict", "[", "'known_var_change'", "]", "[", "-", "1", "]", "depths", "=", "[", "int", "(", "x", ")", "for", "x", "in", "depths", "]", "nuc_to_depth", "=", "dict", "(", "zip", "(", "nucleotides", ",", "depths", ")", ")", "total_depth", "=", "sum", "(", "depths", ")", "var_depth", "=", "nuc_to_depth", ".", "get", "(", "var_nucleotide", ",", "0", ")", "percent_depth", "=", "round", "(", "100", "*", "var_depth", "/", "total_depth", ",", "1", ")", "except", ":", "return", "None", "return", "data_dict", "[", "'known_var_change'", "]", ",", "percent_depth", "else", ":", "return", "None" ]
If ref is coding, return None. If the data dict has a known snp, and samtools made a call, then return the string ref_name_change and the % of reads supporting the variant type. If noncoding, but no samtools call, then return None
[ "If", "ref", "is", "coding", "return", "None", ".", "If", "the", "data", "dict", "has", "a", "known", "snp", "and", "samtools", "made", "a", "call", "then", "return", "the", "string", "ref_name_change", "and", "the", "%", "of", "reads", "supporting", "the", "variant", "type", ".", "If", "noncoding", "but", "no", "samtools", "call", "then", "return", "None" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/summary_cluster.py#L236-L264
2,208
sanger-pathogens/ariba
ariba/summary_cluster.py
SummaryCluster._has_match
def _has_match(self, assembled_summary):
    '''assembled_summary should be output of _to_cluster_summary_assembled'''
    if assembled_summary.startswith('yes'):
        if self.data[0]['var_only'] == '0' or self._to_cluster_summary_has_known_nonsynonymous(assembled_summary) == 'yes':
            return 'yes'
        else:
            return 'no'
    else:
        return 'no'
python
def _has_match(self, assembled_summary):
    '''assembled_summary should be output of _to_cluster_summary_assembled'''
    if assembled_summary.startswith('yes'):
        if self.data[0]['var_only'] == '0' or self._to_cluster_summary_has_known_nonsynonymous(assembled_summary) == 'yes':
            return 'yes'
        else:
            return 'no'
    else:
        return 'no'
[ "def", "_has_match", "(", "self", ",", "assembled_summary", ")", ":", "if", "assembled_summary", ".", "startswith", "(", "'yes'", ")", ":", "if", "self", ".", "data", "[", "0", "]", "[", "'var_only'", "]", "==", "'0'", "or", "self", ".", "_to_cluster_summary_has_known_nonsynonymous", "(", "assembled_summary", ")", "==", "'yes'", ":", "return", "'yes'", "else", ":", "return", "'no'", "else", ":", "return", "'no'" ]
assembled_summary should be output of _to_cluster_summary_assembled
[ "assembled_summary", "should", "be", "output", "of", "_to_cluster_summary_assembled" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/summary_cluster.py#L298-L306
2,209
sanger-pathogens/ariba
ariba/summary_cluster.py
SummaryCluster.has_var_groups
def has_var_groups(self):
    '''Returns a set of the variant group ids that this cluster has'''
    ids = set()
    for d in self.data:
        if self._has_known_variant(d) != 'no' and d['var_group'] != '.':
            ids.add(d['var_group'])
    return ids
python
def has_var_groups(self):
    '''Returns a set of the variant group ids that this cluster has'''
    ids = set()
    for d in self.data:
        if self._has_known_variant(d) != 'no' and d['var_group'] != '.':
            ids.add(d['var_group'])
    return ids
[ "def", "has_var_groups", "(", "self", ")", ":", "ids", "=", "set", "(", ")", "for", "d", "in", "self", ".", "data", ":", "if", "self", ".", "_has_known_variant", "(", "d", ")", "!=", "'no'", "and", "d", "[", "'var_group'", "]", "!=", "'.'", ":", "ids", ".", "add", "(", "d", "[", "'var_group'", "]", ")", "return", "ids" ]
Returns a set of the variant group ids that this cluster has
[ "Returns", "a", "set", "of", "the", "variant", "group", "ids", "that", "this", "cluster", "has" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/summary_cluster.py#L309-L315
2,210
sanger-pathogens/ariba
ariba/summary_cluster.py
SummaryCluster.column_summary_data
def column_summary_data(self):
    '''Returns a dictionary of column name -> value, for cluster-level results'''
    assembled_summary = self._to_cluster_summary_assembled()
    pct_id, read_depth = self._pc_id_and_read_depth_of_longest()

    columns = {
        'assembled': self._to_cluster_summary_assembled(),
        'match': self._has_match(assembled_summary),
        'ref_seq': self.ref_name,
        'pct_id': str(pct_id),
        'ctg_cov': str(read_depth),
        'known_var': self._to_cluster_summary_has_known_nonsynonymous(assembled_summary),
        'novel_var': self._to_cluster_summary_has_novel_nonsynonymous(assembled_summary)
    }

    return columns
python
def column_summary_data(self):
    '''Returns a dictionary of column name -> value, for cluster-level results'''
    assembled_summary = self._to_cluster_summary_assembled()
    pct_id, read_depth = self._pc_id_and_read_depth_of_longest()

    columns = {
        'assembled': self._to_cluster_summary_assembled(),
        'match': self._has_match(assembled_summary),
        'ref_seq': self.ref_name,
        'pct_id': str(pct_id),
        'ctg_cov': str(read_depth),
        'known_var': self._to_cluster_summary_has_known_nonsynonymous(assembled_summary),
        'novel_var': self._to_cluster_summary_has_novel_nonsynonymous(assembled_summary)
    }

    return columns
[ "def", "column_summary_data", "(", "self", ")", ":", "assembled_summary", "=", "self", ".", "_to_cluster_summary_assembled", "(", ")", "pct_id", ",", "read_depth", "=", "self", ".", "_pc_id_and_read_depth_of_longest", "(", ")", "columns", "=", "{", "'assembled'", ":", "self", ".", "_to_cluster_summary_assembled", "(", ")", ",", "'match'", ":", "self", ".", "_has_match", "(", "assembled_summary", ")", ",", "'ref_seq'", ":", "self", ".", "ref_name", ",", "'pct_id'", ":", "str", "(", "pct_id", ")", ",", "'ctg_cov'", ":", "str", "(", "read_depth", ")", ",", "'known_var'", ":", "self", ".", "_to_cluster_summary_has_known_nonsynonymous", "(", "assembled_summary", ")", ",", "'novel_var'", ":", "self", ".", "_to_cluster_summary_has_novel_nonsynonymous", "(", "assembled_summary", ")", "}", "return", "columns" ]
Returns a dictionary of column name -> value, for cluster-level results
[ "Returns", "a", "dictionary", "of", "column", "name", "-", ">", "value", "for", "cluster", "-", "level", "results" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/summary_cluster.py#L318-L333
2,211
sanger-pathogens/ariba
ariba/common.py
cat_files
def cat_files(infiles, outfile):
    '''Cats all files in list infiles into outfile'''
    f_out = pyfastaq.utils.open_file_write(outfile)

    for filename in infiles:
        if os.path.exists(filename):
            f_in = pyfastaq.utils.open_file_read(filename)
            for line in f_in:
                print(line, end='', file=f_out)
            pyfastaq.utils.close(f_in)

    pyfastaq.utils.close(f_out)
python
def cat_files(infiles, outfile):
    '''Cats all files in list infiles into outfile'''
    f_out = pyfastaq.utils.open_file_write(outfile)

    for filename in infiles:
        if os.path.exists(filename):
            f_in = pyfastaq.utils.open_file_read(filename)
            for line in f_in:
                print(line, end='', file=f_out)
            pyfastaq.utils.close(f_in)

    pyfastaq.utils.close(f_out)
[ "def", "cat_files", "(", "infiles", ",", "outfile", ")", ":", "f_out", "=", "pyfastaq", ".", "utils", ".", "open_file_write", "(", "outfile", ")", "for", "filename", "in", "infiles", ":", "if", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "f_in", "=", "pyfastaq", ".", "utils", ".", "open_file_read", "(", "filename", ")", "for", "line", "in", "f_in", ":", "print", "(", "line", ",", "end", "=", "''", ",", "file", "=", "f_out", ")", "pyfastaq", ".", "utils", ".", "close", "(", "f_in", ")", "pyfastaq", ".", "utils", ".", "close", "(", "f_out", ")" ]
Cats all files in list infiles into outfile
[ "Cats", "all", "files", "in", "list", "infiles", "into", "outfile" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/common.py#L45-L56
2,212
sanger-pathogens/ariba
ariba/assembly.py
Assembly._check_spades_log_file
def _check_spades_log_file(logfile):
    '''SPAdes can fail with a strange error. Stop everything if this happens'''
    f = pyfastaq.utils.open_file_read(logfile)

    for line in f:
        if line.startswith('== Error == system call for:') and line.rstrip().endswith('finished abnormally, err code: -7'):
            pyfastaq.utils.close(f)
            print('Error running SPAdes. Cannot continue. This is the error from the log file', logfile, '...', file=sys.stderr)
            print(line, file=sys.stderr)
            raise Error('Fatal error ("err code: -7") running spades. Cannot continue')

    pyfastaq.utils.close(f)
    return True
python
def _check_spades_log_file(logfile):
    '''SPAdes can fail with a strange error. Stop everything if this happens'''
    f = pyfastaq.utils.open_file_read(logfile)

    for line in f:
        if line.startswith('== Error == system call for:') and line.rstrip().endswith('finished abnormally, err code: -7'):
            pyfastaq.utils.close(f)
            print('Error running SPAdes. Cannot continue. This is the error from the log file', logfile, '...', file=sys.stderr)
            print(line, file=sys.stderr)
            raise Error('Fatal error ("err code: -7") running spades. Cannot continue')

    pyfastaq.utils.close(f)
    return True
[ "def", "_check_spades_log_file", "(", "logfile", ")", ":", "f", "=", "pyfastaq", ".", "utils", ".", "open_file_read", "(", "logfile", ")", "for", "line", "in", "f", ":", "if", "line", ".", "startswith", "(", "'== Error == system call for:'", ")", "and", "line", ".", "rstrip", "(", ")", ".", "endswith", "(", "'finished abnormally, err code: -7'", ")", ":", "pyfastaq", ".", "utils", ".", "close", "(", "f", ")", "print", "(", "'Error running SPAdes. Cannot continue. This is the error from the log file'", ",", "logfile", ",", "'...'", ",", "file", "=", "sys", ".", "stderr", ")", "print", "(", "line", ",", "file", "=", "sys", ".", "stderr", ")", "raise", "Error", "(", "'Fatal error (\"err code: -7\") running spades. Cannot continue'", ")", "pyfastaq", ".", "utils", ".", "close", "(", "f", ")", "return", "True" ]
SPAdes can fail with a strange error. Stop everything if this happens
[ "SPAdes", "can", "fail", "with", "a", "strange", "error", ".", "Stop", "everything", "if", "this", "happens" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly.py#L104-L116
2,213
sanger-pathogens/ariba
ariba/assembly.py
Assembly._fix_contig_orientation
def _fix_contig_orientation(contigs_fa, ref_fa, outfile, min_id=90, min_length=20, breaklen=200):
    '''Changes orientation of each contig to match the reference, when possible.
    Returns a set of names of contigs that had hits in both orientations to the reference'''
    if not os.path.exists(contigs_fa):
        raise Error('Cannot fix orientation of assembly contigs because file not found: ' + contigs_fa)

    tmp_coords = os.path.join(outfile + '.tmp.rename.coords')
    pymummer.nucmer.Runner(
        ref_fa,
        contigs_fa,
        tmp_coords,
        min_id=min_id,
        min_length=min_length,
        breaklen=breaklen,
        maxmatch=True,
    ).run()

    to_revcomp = set()
    not_revcomp = set()
    file_reader = pymummer.coords_file.reader(tmp_coords)

    for hit in file_reader:
        if hit.on_same_strand():
            not_revcomp.add(hit.qry_name)
        else:
            to_revcomp.add(hit.qry_name)

    os.unlink(tmp_coords)
    in_both = to_revcomp.intersection(not_revcomp)

    f = pyfastaq.utils.open_file_write(outfile)
    seq_reader = pyfastaq.sequences.file_reader(contigs_fa)

    for seq in seq_reader:
        if seq.id in to_revcomp and seq.id not in in_both:
            seq.revcomp()
        print(seq, file=f)

    pyfastaq.utils.close(f)
    return in_both
python
def _fix_contig_orientation(contigs_fa, ref_fa, outfile, min_id=90, min_length=20, breaklen=200):
    '''Changes orientation of each contig to match the reference, when possible.
    Returns a set of names of contigs that had hits in both orientations to the reference'''
    if not os.path.exists(contigs_fa):
        raise Error('Cannot fix orientation of assembly contigs because file not found: ' + contigs_fa)

    tmp_coords = os.path.join(outfile + '.tmp.rename.coords')
    pymummer.nucmer.Runner(
        ref_fa,
        contigs_fa,
        tmp_coords,
        min_id=min_id,
        min_length=min_length,
        breaklen=breaklen,
        maxmatch=True,
    ).run()

    to_revcomp = set()
    not_revcomp = set()
    file_reader = pymummer.coords_file.reader(tmp_coords)

    for hit in file_reader:
        if hit.on_same_strand():
            not_revcomp.add(hit.qry_name)
        else:
            to_revcomp.add(hit.qry_name)

    os.unlink(tmp_coords)
    in_both = to_revcomp.intersection(not_revcomp)

    f = pyfastaq.utils.open_file_write(outfile)
    seq_reader = pyfastaq.sequences.file_reader(contigs_fa)

    for seq in seq_reader:
        if seq.id in to_revcomp and seq.id not in in_both:
            seq.revcomp()
        print(seq, file=f)

    pyfastaq.utils.close(f)
    return in_both
[ "def", "_fix_contig_orientation", "(", "contigs_fa", ",", "ref_fa", ",", "outfile", ",", "min_id", "=", "90", ",", "min_length", "=", "20", ",", "breaklen", "=", "200", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "contigs_fa", ")", ":", "raise", "Error", "(", "'Cannot fix orientation of assembly contigs because file not found: '", "+", "contigs_fa", ")", "tmp_coords", "=", "os", ".", "path", ".", "join", "(", "outfile", "+", "'.tmp.rename.coords'", ")", "pymummer", ".", "nucmer", ".", "Runner", "(", "ref_fa", ",", "contigs_fa", ",", "tmp_coords", ",", "min_id", "=", "min_id", ",", "min_length", "=", "min_length", ",", "breaklen", "=", "breaklen", ",", "maxmatch", "=", "True", ",", ")", ".", "run", "(", ")", "to_revcomp", "=", "set", "(", ")", "not_revcomp", "=", "set", "(", ")", "file_reader", "=", "pymummer", ".", "coords_file", ".", "reader", "(", "tmp_coords", ")", "for", "hit", "in", "file_reader", ":", "if", "hit", ".", "on_same_strand", "(", ")", ":", "not_revcomp", ".", "add", "(", "hit", ".", "qry_name", ")", "else", ":", "to_revcomp", ".", "add", "(", "hit", ".", "qry_name", ")", "os", ".", "unlink", "(", "tmp_coords", ")", "in_both", "=", "to_revcomp", ".", "intersection", "(", "not_revcomp", ")", "f", "=", "pyfastaq", ".", "utils", ".", "open_file_write", "(", "outfile", ")", "seq_reader", "=", "pyfastaq", ".", "sequences", ".", "file_reader", "(", "contigs_fa", ")", "for", "seq", "in", "seq_reader", ":", "if", "seq", ".", "id", "in", "to_revcomp", "and", "seq", ".", "id", "not", "in", "in_both", ":", "seq", ".", "revcomp", "(", ")", "print", "(", "seq", ",", "file", "=", "f", ")", "pyfastaq", ".", "utils", ".", "close", "(", "f", ")", "return", "in_both" ]
Changes orientation of each contig to match the reference, when possible. Returns a set of names of contigs that had hits in both orientations to the reference
[ "Changes", "orientation", "of", "each", "contig", "to", "match", "the", "reference", "when", "possible", ".", "Returns", "a", "set", "of", "names", "of", "contigs", "that", "had", "hits", "in", "both", "orientations", "to", "the", "reference" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly.py#L205-L242
2,214
sanger-pathogens/ariba
ariba/assembly_compare.py
AssemblyCompare._parse_nucmer_coords_file
def _parse_nucmer_coords_file(coords_file, ref_name):
    '''Input is coords file made by self._run_nucmer. Reference should have one
    sequence only. ref_name is name of the reference sequence, to sanity
    check the coords file. Returns dictionary. Key = assembly contig name.
    Value = list of nucmer hits to that contig'''
    file_reader = pymummer.coords_file.reader(coords_file)
    nucmer_hits = {}

    for hit in file_reader:
        assert hit.ref_name == ref_name
        contig = hit.qry_name
        if contig not in nucmer_hits:
            nucmer_hits[contig] = []
        nucmer_hits[contig].append(copy.copy(hit))

    return nucmer_hits
python
def _parse_nucmer_coords_file(coords_file, ref_name):
    '''Input is coords file made by self._run_nucmer. Reference should have one
    sequence only. ref_name is name of the reference sequence, to sanity
    check the coords file. Returns dictionary. Key = assembly contig name.
    Value = list of nucmer hits to that contig'''
    file_reader = pymummer.coords_file.reader(coords_file)
    nucmer_hits = {}

    for hit in file_reader:
        assert hit.ref_name == ref_name
        contig = hit.qry_name
        if contig not in nucmer_hits:
            nucmer_hits[contig] = []
        nucmer_hits[contig].append(copy.copy(hit))

    return nucmer_hits
[ "def", "_parse_nucmer_coords_file", "(", "coords_file", ",", "ref_name", ")", ":", "file_reader", "=", "pymummer", ".", "coords_file", ".", "reader", "(", "coords_file", ")", "nucmer_hits", "=", "{", "}", "for", "hit", "in", "file_reader", ":", "assert", "hit", ".", "ref_name", "==", "ref_name", "contig", "=", "hit", ".", "qry_name", "if", "contig", "not", "in", "nucmer_hits", ":", "nucmer_hits", "[", "contig", "]", "=", "[", "]", "nucmer_hits", "[", "contig", "]", ".", "append", "(", "copy", ".", "copy", "(", "hit", ")", ")", "return", "nucmer_hits" ]
Input is coords file made by self._run_nucmer. Reference should have one sequence only. ref_name is name of the reference sequence, to sanity check the coords file. Returns dictionary. Key = assembly contig name. Value = list of nucmer hits to that contig
[ "Input", "is", "coords", "file", "made", "by", "self", ".", "_run_nucmer", ".", "Reference", "should", "have", "one", "sequence", "only", ".", "ref_name", "is", "name", "fo", "the", "reference", "sequence", "to", "sanity", "check", "the", "coords", "file", ".", "Returns", "dictionary", ".", "Key", "=", "assembly", "contig", "name", ".", "Value", "=", "list", "of", "nucmer", "hits", "to", "that", "contig" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly_compare.py#L61-L74
2,215
sanger-pathogens/ariba
ariba/assembly_compare.py
AssemblyCompare._nucmer_hits_to_percent_identity
def _nucmer_hits_to_percent_identity(nucmer_hits):
    '''Input is hits made by self._parse_nucmer_coords_file.
    Returns dictionary. key = contig name. Value = percent identity of hits
    to that contig'''
    percent_identities = {}
    max_lengths = {}

    for contig in nucmer_hits:
        max_length = -1
        percent_identity = 0
        for hit in nucmer_hits[contig]:
            if hit.hit_length_qry > max_length:
                max_length = hit.hit_length_qry
                percent_identity = hit.percent_identity

        percent_identities[contig] = percent_identity

    return percent_identities
python
def _nucmer_hits_to_percent_identity(nucmer_hits):
    '''Input is hits made by self._parse_nucmer_coords_file.
    Returns dictionary. key = contig name. Value = percent identity of hits
    to that contig'''
    percent_identities = {}
    max_lengths = {}

    for contig in nucmer_hits:
        max_length = -1
        percent_identity = 0
        for hit in nucmer_hits[contig]:
            if hit.hit_length_qry > max_length:
                max_length = hit.hit_length_qry
                percent_identity = hit.percent_identity

        percent_identities[contig] = percent_identity

    return percent_identities
[ "def", "_nucmer_hits_to_percent_identity", "(", "nucmer_hits", ")", ":", "percent_identities", "=", "{", "}", "max_lengths", "=", "{", "}", "for", "contig", "in", "nucmer_hits", ":", "max_length", "=", "-", "1", "percent_identity", "=", "0", "for", "hit", "in", "nucmer_hits", "[", "contig", "]", ":", "if", "hit", ".", "hit_length_qry", ">", "max_length", ":", "max_length", "=", "hit", ".", "hit_length_qry", "percent_identity", "=", "hit", ".", "percent_identity", "percent_identities", "[", "contig", "]", "=", "percent_identity", "return", "percent_identities" ]
Input is hits made by self._parse_nucmer_coords_file. Returns dictionary. key = contig name. Value = percent identity of hits to that contig
[ "Input", "is", "hits", "made", "by", "self", ".", "_parse_nucmer_coords_file", ".", "Returns", "dictionary", ".", "key", "=", "contig", "name", ".", "Value", "=", "percent", "identity", "of", "hits", "to", "that", "contig" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly_compare.py#L78-L93
2,216
sanger-pathogens/ariba
ariba/assembly_compare.py
AssemblyCompare._nucmer_hits_to_assembly_coords
def _nucmer_hits_to_assembly_coords(nucmer_hits):
    '''Input is hits made by self._parse_nucmer_coords_file.
    Returns dictionary. key = contig name. Value = list of coords that match
    to the reference gene'''
    coords = {}
    for l in nucmer_hits.values():
        for hit in l:
            if hit.qry_name not in coords:
                coords[hit.qry_name] = []
            coords[hit.qry_name].append(hit.qry_coords())

    for scaff in coords:
        pyfastaq.intervals.merge_overlapping_in_list(coords[scaff])

    return coords
python
def _nucmer_hits_to_assembly_coords(nucmer_hits):
    '''Input is hits made by self._parse_nucmer_coords_file.
    Returns dictionary. key = contig name. Value = list of coords that match
    to the reference gene'''
    coords = {}
    for l in nucmer_hits.values():
        for hit in l:
            if hit.qry_name not in coords:
                coords[hit.qry_name] = []
            coords[hit.qry_name].append(hit.qry_coords())

    for scaff in coords:
        pyfastaq.intervals.merge_overlapping_in_list(coords[scaff])

    return coords
[ "def", "_nucmer_hits_to_assembly_coords", "(", "nucmer_hits", ")", ":", "coords", "=", "{", "}", "for", "l", "in", "nucmer_hits", ".", "values", "(", ")", ":", "for", "hit", "in", "l", ":", "if", "hit", ".", "qry_name", "not", "in", "coords", ":", "coords", "[", "hit", ".", "qry_name", "]", "=", "[", "]", "coords", "[", "hit", ".", "qry_name", "]", ".", "append", "(", "hit", ".", "qry_coords", "(", ")", ")", "for", "scaff", "in", "coords", ":", "pyfastaq", ".", "intervals", ".", "merge_overlapping_in_list", "(", "coords", "[", "scaff", "]", ")", "return", "coords" ]
Input is hits made by self._parse_nucmer_coords_file. Returns dictionary. key = contig name. Value = list of coords that match to the reference gene
[ "Input", "is", "hits", "made", "by", "self", ".", "_parse_nucmer_coords_file", ".", "Returns", "dictionary", ".", "key", "=", "contig", "name", ".", "Value", "=", "list", "of", "coords", "that", "match", "to", "the", "reference", "gene" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly_compare.py#L97-L111
2,217
sanger-pathogens/ariba
ariba/assembly_compare.py
AssemblyCompare.nucmer_hits_to_ref_coords
def nucmer_hits_to_ref_coords(cls, nucmer_hits, contig=None):
    '''Input is hits made by self._parse_nucmer_coords_file.
    Returns dictionary. Key = contig name. Value = list of coords in the
    reference sequence for that contig.
    if contig=contig_name, then just gets the ref coords from that contig,
    instead of using all the contigs'''
    coords = []
    if contig is None:
        coords = {key: [] for key in nucmer_hits.keys()}
    else:
        coords = {contig: []}

    for key in coords:
        coords[key] = [hit.ref_coords() for hit in nucmer_hits[key]]
        pyfastaq.intervals.merge_overlapping_in_list(coords[key])

    return coords
python
def nucmer_hits_to_ref_coords(cls, nucmer_hits, contig=None):
    '''Input is hits made by self._parse_nucmer_coords_file.
    Returns dictionary. Key = contig name. Value = list of coords in the
    reference sequence for that contig.
    if contig=contig_name, then just gets the ref coords from that contig,
    instead of using all the contigs'''
    coords = []
    if contig is None:
        coords = {key: [] for key in nucmer_hits.keys()}
    else:
        coords = {contig: []}

    for key in coords:
        coords[key] = [hit.ref_coords() for hit in nucmer_hits[key]]
        pyfastaq.intervals.merge_overlapping_in_list(coords[key])

    return coords
[ "def", "nucmer_hits_to_ref_coords", "(", "cls", ",", "nucmer_hits", ",", "contig", "=", "None", ")", ":", "coords", "=", "[", "]", "if", "contig", "is", "None", ":", "coords", "=", "{", "key", ":", "[", "]", "for", "key", "in", "nucmer_hits", ".", "keys", "(", ")", "}", "else", ":", "coords", "=", "{", "contig", ":", "[", "]", "}", "for", "key", "in", "coords", ":", "coords", "[", "key", "]", "=", "[", "hit", ".", "ref_coords", "(", ")", "for", "hit", "in", "nucmer_hits", "[", "key", "]", "]", "pyfastaq", ".", "intervals", ".", "merge_overlapping_in_list", "(", "coords", "[", "key", "]", ")", "return", "coords" ]
Input is hits made by self._parse_nucmer_coords_file. Returns dictionary. Key = contig name. Value = list of coords in the reference sequence for that contig. if contig=contig_name, then just gets the ref coords from that contig, instead of using all the contigs
[ "Input", "is", "hits", "made", "by", "self", ".", "_parse_nucmer_coords_file", ".", "Returns", "dictionary", ".", "Key", "=", "contig", "name", ".", "Value", "=", "list", "of", "coords", "in", "the", "reference", "sequence", "for", "that", "contig", ".", "if", "contig", "=", "contig_name", "then", "just", "gets", "the", "ref", "coords", "from", "that", "contig", "instead", "of", "using", "all", "the", "contigs" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly_compare.py#L119-L135
2,218
sanger-pathogens/ariba
ariba/assembly_compare.py
AssemblyCompare.nucmer_hits_to_ref_and_qry_coords
def nucmer_hits_to_ref_and_qry_coords(cls, nucmer_hits, contig=None):
    '''Same as nucmer_hits_to_ref_coords, except removes containing hits first,
    and returns ref and qry coords lists'''
    if contig is None:
        ctg_coords = {key: [] for key in nucmer_hits.keys()}
    else:
        ctg_coords = {contig: []}

    ref_coords = {}

    for key in ctg_coords:
        hits = copy.copy(nucmer_hits[key])
        hits.sort(key=lambda x: len(x.ref_coords()))

        if len(hits) > 1:
            i = 0
            while i < len(hits) - 1:
                c1 = hits[i].ref_coords()
                c2 = hits[i+1].ref_coords()
                if c2.contains(c1):
                    hits.pop(i)
                else:
                    i += 1

        ref_coords[key] = [hit.ref_coords() for hit in hits]
        ctg_coords[key] = [hit.qry_coords() for hit in hits]
        pyfastaq.intervals.merge_overlapping_in_list(ref_coords[key])
        pyfastaq.intervals.merge_overlapping_in_list(ctg_coords[key])

    return ctg_coords, ref_coords
python
def nucmer_hits_to_ref_and_qry_coords(cls, nucmer_hits, contig=None):
    '''Same as nucmer_hits_to_ref_coords, except removes containing hits first,
    and returns ref and qry coords lists'''
    if contig is None:
        ctg_coords = {key: [] for key in nucmer_hits.keys()}
    else:
        ctg_coords = {contig: []}

    ref_coords = {}

    for key in ctg_coords:
        hits = copy.copy(nucmer_hits[key])
        hits.sort(key=lambda x: len(x.ref_coords()))

        if len(hits) > 1:
            i = 0
            while i < len(hits) - 1:
                c1 = hits[i].ref_coords()
                c2 = hits[i+1].ref_coords()
                if c2.contains(c1):
                    hits.pop(i)
                else:
                    i += 1

        ref_coords[key] = [hit.ref_coords() for hit in hits]
        ctg_coords[key] = [hit.qry_coords() for hit in hits]
        pyfastaq.intervals.merge_overlapping_in_list(ref_coords[key])
        pyfastaq.intervals.merge_overlapping_in_list(ctg_coords[key])

    return ctg_coords, ref_coords
[ "def", "nucmer_hits_to_ref_and_qry_coords", "(", "cls", ",", "nucmer_hits", ",", "contig", "=", "None", ")", ":", "if", "contig", "is", "None", ":", "ctg_coords", "=", "{", "key", ":", "[", "]", "for", "key", "in", "nucmer_hits", ".", "keys", "(", ")", "}", "else", ":", "ctg_coords", "=", "{", "contig", ":", "[", "]", "}", "ref_coords", "=", "{", "}", "for", "key", "in", "ctg_coords", ":", "hits", "=", "copy", ".", "copy", "(", "nucmer_hits", "[", "key", "]", ")", "hits", ".", "sort", "(", "key", "=", "lambda", "x", ":", "len", "(", "x", ".", "ref_coords", "(", ")", ")", ")", "if", "len", "(", "hits", ")", ">", "1", ":", "i", "=", "0", "while", "i", "<", "len", "(", "hits", ")", "-", "1", ":", "c1", "=", "hits", "[", "i", "]", ".", "ref_coords", "(", ")", "c2", "=", "hits", "[", "i", "+", "1", "]", ".", "ref_coords", "(", ")", "if", "c2", ".", "contains", "(", "c1", ")", ":", "hits", ".", "pop", "(", "i", ")", "else", ":", "i", "+=", "1", "ref_coords", "[", "key", "]", "=", "[", "hit", ".", "ref_coords", "(", ")", "for", "hit", "in", "hits", "]", "ctg_coords", "[", "key", "]", "=", "[", "hit", ".", "qry_coords", "(", ")", "for", "hit", "in", "hits", "]", "pyfastaq", ".", "intervals", ".", "merge_overlapping_in_list", "(", "ref_coords", "[", "key", "]", ")", "pyfastaq", ".", "intervals", ".", "merge_overlapping_in_list", "(", "ctg_coords", "[", "key", "]", ")", "return", "ctg_coords", ",", "ref_coords" ]
Same as nucmer_hits_to_ref_coords, except removes containing hits first, and returns ref and qry coords lists
[ "Same", "as", "nucmer_hits_to_ref_coords", "except", "removes", "containing", "hits", "first", "and", "returns", "ref", "and", "qry", "coords", "lists" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly_compare.py#L139-L168
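A minimal sketch of the containment-removal step above, using plain (start, end) tuples in place of pymummer hit objects; the helper names below are invented for illustration and are not part of ariba:

def contains(outer, inner):
    # (start, end) inclusive intervals: does outer contain inner?
    return outer[0] <= inner[0] and inner[1] <= outer[1]

def drop_contained(hits):
    # Sort shortest first, then drop any hit contained in its longer
    # neighbour, mirroring the while-loop in nucmer_hits_to_ref_and_qry_coords.
    hits = sorted(hits, key=lambda h: h[1] - h[0])
    i = 0
    while i < len(hits) - 1:
        if contains(hits[i+1], hits[i]):
            hits.pop(i)
        else:
            i += 1
    return hits

print(drop_contained([(5, 8), (1, 10), (20, 30)]))  # [(1, 10), (20, 30)]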
2,219
sanger-pathogens/ariba
ariba/assembly_compare.py
AssemblyCompare.ref_cov_per_contig
def ref_cov_per_contig(nucmer_hits):
    '''Input is hits made by self._parse_nucmer_coords_file.
    Returns dictionary. key = contig name. Value = number of bases
    that match to the reference sequence.'''
    coords = AssemblyCompare.nucmer_hits_to_ref_coords(nucmer_hits)
    return {x: pyfastaq.intervals.length_sum_from_list(coords[x]) for x in coords}
python
def ref_cov_per_contig(nucmer_hits):
    '''Input is hits made by self._parse_nucmer_coords_file.
    Returns dictionary. key = contig name. Value = number of bases
    that match to the reference sequence.'''
    coords = AssemblyCompare.nucmer_hits_to_ref_coords(nucmer_hits)
    return {x: pyfastaq.intervals.length_sum_from_list(coords[x]) for x in coords}
[ "def", "ref_cov_per_contig", "(", "nucmer_hits", ")", ":", "coords", "=", "AssemblyCompare", ".", "nucmer_hits_to_ref_coords", "(", "nucmer_hits", ")", "return", "{", "x", ":", "pyfastaq", ".", "intervals", ".", "length_sum_from_list", "(", "coords", "[", "x", "]", ")", "for", "x", "in", "coords", "}" ]
Input is hits made by self._parse_nucmer_coords_file. Returns dictionary. key = contig name. Value = number of bases that match to the reference sequence.
[ "Input", "is", "hits", "made", "by", "self", ".", "_parse_nucmer_coords_file", ".", "Returns", "dictionary", ".", "key", "=", "contig", "name", ".", "Value", "=", "number", "of", "bases", "that", "match", "to", "the", "reference", "sequence", "." ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly_compare.py#L172-L177
2,220
sanger-pathogens/ariba
ariba/assembly_compare.py
AssemblyCompare._ref_covered_by_at_least_one_full_length_contig
def _ref_covered_by_at_least_one_full_length_contig(nucmer_hits, percent_threshold, max_nt_extend):
    '''Returns true iff there exists a contig that completely covers the reference sequence.
    nucmer_hits = hits made by self._parse_nucmer_coords_file.'''
    for l in nucmer_hits.values():
        for hit in l:
            if ( (2 * max_nt_extend) + len(hit.ref_coords()) ) / hit.ref_length >= percent_threshold:
                return True

    return False
python
def _ref_covered_by_at_least_one_full_length_contig(nucmer_hits, percent_threshold, max_nt_extend):
    '''Returns true iff there exists a contig that completely covers the reference sequence.
    nucmer_hits = hits made by self._parse_nucmer_coords_file.'''
    for l in nucmer_hits.values():
        for hit in l:
            if ( (2 * max_nt_extend) + len(hit.ref_coords()) ) / hit.ref_length >= percent_threshold:
                return True

    return False
[ "def", "_ref_covered_by_at_least_one_full_length_contig", "(", "nucmer_hits", ",", "percent_threshold", ",", "max_nt_extend", ")", ":", "for", "l", "in", "nucmer_hits", ".", "values", "(", ")", ":", "for", "hit", "in", "l", ":", "if", "(", "(", "2", "*", "max_nt_extend", ")", "+", "len", "(", "hit", ".", "ref_coords", "(", ")", ")", ")", "/", "hit", ".", "ref_length", ">=", "percent_threshold", ":", "return", "True", "return", "False" ]
Returns true iff there exists a contig that completely covers the reference sequence. nucmer_hits = hits made by self._parse_nucmer_coords_file.
[ "Returns", "true", "iff", "there", "exists", "a", "contig", "that", "completely", "covers", "the", "reference", "sequence", "nucmer_hits", "=", "hits", "made", "by", "self", ".", "_parse_nucmer_coords_file", "." ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly_compare.py#L352-L360
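The coverage test above is plain arithmetic; a worked example with hypothetical numbers shows how max_nt_extend pads a near-full-length hit over the threshold:

# Hypothetical numbers: a 950 bp hit on a 1000 bp reference, allowing 30 bp
# of extension at each end, against a 0.99 threshold.
max_nt_extend = 30
hit_len = 950
ref_length = 1000
covered = (2 * max_nt_extend + hit_len) / ref_length  # (60 + 950) / 1000 = 1.01
print(covered >= 0.99)  # True: the padded hit counts as full-length coverage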
2,221
sanger-pathogens/ariba
ariba/assembly_compare.py
AssemblyCompare.nucmer_hit_containing_reference_position
def nucmer_hit_containing_reference_position(nucmer_hits, ref_name, ref_position, qry_name=None):
    '''Returns the first nucmer match found that contains the given
    reference location. nucmer_hits = hits made by self._parse_nucmer_coords_file.
    Returns None if no matching hit found'''
    for contig_name in nucmer_hits:
        for hit in nucmer_hits[contig_name]:
            if hit.ref_name == ref_name and (qry_name is None or qry_name == hit.qry_name) and hit.ref_coords().distance_to_point(ref_position) == 0:
                return hit

    return None
python
def nucmer_hit_containing_reference_position(nucmer_hits, ref_name, ref_position, qry_name=None):
    '''Returns the first nucmer match found that contains the given
    reference location. nucmer_hits = hits made by self._parse_nucmer_coords_file.
    Returns None if no matching hit found'''
    for contig_name in nucmer_hits:
        for hit in nucmer_hits[contig_name]:
            if hit.ref_name == ref_name and (qry_name is None or qry_name == hit.qry_name) and hit.ref_coords().distance_to_point(ref_position) == 0:
                return hit

    return None
[ "def", "nucmer_hit_containing_reference_position", "(", "nucmer_hits", ",", "ref_name", ",", "ref_position", ",", "qry_name", "=", "None", ")", ":", "for", "contig_name", "in", "nucmer_hits", ":", "for", "hit", "in", "nucmer_hits", "[", "contig_name", "]", ":", "if", "hit", ".", "ref_name", "==", "ref_name", "and", "(", "qry_name", "is", "None", "or", "qry_name", "==", "hit", ".", "qry_name", ")", "and", "hit", ".", "ref_coords", "(", ")", ".", "distance_to_point", "(", "ref_position", ")", "==", "0", ":", "return", "hit", "return", "None" ]
Returns the first nucmer match found that contains the given reference location. nucmer_hits = hits made by self._parse_nucmer_coords_file. Returns None if no matching hit found
[ "Returns", "the", "first", "nucmer", "match", "found", "that", "contains", "the", "given", "reference", "location", ".", "nucmer_hits", "=", "hits", "made", "by", "self", ".", "_parse_nucmer_coords_file", ".", "Returns", "None", "if", "no", "matching", "hit", "found" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly_compare.py#L384-L393
2,222
sanger-pathogens/ariba
ariba/external_progs.py
ExternalProgs._get_exe
def _get_exe(prog):
    '''Given a program name, return what we expect its executable to be called'''
    if prog in prog_to_env_var:
        env_var = prog_to_env_var[prog]
        if env_var in os.environ:
            return os.environ[env_var]

    return prog_to_default[prog]
python
def _get_exe(prog):
    '''Given a program name, return what we expect its executable to be called'''
    if prog in prog_to_env_var:
        env_var = prog_to_env_var[prog]
        if env_var in os.environ:
            return os.environ[env_var]

    return prog_to_default[prog]
[ "def", "_get_exe", "(", "prog", ")", ":", "if", "prog", "in", "prog_to_env_var", ":", "env_var", "=", "prog_to_env_var", "[", "prog", "]", "if", "env_var", "in", "os", ".", "environ", ":", "return", "os", ".", "environ", "[", "env_var", "]", "return", "prog_to_default", "[", "prog", "]" ]
Given a program name, return what we expect its executable to be called
[ "Given", "a", "program", "name", "return", "what", "we", "expect", "its", "exectuable", "to", "be", "called" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/external_progs.py#L131-L138
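The environment-variable override pattern in _get_exe is easy to reproduce with the standard library alone; the mapping contents below are invented for illustration:

import os

# Hypothetical lookup tables standing in for ariba's module-level dicts.
prog_to_env_var = {'samtools': 'ARIBA_SAMTOOLS'}
prog_to_default = {'samtools': 'samtools', 'bowtie2': 'bowtie2'}

def get_exe(prog):
    # Prefer an explicit override from the environment, else the default name.
    env_var = prog_to_env_var.get(prog)
    if env_var and env_var in os.environ:
        return os.environ[env_var]
    return prog_to_default[prog]

os.environ['ARIBA_SAMTOOLS'] = '/opt/bin/samtools-1.9'
print(get_exe('samtools'))  # /opt/bin/samtools-1.9
print(get_exe('bowtie2'))   # bowtie2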
2,223
sanger-pathogens/ariba
ariba/cdhit.py
Runner.fake_run
def fake_run(self):
    '''Doesn't actually run cd-hit. Instead, puts each input sequence into its own
    cluster. So it's as if cdhit was run, but didn't cluster anything'''
    clusters = {}
    used_names = set()
    seq_reader = pyfastaq.sequences.file_reader(self.infile)

    for seq in seq_reader:
        if seq.id in used_names:
            raise Error('Sequence name "' + seq.id + '" not unique. Cannot continue')

        clusters[str(len(clusters) + self.min_cluster_number)] = {seq.id}
        used_names.add(seq.id)

    return clusters
python
def fake_run(self):
    '''Doesn't actually run cd-hit. Instead, puts each input sequence into its own
    cluster. So it's as if cdhit was run, but didn't cluster anything'''
    clusters = {}
    used_names = set()
    seq_reader = pyfastaq.sequences.file_reader(self.infile)

    for seq in seq_reader:
        if seq.id in used_names:
            raise Error('Sequence name "' + seq.id + '" not unique. Cannot continue')

        clusters[str(len(clusters) + self.min_cluster_number)] = {seq.id}
        used_names.add(seq.id)

    return clusters
[ "def", "fake_run", "(", "self", ")", ":", "clusters", "=", "{", "}", "used_names", "=", "set", "(", ")", "seq_reader", "=", "pyfastaq", ".", "sequences", ".", "file_reader", "(", "self", ".", "infile", ")", "for", "seq", "in", "seq_reader", ":", "if", "seq", ".", "id", "in", "used_names", ":", "raise", "Error", "(", "'Sequence name \"'", "+", "seq", ".", "id", "+", "'\" not unique. Cannot continue'", ")", "clusters", "[", "str", "(", "len", "(", "clusters", ")", "+", "self", ".", "min_cluster_number", ")", "]", "=", "{", "seq", ".", "id", "}", "used_names", ".", "add", "(", "seq", ".", "id", ")", "return", "clusters" ]
Doesn't actually run cd-hit. Instead, puts each input sequence into its own cluster. So it's as if cdhit was run, but didn't cluster anything
[ "Doesn", "t", "actually", "run", "cd", "-", "hit", ".", "Instead", "puts", "each", "input", "sequence", "into", "its", "own", "cluster", ".", "So", "it", "s", "as", "if", "cdhit", "was", "run", "but", "didn", "t", "cluster", "anything" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/cdhit.py#L38-L51
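The same one-sequence-per-cluster construction, reduced to a self-contained sketch over a plain list of names (pyfastaq file reading omitted):

def singleton_clusters(names, min_cluster_number=0):
    # One cluster per input name, numbered from min_cluster_number,
    # with a uniqueness check like the one in fake_run.
    clusters = {}
    seen = set()
    for name in names:
        if name in seen:
            raise ValueError('Sequence name "%s" not unique' % name)
        clusters[str(len(clusters) + min_cluster_number)] = {name}
        seen.add(name)
    return clusters

print(singleton_clusters(['seqA', 'seqB'], min_cluster_number=3))
# {'3': {'seqA'}, '4': {'seqB'}}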
2,224
sanger-pathogens/ariba
ariba/cdhit.py
Runner.run_get_clusters_from_file
def run_get_clusters_from_file(self, clusters_infile, all_ref_seqs, rename_dict=None):
    '''Instead of running cdhit, gets the clusters info from the input file.'''
    if rename_dict is None:
        rename_dict = {}

    # check that every sequence in the clusters file can be
    # found in the fasta file
    seq_reader = pyfastaq.sequences.file_reader(self.infile)
    names_list_from_fasta_file = [seq.id for seq in seq_reader]
    names_set_from_fasta_file = set(names_list_from_fasta_file)

    clusters = self._load_user_clusters_file(clusters_infile, all_ref_seqs, rename_dict=rename_dict)

    if len(names_set_from_fasta_file) != len(names_list_from_fasta_file):
        raise Error('At least one duplicate name in fasta file ' + self.infile + '. Cannot continue')

    names_from_clusters_file = set()
    for new_names in clusters.values():
        names_from_clusters_file.update(new_names)

    if not names_set_from_fasta_file.issubset(names_from_clusters_file):
        raise Error('Some names in fasta file "' + self.infile + '" not given in cluster file. Cannot continue')

    return clusters
python
def run_get_clusters_from_file(self, clusters_infile, all_ref_seqs, rename_dict=None):
    '''Instead of running cdhit, gets the clusters info from the input file.'''
    if rename_dict is None:
        rename_dict = {}

    # check that every sequence in the clusters file can be
    # found in the fasta file
    seq_reader = pyfastaq.sequences.file_reader(self.infile)
    names_list_from_fasta_file = [seq.id for seq in seq_reader]
    names_set_from_fasta_file = set(names_list_from_fasta_file)

    clusters = self._load_user_clusters_file(clusters_infile, all_ref_seqs, rename_dict=rename_dict)

    if len(names_set_from_fasta_file) != len(names_list_from_fasta_file):
        raise Error('At least one duplicate name in fasta file ' + self.infile + '. Cannot continue')

    names_from_clusters_file = set()
    for new_names in clusters.values():
        names_from_clusters_file.update(new_names)

    if not names_set_from_fasta_file.issubset(names_from_clusters_file):
        raise Error('Some names in fasta file "' + self.infile + '" not given in cluster file. Cannot continue')

    return clusters
[ "def", "run_get_clusters_from_file", "(", "self", ",", "clusters_infile", ",", "all_ref_seqs", ",", "rename_dict", "=", "None", ")", ":", "if", "rename_dict", "is", "None", ":", "rename_dict", "=", "{", "}", "# check that every sequence in the clusters file can be", "# found in the fasta file", "seq_reader", "=", "pyfastaq", ".", "sequences", ".", "file_reader", "(", "self", ".", "infile", ")", "names_list_from_fasta_file", "=", "[", "seq", ".", "id", "for", "seq", "in", "seq_reader", "]", "names_set_from_fasta_file", "=", "set", "(", "names_list_from_fasta_file", ")", "clusters", "=", "self", ".", "_load_user_clusters_file", "(", "clusters_infile", ",", "all_ref_seqs", ",", "rename_dict", "=", "rename_dict", ")", "if", "len", "(", "names_set_from_fasta_file", ")", "!=", "len", "(", "names_list_from_fasta_file", ")", ":", "raise", "Error", "(", "'At least one duplicate name in fasta file '", "+", "self", ".", "infile", "+", "'. Cannot continue'", ")", "names_from_clusters_file", "=", "set", "(", ")", "for", "new_names", "in", "clusters", ".", "values", "(", ")", ":", "names_from_clusters_file", ".", "update", "(", "new_names", ")", "if", "not", "names_set_from_fasta_file", ".", "issubset", "(", "names_from_clusters_file", ")", ":", "raise", "Error", "(", "'Some names in fasta file \"'", "+", "self", ".", "infile", "+", "'\" not given in cluster file. Cannot continue'", ")", "return", "clusters" ]
Instead of running cdhit, gets the clusters info from the input file.
[ "Instead", "of", "running", "cdhit", "gets", "the", "clusters", "info", "from", "the", "input", "file", "." ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/cdhit.py#L86-L109
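The two set-based checks in run_get_clusters_from_file can be sketched independently of pyfastaq; the names below are hypothetical:

def check_names(fasta_names, cluster_sets):
    # Duplicate check: a set collapses repeats, so the lengths must agree.
    if len(set(fasta_names)) != len(fasta_names):
        raise ValueError('At least one duplicate name in fasta input')
    # Coverage check: every fasta name must appear in some cluster.
    clustered = set()
    for names in cluster_sets.values():
        clustered.update(names)
    if not set(fasta_names).issubset(clustered):
        raise ValueError('Some fasta names missing from cluster file')

check_names(['a', 'b'], {'0': {'a'}, '1': {'b', 'c'}})  # passes silently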
2,225
sanger-pathogens/ariba
ariba/mapping.py
sam_pair_to_insert
def sam_pair_to_insert(s1, s2):
    '''Returns insert size from pair of sam records, as long as their orientation is "innies".
    Otherwise returns None.'''
    if s1.is_unmapped or s2.is_unmapped or (s1.tid != s2.tid) or (s1.is_reverse == s2.is_reverse):
        return None

    # If here, reads are both mapped to the same ref, and in opposite orientations
    if s1.is_reverse:
        end = s1.reference_end - 1
        start = s2.reference_start
    else:
        end = s2.reference_end - 1
        start = s1.reference_start

    if start < end:
        return end - start + 1
    else:
        return None
python
def sam_pair_to_insert(s1, s2):
    '''Returns insert size from pair of sam records, as long as their orientation is "innies".
    Otherwise returns None.'''
    if s1.is_unmapped or s2.is_unmapped or (s1.tid != s2.tid) or (s1.is_reverse == s2.is_reverse):
        return None

    # If here, reads are both mapped to the same ref, and in opposite orientations
    if s1.is_reverse:
        end = s1.reference_end - 1
        start = s2.reference_start
    else:
        end = s2.reference_end - 1
        start = s1.reference_start

    if start < end:
        return end - start + 1
    else:
        return None
[ "def", "sam_pair_to_insert", "(", "s1", ",", "s2", ")", ":", "if", "s1", ".", "is_unmapped", "or", "s2", ".", "is_unmapped", "or", "(", "s1", ".", "tid", "!=", "s2", ".", "tid", ")", "or", "(", "s1", ".", "is_reverse", "==", "s2", ".", "is_reverse", ")", ":", "return", "None", "# If here, reads are both mapped to the same ref, and in opposite orientations", "if", "s1", ".", "is_reverse", ":", "end", "=", "s1", ".", "reference_end", "-", "1", "start", "=", "s2", ".", "reference_start", "else", ":", "end", "=", "s2", ".", "reference_end", "-", "1", "start", "=", "s1", ".", "reference_start", "if", "start", "<", "end", ":", "return", "end", "-", "start", "+", "1", "else", ":", "return", "None" ]
Returns insert size from pair of sam records, as long as their orientation is "innies". Otherwise returns None.
[ "Returns", "insert", "size", "from", "pair", "of", "sam", "records", "as", "long", "as", "their", "orientation", "is", "innies", ".", "Otherwise", "returns", "None", "." ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/mapping.py#L153-L170
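A self-contained sketch of the insert-size arithmetic, with a namedtuple standing in for the pysam.AlignedSegment fields the function reads (in pysam, reference_end points one past the last aligned base):

from collections import namedtuple

# Stand-in for the handful of pysam.AlignedSegment fields used above.
Read = namedtuple('Read', 'is_unmapped tid is_reverse reference_start reference_end')

def insert_size(s1, s2):
    # Same logic as sam_pair_to_insert: require a proper "innies" pair.
    if s1.is_unmapped or s2.is_unmapped or s1.tid != s2.tid or s1.is_reverse == s2.is_reverse:
        return None
    fwd, rev = (s2, s1) if s1.is_reverse else (s1, s2)
    start = fwd.reference_start          # 0-based first base of the forward read
    end = rev.reference_end - 1          # reference_end is one past the last base
    return end - start + 1 if start < end else None

fwd = Read(False, 0, False, 100, 200)
rev = Read(False, 0, True, 350, 400)
print(insert_size(fwd, rev))  # 300: bases 100..399 inclusive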
2,226
sanger-pathogens/ariba
ariba/scaffold_graph.py
Graph.update_from_sam
def update_from_sam(self, sam, sam_reader):
    '''Updates graph info from a pysam.AlignedSegment object'''
    if sam.is_unmapped \
      or sam.mate_is_unmapped \
      or (sam.reference_id == sam.next_reference_id):
        return

    new_link = link.Link(sam, sam_reader, self.ref_lengths)
    read_name = sam.query_name

    if read_name in self.partial_links:
        new_link.merge(self.partial_links[read_name])
        del self.partial_links[read_name]
        key = tuple(sorted((new_link.refnames[0], new_link.refnames[1])))
        if key not in self.links:
            self.links[key] = []
        new_link.sort()
        self.links[key].append(new_link)
    else:
        self.partial_links[read_name] = new_link
python
def update_from_sam(self, sam, sam_reader):
    '''Updates graph info from a pysam.AlignedSegment object'''
    if sam.is_unmapped \
      or sam.mate_is_unmapped \
      or (sam.reference_id == sam.next_reference_id):
        return

    new_link = link.Link(sam, sam_reader, self.ref_lengths)
    read_name = sam.query_name

    if read_name in self.partial_links:
        new_link.merge(self.partial_links[read_name])
        del self.partial_links[read_name]
        key = tuple(sorted((new_link.refnames[0], new_link.refnames[1])))
        if key not in self.links:
            self.links[key] = []
        new_link.sort()
        self.links[key].append(new_link)
    else:
        self.partial_links[read_name] = new_link
[ "def", "update_from_sam", "(", "self", ",", "sam", ",", "sam_reader", ")", ":", "if", "sam", ".", "is_unmapped", "or", "sam", ".", "mate_is_unmapped", "or", "(", "sam", ".", "reference_id", "==", "sam", ".", "next_reference_id", ")", ":", "return", "new_link", "=", "link", ".", "Link", "(", "sam", ",", "sam_reader", ",", "self", ".", "ref_lengths", ")", "read_name", "=", "sam", ".", "query_name", "if", "read_name", "in", "self", ".", "partial_links", ":", "new_link", ".", "merge", "(", "self", ".", "partial_links", "[", "read_name", "]", ")", "del", "self", ".", "partial_links", "[", "read_name", "]", "key", "=", "tuple", "(", "sorted", "(", "(", "new_link", ".", "refnames", "[", "0", "]", ",", "new_link", ".", "refnames", "[", "1", "]", ")", ")", ")", "if", "key", "not", "in", "self", ".", "links", ":", "self", ".", "links", "[", "key", "]", "=", "[", "]", "new_link", ".", "sort", "(", ")", "self", ".", "links", "[", "key", "]", ".", "append", "(", "new_link", ")", "else", ":", "self", ".", "partial_links", "[", "read_name", "]", "=", "new_link" ]
Updates graph info from a pysam.AlignedSegment object
[ "Updates", "graph", "info", "from", "a", "pysam", ".", "AlignedSegment", "object" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/scaffold_graph.py#L13-L32
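The read-pairing bookkeeping behind update_from_sam, reduced to a generic sketch: park the first mate under its read name, complete the link when the second arrives:

def pair_up(records):
    # records: iterable of (read_name, payload); mates share a read name.
    # First sighting is parked in partial; second sighting completes the
    # pair, mirroring self.partial_links / self.links above.
    partial = {}
    pairs = []
    for name, payload in records:
        if name in partial:
            pairs.append((partial.pop(name), payload))
        else:
            partial[name] = payload
    return pairs, partial  # partial holds reads whose mate never arrived

pairs, leftovers = pair_up([('r1', 'a'), ('r2', 'b'), ('r1', 'c')])
print(pairs)      # [('a', 'c')]
print(leftovers)  # {'r2': 'b'}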
2,227
sanger-pathogens/ariba
ariba/scaffold_graph.py
Graph._make_graph
def _make_graph(self, max_insert):
    '''helper function to construct graph from current state of object'''
    if len(self.partial_links) != 0:
        raise Error('Error in _make_graph(). Cannot continue because there are partial links')

    self.contig_links = {}
    for key in self.links:
        for l in self.links[key]:
            insert_size = l.insert_size()
            if insert_size <= max_insert:
                if key not in self.contig_links:
                    self.contig_links[key] = {}
                dirs = ''.join(l.dirs)
                self.contig_links[key][dirs] = self.contig_links[key].get(dirs, 0) + 1
python
def _make_graph(self, max_insert):
    '''helper function to construct graph from current state of object'''
    if len(self.partial_links) != 0:
        raise Error('Error in _make_graph(). Cannot continue because there are partial links')

    self.contig_links = {}
    for key in self.links:
        for l in self.links[key]:
            insert_size = l.insert_size()
            if insert_size <= max_insert:
                if key not in self.contig_links:
                    self.contig_links[key] = {}
                dirs = ''.join(l.dirs)
                self.contig_links[key][dirs] = self.contig_links[key].get(dirs, 0) + 1
[ "def", "_make_graph", "(", "self", ",", "max_insert", ")", ":", "if", "len", "(", "self", ".", "partial_links", ")", "!=", "0", ":", "raise", "Error", "(", "'Error in _make_graph(). Cannot continue because there are partial links'", ")", "self", ".", "contig_links", "=", "{", "}", "for", "key", "in", "self", ".", "links", ":", "for", "l", "in", "self", ".", "links", "[", "key", "]", ":", "insert_size", "=", "l", ".", "insert_size", "(", ")", "if", "insert_size", "<=", "max_insert", ":", "if", "key", "not", "in", "self", ".", "contig_links", ":", "self", ".", "contig_links", "[", "key", "]", "=", "{", "}", "dirs", "=", "''", ".", "join", "(", "l", ".", "dirs", ")", "self", ".", "contig_links", "[", "key", "]", "[", "dirs", "]", "=", "self", ".", "contig_links", "[", "key", "]", ".", "get", "(", "dirs", ",", "0", ")", "+", "1" ]
helper function to construct graph from current state of object
[ "helper", "function", "to", "construct", "graph", "from", "current", "state", "of", "object" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/scaffold_graph.py#L35-L48
2,228
sanger-pathogens/ariba
ariba/bam_parse.py
Parser._sam_to_soft_clipped
def _sam_to_soft_clipped(self, sam):
    '''Returns tuple of whether or not the left and right end of the mapped read in the sam record is soft-clipped'''
    if sam.is_unmapped:
        raise Error('Cannot get soft clip info from an unmapped read')

    if sam.cigar is None or len(sam.cigar) == 0:
        return False, False

    return (sam.cigar[0][0] == 4, sam.cigar[-1][0] == 4)
python
def _sam_to_soft_clipped(self, sam):
    '''Returns tuple of whether or not the left and right end of the mapped read in the sam record is soft-clipped'''
    if sam.is_unmapped:
        raise Error('Cannot get soft clip info from an unmapped read')

    if sam.cigar is None or len(sam.cigar) == 0:
        return False, False

    return (sam.cigar[0][0] == 4, sam.cigar[-1][0] == 4)
[ "def", "_sam_to_soft_clipped", "(", "self", ",", "sam", ")", ":", "if", "sam", ".", "is_unmapped", ":", "raise", "Error", "(", "'Cannot get soft clip info from an unmapped read'", ")", "if", "sam", ".", "cigar", "is", "None", "or", "len", "(", "sam", ".", "cigar", ")", "==", "0", ":", "return", "False", ",", "False", "return", "(", "sam", ".", "cigar", "[", "0", "]", "[", "0", "]", "==", "4", ",", "sam", ".", "cigar", "[", "-", "1", "]", "[", "0", "]", "==", "4", ")" ]
Returns tuple of whether or not the left and right end of the mapped read in the sam record is soft-clipped
[ "Returns", "tuple", "of", "whether", "or", "not", "the", "left", "and", "right", "end", "of", "the", "mapped", "read", "in", "the", "sam", "record", "is", "soft", "-", "clipped" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/bam_parse.py#L21-L28
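pysam exposes CIGAR strings as (operation, length) tuples, with operation code 4 meaning soft clip, so the end checks reduce to:

# A read with CIGAR 5S90M3S, as pysam would expose it:
cigar = [(4, 5), (0, 90), (4, 3)]

def soft_clipped_ends(cigar):
    if not cigar:
        return False, False
    return cigar[0][0] == 4, cigar[-1][0] == 4

print(soft_clipped_ends(cigar))  # (True, True)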
2,229
sanger-pathogens/ariba
ariba/report_filter.py
ReportFilter._report_line_to_dict
def _report_line_to_dict(cls, line):
    '''Takes report line string as input. Returns a dict of column name -> value in line'''
    data = line.split('\t')
    if len(data) != len(report.columns):
        return None

    d = dict(zip(report.columns, data))

    for key in report.int_columns:
        try:
            d[key] = int(d[key])
        except:
            assert d[key] == '.'

    for key in report.float_columns:
        try:
            d[key] = float(d[key])
        except:
            assert d[key] == '.'

    d['flag'] = flag.Flag(int(d['flag']))
    return d
python
def _report_line_to_dict(cls, line):
    '''Takes report line string as input. Returns a dict of column name -> value in line'''
    data = line.split('\t')
    if len(data) != len(report.columns):
        return None

    d = dict(zip(report.columns, data))

    for key in report.int_columns:
        try:
            d[key] = int(d[key])
        except:
            assert d[key] == '.'

    for key in report.float_columns:
        try:
            d[key] = float(d[key])
        except:
            assert d[key] == '.'

    d['flag'] = flag.Flag(int(d['flag']))
    return d
[ "def", "_report_line_to_dict", "(", "cls", ",", "line", ")", ":", "data", "=", "line", ".", "split", "(", "'\\t'", ")", "if", "len", "(", "data", ")", "!=", "len", "(", "report", ".", "columns", ")", ":", "return", "None", "d", "=", "dict", "(", "zip", "(", "report", ".", "columns", ",", "data", ")", ")", "for", "key", "in", "report", ".", "int_columns", ":", "try", ":", "d", "[", "key", "]", "=", "int", "(", "d", "[", "key", "]", ")", "except", ":", "assert", "d", "[", "key", "]", "==", "'.'", "for", "key", "in", "report", ".", "float_columns", ":", "try", ":", "d", "[", "key", "]", "=", "float", "(", "d", "[", "key", "]", ")", "except", ":", "assert", "d", "[", "key", "]", "==", "'.'", "d", "[", "'flag'", "]", "=", "flag", ".", "Flag", "(", "int", "(", "d", "[", "'flag'", "]", ")", ")", "return", "d" ]
Takes report line string as input. Returns a dict of column name -> value in line
[ "Takes", "report", "line", "string", "as", "input", ".", "Returns", "a", "dict", "of", "column", "name", "-", ">", "value", "in", "line" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/report_filter.py#L33-L53
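A runnable sketch of the same tab-split-and-convert pattern, with a made-up three-column spec standing in for ariba's report module:

# Hypothetical column spec; the real one lives in ariba's report module.
columns = ['ref_name', 'reads', 'pc_ident']
int_columns = {'reads'}
float_columns = {'pc_ident'}

def line_to_dict(line):
    data = line.rstrip('\n').split('\t')
    if len(data) != len(columns):
        return None
    d = dict(zip(columns, data))
    for key in int_columns:
        if d[key] != '.':          # '.' is the report's missing-value marker
            d[key] = int(d[key])
    for key in float_columns:
        if d[key] != '.':
            d[key] = float(d[key])
    return d

print(line_to_dict('gene1\t42\t99.1'))
# {'ref_name': 'gene1', 'reads': 42, 'pc_ident': 99.1}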
2,230
sanger-pathogens/ariba
ariba/report_filter.py
ReportFilter._dict_to_report_line
def _dict_to_report_line(cls, report_dict):
    '''Takes a report_dict as input and returns a report line'''
    return '\t'.join([str(report_dict[x]) for x in report.columns])
python
def _dict_to_report_line(cls, report_dict):
    '''Takes a report_dict as input and returns a report line'''
    return '\t'.join([str(report_dict[x]) for x in report.columns])
[ "def", "_dict_to_report_line", "(", "cls", ",", "report_dict", ")", ":", "return", "'\\t'", ".", "join", "(", "[", "str", "(", "report_dict", "[", "x", "]", ")", "for", "x", "in", "report", ".", "columns", "]", ")" ]
Takes a report_dict as input and returns a report line
[ "Takes", "a", "report_dict", "as", "input", "and", "returns", "a", "report", "line" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/report_filter.py#L57-L59
2,231
sanger-pathogens/ariba
ariba/report_filter.py
ReportFilter._load_report
def _load_report(infile):
    '''Loads report file into a dictionary. Key=reference name.
    Value = list of report lines for that reference'''
    report_dict = {}
    f = pyfastaq.utils.open_file_read(infile)
    first_line = True

    for line in f:
        line = line.rstrip()

        if first_line:
            expected_first_line = '#' + '\t'.join(report.columns)
            if line != expected_first_line:
                pyfastaq.utils.close(f)
                raise Error('Error reading report file. Expected first line of file is\n' + expected_first_line + '\nbut got:\n' + line)
            first_line = False
        else:
            line_dict = ReportFilter._report_line_to_dict(line)
            if line_dict is None:
                pyfastaq.utils.close(f)
                raise Error('Error reading report file at this line:\n' + line)

            ref_name = line_dict['ref_name']
            ctg_name = line_dict['ctg']
            if ref_name not in report_dict:
                report_dict[ref_name] = {}
            if ctg_name not in report_dict[ref_name]:
                report_dict[ref_name][ctg_name] = []

            report_dict[ref_name][ctg_name].append(line_dict)

    pyfastaq.utils.close(f)
    return report_dict
python
def _load_report(infile):
    '''Loads report file into a dictionary. Key=reference name.
    Value = list of report lines for that reference'''
    report_dict = {}
    f = pyfastaq.utils.open_file_read(infile)
    first_line = True

    for line in f:
        line = line.rstrip()

        if first_line:
            expected_first_line = '#' + '\t'.join(report.columns)
            if line != expected_first_line:
                pyfastaq.utils.close(f)
                raise Error('Error reading report file. Expected first line of file is\n' + expected_first_line + '\nbut got:\n' + line)
            first_line = False
        else:
            line_dict = ReportFilter._report_line_to_dict(line)
            if line_dict is None:
                pyfastaq.utils.close(f)
                raise Error('Error reading report file at this line:\n' + line)

            ref_name = line_dict['ref_name']
            ctg_name = line_dict['ctg']
            if ref_name not in report_dict:
                report_dict[ref_name] = {}
            if ctg_name not in report_dict[ref_name]:
                report_dict[ref_name][ctg_name] = []

            report_dict[ref_name][ctg_name].append(line_dict)

    pyfastaq.utils.close(f)
    return report_dict
[ "def", "_load_report", "(", "infile", ")", ":", "report_dict", "=", "{", "}", "f", "=", "pyfastaq", ".", "utils", ".", "open_file_read", "(", "infile", ")", "first_line", "=", "True", "for", "line", "in", "f", ":", "line", "=", "line", ".", "rstrip", "(", ")", "if", "first_line", ":", "expected_first_line", "=", "'#'", "+", "'\\t'", ".", "join", "(", "report", ".", "columns", ")", "if", "line", "!=", "expected_first_line", ":", "pyfastaq", ".", "utils", ".", "close", "(", "f", ")", "raise", "Error", "(", "'Error reading report file. Expected first line of file is\\n'", "+", "expected_first_line", "+", "'\\nbut got:\\n'", "+", "line", ")", "first_line", "=", "False", "else", ":", "line_dict", "=", "ReportFilter", ".", "_report_line_to_dict", "(", "line", ")", "if", "line_dict", "is", "None", ":", "pyfastaq", ".", "utils", ".", "close", "(", "f", ")", "raise", "Error", "(", "'Error reading report file at this line:\\n'", "+", "line", ")", "ref_name", "=", "line_dict", "[", "'ref_name'", "]", "ctg_name", "=", "line_dict", "[", "'ctg'", "]", "if", "ref_name", "not", "in", "report_dict", ":", "report_dict", "[", "ref_name", "]", "=", "{", "}", "if", "ctg_name", "not", "in", "report_dict", "[", "ref_name", "]", ":", "report_dict", "[", "ref_name", "]", "[", "ctg_name", "]", "=", "[", "]", "report_dict", "[", "ref_name", "]", "[", "ctg_name", "]", ".", "append", "(", "line_dict", ")", "pyfastaq", ".", "utils", ".", "close", "(", "f", ")", "return", "report_dict" ]
Loads report file into a dictionary. Key=reference name. Value = list of report lines for that reference
[ "Loads", "report", "file", "into", "a", "dictionary", ".", "Key", "=", "reference", "name", ".", "Value", "=", "list", "of", "report", "lines", "for", "that", "reference" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/report_filter.py#L63-L94
2,232
sanger-pathogens/ariba
ariba/report_filter.py
ReportFilter._filter_dicts
def _filter_dicts(self):
    '''Filters out all the report_dicts that do not pass the cutoffs. If any ref sequence
    loses all of its report_dicts, then it is completely removed.'''
    keys_to_remove = set()

    for ref_name in self.report:
        for ctg_name in self.report[ref_name]:
            self.report[ref_name][ctg_name] = self._filter_list_of_dicts(self.report[ref_name][ctg_name])
            if len(self.report[ref_name][ctg_name]) == 0:
                keys_to_remove.add((ref_name, ctg_name))

    refs_to_remove = set()

    for ref_name, ctg_name in keys_to_remove:
        del self.report[ref_name][ctg_name]
        if len(self.report[ref_name]) == 0:
            refs_to_remove.add(ref_name)

    for ref_name in refs_to_remove:
        del self.report[ref_name]
python
def _filter_dicts(self):
    '''Filters out all the report_dicts that do not pass the cutoffs. If any ref sequence
    loses all of its report_dicts, then it is completely removed.'''
    keys_to_remove = set()

    for ref_name in self.report:
        for ctg_name in self.report[ref_name]:
            self.report[ref_name][ctg_name] = self._filter_list_of_dicts(self.report[ref_name][ctg_name])
            if len(self.report[ref_name][ctg_name]) == 0:
                keys_to_remove.add((ref_name, ctg_name))

    refs_to_remove = set()

    for ref_name, ctg_name in keys_to_remove:
        del self.report[ref_name][ctg_name]
        if len(self.report[ref_name]) == 0:
            refs_to_remove.add(ref_name)

    for ref_name in refs_to_remove:
        del self.report[ref_name]
[ "def", "_filter_dicts", "(", "self", ")", ":", "keys_to_remove", "=", "set", "(", ")", "for", "ref_name", "in", "self", ".", "report", ":", "for", "ctg_name", "in", "self", ".", "report", "[", "ref_name", "]", ":", "self", ".", "report", "[", "ref_name", "]", "[", "ctg_name", "]", "=", "self", ".", "_filter_list_of_dicts", "(", "self", ".", "report", "[", "ref_name", "]", "[", "ctg_name", "]", ")", "if", "len", "(", "self", ".", "report", "[", "ref_name", "]", "[", "ctg_name", "]", ")", "==", "0", ":", "keys_to_remove", ".", "add", "(", "(", "ref_name", ",", "ctg_name", ")", ")", "refs_to_remove", "=", "set", "(", ")", "for", "ref_name", ",", "ctg_name", "in", "keys_to_remove", ":", "del", "self", ".", "report", "[", "ref_name", "]", "[", "ctg_name", "]", "if", "len", "(", "self", ".", "report", "[", "ref_name", "]", ")", "==", "0", ":", "refs_to_remove", ".", "add", "(", "ref_name", ")", "for", "ref_name", "in", "refs_to_remove", ":", "del", "self", ".", "report", "[", "ref_name", "]" ]
Filters out all the report_dicts that do not pass the cutoffs. If any ref sequence loses all of its report_dicts, then it is completely removed.
[ "Filters", "out", "all", "the", "report_dicts", "that", "do", "not", "pass", "the", "cutoffs", ".", "If", "any", "ref", "sequence", "loses", "all", "of", "its", "report_dicts", "then", "it", "is", "completely", "removed", "." ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/report_filter.py#L167-L186
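The two-phase deletion used here, collect keys first and delete afterwards so no dict is mutated mid-iteration, in a generic sketch:

def prune_empty(nested):
    # Phase 1: list the empty inner keys without touching the dicts.
    empty = [(a, b) for a in nested for b in nested[a] if not nested[a][b]]
    # Phase 2: delete them, then drop any outer key left with nothing.
    for a, b in empty:
        del nested[a][b]
    for a in [a for a in nested if not nested[a]]:
        del nested[a]
    return nested

print(prune_empty({'ref1': {'ctg1': [], 'ctg2': [1]}, 'ref2': {'ctg3': []}}))
# {'ref1': {'ctg2': [1]}}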
2,233
sanger-pathogens/ariba
ariba/link.py
Link.merge
def merge(self, other):
    '''Merge another link into this one. Expected that each link was created from each
    mate from a pair. We only know both distances to contig ends when we have read
    info from both mappings in a BAM file. All other info should be the same.'''
    assert self.refnames == other.refnames
    assert self.dirs == other.dirs
    assert self.lengths == other.lengths
    for i in range(2):
        if self.pos[i] is None:
            if other.pos[i] is None:
                raise Error('Error merging these two links:\n' + str(self) + '\n' + str(other))
            self.pos[i] = other.pos[i]
        else:
            if other.pos[i] is not None:
                raise Error('Error merging these two links:\n' + str(self) + '\n' + str(other))
python
def merge(self, other):
    '''Merge another link into this one. Expected that each link was created from each
    mate from a pair. We only know both distances to contig ends when we have read
    info from both mappings in a BAM file. All other info should be the same.'''
    assert self.refnames == other.refnames
    assert self.dirs == other.dirs
    assert self.lengths == other.lengths
    for i in range(2):
        if self.pos[i] is None:
            if other.pos[i] is None:
                raise Error('Error merging these two links:\n' + str(self) + '\n' + str(other))
            self.pos[i] = other.pos[i]
        else:
            if other.pos[i] is not None:
                raise Error('Error merging these two links:\n' + str(self) + '\n' + str(other))
[ "def", "merge", "(", "self", ",", "other", ")", ":", "assert", "self", ".", "refnames", "==", "other", ".", "refnames", "assert", "self", ".", "dirs", "==", "other", ".", "dirs", "assert", "self", ".", "lengths", "==", "other", ".", "lengths", "for", "i", "in", "range", "(", "2", ")", ":", "if", "self", ".", "pos", "[", "i", "]", "is", "None", ":", "if", "other", ".", "pos", "[", "i", "]", "is", "None", ":", "raise", "Error", "(", "'Error merging these two links:\\n'", "+", "str", "(", "self", ")", "+", "'\\n'", "+", "str", "(", "other", ")", ")", "self", ".", "pos", "[", "i", "]", "=", "other", ".", "pos", "[", "i", "]", "else", ":", "if", "other", ".", "pos", "[", "i", "]", "is", "not", "None", ":", "raise", "Error", "(", "'Error merging these two links:\\n'", "+", "str", "(", "self", ")", "+", "'\\n'", "+", "str", "(", "other", ")", ")" ]
Merge another link into this one. Expected that each link was created from each mate from a pair. We only know both distances to contig ends when we have read info from both mappings in a BAM file. All other info should be the same.
[ "Merge", "another", "link", "into", "this", "one", ".", "Expected", "that", "each", "link", "was", "created", "from", "each", "mate", "from", "a", "pair", ".", "We", "only", "know", "both", "distances", "to", "contig", "ends", "when", "we", "have", "read", "info", "from", "both", "mappings", "in", "a", "BAM", "file", ".", "All", "other", "info", "should", "be", "the", "same", "." ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/link.py#L80-L93
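The complementary-fields rule enforced by Link.merge (each position known in exactly one of the two links) in a minimal stand-alone form:

def merge_positions(a, b):
    # Each slot must be known in exactly one of the two lists, as in Link.merge.
    merged = []
    for x, y in zip(a, b):
        if (x is None) == (y is None):
            raise ValueError('positions must be complementary: %r vs %r' % (a, b))
        merged.append(x if x is not None else y)
    return merged

print(merge_positions([10, None], [None, 55]))  # [10, 55]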
2,234
sanger-pathogens/ariba
ariba/summary.py
Summary._load_fofn
def _load_fofn(cls, fofn):
    '''Returns dictionary of filename -> short name. Value is None
    whenever short name is not provided'''
    filenames = {}
    f = pyfastaq.utils.open_file_read(fofn)
    for line in f:
        fields = line.rstrip().split()
        if len(fields) == 1:
            filenames[fields[0]] = None
        elif len(fields) == 2:
            filenames[fields[0]] = fields[1]
        else:
            raise Error('Error at the following line of file ' + fofn + '. Expected at most 2 fields.\n' + line)

    pyfastaq.utils.close(f)
    return filenames
python
def _load_fofn(cls, fofn):
    '''Returns dictionary of filename -> short name. Value is None
    whenever short name is not provided'''
    filenames = {}
    f = pyfastaq.utils.open_file_read(fofn)
    for line in f:
        fields = line.rstrip().split()
        if len(fields) == 1:
            filenames[fields[0]] = None
        elif len(fields) == 2:
            filenames[fields[0]] = fields[1]
        else:
            raise Error('Error at the following line of file ' + fofn + '. Expected at most 2 fields.\n' + line)

    pyfastaq.utils.close(f)
    return filenames
[ "def", "_load_fofn", "(", "cls", ",", "fofn", ")", ":", "filenames", "=", "{", "}", "f", "=", "pyfastaq", ".", "utils", ".", "open_file_read", "(", "fofn", ")", "for", "line", "in", "f", ":", "fields", "=", "line", ".", "rstrip", "(", ")", ".", "split", "(", ")", "if", "len", "(", "fields", ")", "==", "1", ":", "filenames", "[", "fields", "[", "0", "]", "]", "=", "None", "elif", "len", "(", "fields", ")", "==", "2", ":", "filenames", "[", "fields", "[", "0", "]", "]", "=", "fields", "[", "1", "]", "else", ":", "raise", "Error", "(", "'Error at the following line of file '", "+", "fofn", "+", "'. Expected at most 2 fields.\\n'", "+", "line", ")", "pyfastaq", ".", "utils", ".", "close", "(", "f", ")", "return", "filenames" ]
Returns dictionary of filename -> short name. Value is None whenever short name is not provided
[ "Returns", "dictionary", "of", "filename", "-", ">", "short", "name", ".", "Value", "is", "None", "whenever", "short", "name", "is", "not", "provided" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/summary.py#L70-L85
2,235
sanger-pathogens/ariba
ariba/summary.py
Summary._filter_matrix_rows
def _filter_matrix_rows(cls, matrix):
    '''matrix = output from _to_matrix'''
    indexes_to_keep = []

    for i in range(len(matrix)):
        keep_row = False

        for element in matrix[i]:
            if element not in {'NA', 'no'}:
                keep_row = True
                break

        if keep_row:
            indexes_to_keep.append(i)

    return [matrix[i] for i in indexes_to_keep]
python
def _filter_matrix_rows(cls, matrix):
    '''matrix = output from _to_matrix'''
    indexes_to_keep = []

    for i in range(len(matrix)):
        keep_row = False

        for element in matrix[i]:
            if element not in {'NA', 'no'}:
                keep_row = True
                break

        if keep_row:
            indexes_to_keep.append(i)

    return [matrix[i] for i in indexes_to_keep]
[ "def", "_filter_matrix_rows", "(", "cls", ",", "matrix", ")", ":", "indexes_to_keep", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "matrix", ")", ")", ":", "keep_row", "=", "False", "for", "element", "in", "matrix", "[", "i", "]", ":", "if", "element", "not", "in", "{", "'NA'", ",", "'no'", "}", ":", "keep_row", "=", "True", "break", "if", "keep_row", ":", "indexes_to_keep", ".", "append", "(", "i", ")", "return", "[", "matrix", "[", "i", "]", "for", "i", "in", "indexes_to_keep", "]" ]
matrix = output from _to_matrix
[ "matrix", "=", "output", "from", "_to_matrix" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/summary.py#L223-L236
2,236
sanger-pathogens/ariba
ariba/summary.py
Summary._filter_matrix_columns
def _filter_matrix_columns(cls, matrix, phandango_header, csv_header):
    '''phandango_header, csv_header, matrix = output from _to_matrix'''
    indexes_to_keep = set()

    for row in matrix:
        for i in range(len(row)):
            if row[i] not in {'NA', 'no'}:
                indexes_to_keep.add(i)

    indexes_to_keep = sorted(list(indexes_to_keep))

    for i in range(len(matrix)):
        matrix[i] = [matrix[i][j] for j in indexes_to_keep]

    phandango_header = [phandango_header[i] for i in indexes_to_keep]
    csv_header = [csv_header[i] for i in indexes_to_keep]
    return phandango_header, csv_header, matrix
python
def _filter_matrix_columns(cls, matrix, phandango_header, csv_header):
    '''phandango_header, csv_header, matrix = output from _to_matrix'''
    indexes_to_keep = set()

    for row in matrix:
        for i in range(len(row)):
            if row[i] not in {'NA', 'no'}:
                indexes_to_keep.add(i)

    indexes_to_keep = sorted(list(indexes_to_keep))

    for i in range(len(matrix)):
        matrix[i] = [matrix[i][j] for j in indexes_to_keep]

    phandango_header = [phandango_header[i] for i in indexes_to_keep]
    csv_header = [csv_header[i] for i in indexes_to_keep]
    return phandango_header, csv_header, matrix
[ "def", "_filter_matrix_columns", "(", "cls", ",", "matrix", ",", "phandango_header", ",", "csv_header", ")", ":", "indexes_to_keep", "=", "set", "(", ")", "for", "row", "in", "matrix", ":", "for", "i", "in", "range", "(", "len", "(", "row", ")", ")", ":", "if", "row", "[", "i", "]", "not", "in", "{", "'NA'", ",", "'no'", "}", ":", "indexes_to_keep", ".", "add", "(", "i", ")", "indexes_to_keep", "=", "sorted", "(", "list", "(", "indexes_to_keep", ")", ")", "for", "i", "in", "range", "(", "len", "(", "matrix", ")", ")", ":", "matrix", "[", "i", "]", "=", "[", "matrix", "[", "i", "]", "[", "j", "]", "for", "j", "in", "indexes_to_keep", "]", "phandango_header", "=", "[", "phandango_header", "[", "i", "]", "for", "i", "in", "indexes_to_keep", "]", "csv_header", "=", "[", "csv_header", "[", "i", "]", "for", "i", "in", "indexes_to_keep", "]", "return", "phandango_header", ",", "csv_header", ",", "matrix" ]
phandango_header, csv_header, matrix = output from _to_matrix
[ "phandango_header", "csv_header", "matrix", "=", "output", "from", "_to_matrix" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/summary.py#L240-L256
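Row and column pruning from the two methods above, combined into one self-contained sketch over a small matrix:

def drop_uninformative(matrix, header, boring={'NA', 'no'}):
    # Keep only rows and columns with at least one informative cell,
    # the same pruning _filter_matrix_rows / _filter_matrix_columns perform.
    rows = [r for r in matrix if any(c not in boring for c in r)]
    keep = sorted({i for r in rows for i, c in enumerate(r) if c not in boring})
    return [header[i] for i in keep], [[r[i] for i in keep] for r in rows]

header, rows = drop_uninformative(
    [['NA', 'yes', 'no'], ['NA', 'no', 'no']],
    ['s1', 's2', 's3'],
)
print(header, rows)  # ['s2'] [['yes']]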
2,237
sanger-pathogens/ariba
ariba/assembly_variants.py
AssemblyVariants._get_remaining_known_ref_variants
def _get_remaining_known_ref_variants(known_ref_variants, used_ref_variants, nucmer_coords):
    '''Finds variants where ref has the variant and so does the contig. Which means
    that there was no mummer call to flag it up so need to look through the known
    ref variants. Also need to check that the variant is in a nucmer match to an
    assembly contig.'''
    variants = []

    for ref_variant_pos, ref_variants_set in sorted(known_ref_variants.items()):
        for known_ref_variant in ref_variants_set:
            if known_ref_variant not in used_ref_variants:
                variant_pos_matches_contig = False
                pos = known_ref_variant.variant.position

                if known_ref_variant.seq_type == 'n':
                    ref_interval = intervals.Interval(pos, pos)
                elif known_ref_variant.seq_type == 'p':
                    ref_interval = intervals.Interval(3 * pos, 3 * pos + 2)
                else:
                    raise Error('Unexpected variant type "' + known_ref_variant.variant_type + '" in _get_remaining_known_ref_variants. Cannot continue')

                for interval in nucmer_coords:
                    if ref_interval.intersects(interval):
                        variant_pos_matches_contig = True
                        break

                if variant_pos_matches_contig:
                    variants.append((None, known_ref_variant.seq_type, None, None, None, {known_ref_variant}, set()))

    return variants
python
def _get_remaining_known_ref_variants(known_ref_variants, used_ref_variants, nucmer_coords):
    '''Finds variants where ref has the variant and so does the contig. Which means
    that there was no mummer call to flag it up so need to look through the known
    ref variants. Also need to check that the variant is in a nucmer match to an
    assembly contig.'''
    variants = []

    for ref_variant_pos, ref_variants_set in sorted(known_ref_variants.items()):
        for known_ref_variant in ref_variants_set:
            if known_ref_variant not in used_ref_variants:
                variant_pos_matches_contig = False
                pos = known_ref_variant.variant.position

                if known_ref_variant.seq_type == 'n':
                    ref_interval = intervals.Interval(pos, pos)
                elif known_ref_variant.seq_type == 'p':
                    ref_interval = intervals.Interval(3 * pos, 3 * pos + 2)
                else:
                    raise Error('Unexpected variant type "' + known_ref_variant.variant_type + '" in _get_remaining_known_ref_variants. Cannot continue')

                for interval in nucmer_coords:
                    if ref_interval.intersects(interval):
                        variant_pos_matches_contig = True
                        break

                if variant_pos_matches_contig:
                    variants.append((None, known_ref_variant.seq_type, None, None, None, {known_ref_variant}, set()))

    return variants
[ "def", "_get_remaining_known_ref_variants", "(", "known_ref_variants", ",", "used_ref_variants", ",", "nucmer_coords", ")", ":", "variants", "=", "[", "]", "for", "ref_variant_pos", ",", "ref_variants_set", "in", "sorted", "(", "known_ref_variants", ".", "items", "(", ")", ")", ":", "for", "known_ref_variant", "in", "ref_variants_set", ":", "if", "known_ref_variant", "not", "in", "used_ref_variants", ":", "variant_pos_matches_contig", "=", "False", "pos", "=", "known_ref_variant", ".", "variant", ".", "position", "if", "known_ref_variant", ".", "seq_type", "==", "'n'", ":", "ref_interval", "=", "intervals", ".", "Interval", "(", "pos", ",", "pos", ")", "elif", "known_ref_variant", ".", "seq_type", "==", "'p'", ":", "ref_interval", "=", "intervals", ".", "Interval", "(", "3", "*", "pos", ",", "3", "*", "pos", "+", "2", ")", "else", ":", "raise", "Error", "(", "'Unexpected variant type \"'", "+", "known_ref_variant", ".", "variant_type", "+", "'\" in _get_remaining_known_ref_variants. Cannot continue'", ")", "for", "interval", "in", "nucmer_coords", ":", "if", "ref_interval", ".", "intersects", "(", "interval", ")", ":", "variant_pos_matches_contig", "=", "True", "break", "if", "variant_pos_matches_contig", ":", "variants", ".", "append", "(", "(", "None", ",", "known_ref_variant", ".", "seq_type", ",", "None", ",", "None", ",", "None", ",", "{", "known_ref_variant", "}", ",", "set", "(", ")", ")", ")", "return", "variants" ]
Finds variants where ref has the variant and so does the contig. Which means that there was no mummer call to flag it up so need to look through the known ref variants. Also need to check that the variant is in a nucmer match to an assembly contig.
[ "Finds", "variants", "where", "ref", "has", "the", "variant", "and", "so", "does", "the", "contig", ".", "Which", "means", "that", "there", "was", "no", "mummer", "call", "to", "flag", "it", "up", "so", "need", "to", "look", "through", "the", "known", "ref", "variants", ".", "Also", "need", "to", "check", "that", "the", "variant", "is", "in", "a", "nucmer", "match", "to", "an", "assembly", "contig", "." ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/assembly_variants.py#L232-L260
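The codon-to-nucleotide interval mapping and the overlap test can be sketched with plain tuples in place of pyfastaq's intervals.Interval:

def ref_interval(seq_type, pos):
    # 'n' positions are already nucleotides; 'p' positions are codons,
    # so amino acid pos covers nucleotides 3*pos .. 3*pos + 2 (0-based).
    return (pos, pos) if seq_type == 'n' else (3 * pos, 3 * pos + 2)

def intersects(a, b):
    # Inclusive intervals overlap iff neither lies wholly before the other.
    return a[0] <= b[1] and b[0] <= a[1]

codon_10 = ref_interval('p', 10)
print(codon_10)                          # (30, 32)
print(intersects(codon_10, (0, 31)))     # True: the nucmer match reaches base 31
print(intersects(codon_10, (33, 99)))    # False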
2,238
sanger-pathogens/ariba
ariba/report.py
_samtools_depths_at_known_snps_all_wild
def _samtools_depths_at_known_snps_all_wild(sequence_meta, contig_name, cluster, variant_list):
    '''Input is a known variant, as a sequence_metadata object. The assumption is that
    both the reference and the assembly have the variant type, not wild type. The list
    variant_list should be a list of pymummer.variant.Variant objects, only containing
    variants to the relevant query contig'''
    ref_nuc_range = sequence_meta.variant.nucleotide_range()

    if ref_nuc_range is None:
        return None

    bases = []
    ctg_nts = []
    ref_nts = []
    smtls_total_depths = []
    smtls_nts = []
    smtls_depths = []
    contig_positions = []

    for ref_position in range(ref_nuc_range[0], ref_nuc_range[1]+1, 1):
        nucmer_match = cluster.assembly_compare.nucmer_hit_containing_reference_position(cluster.assembly_compare.nucmer_hits, cluster.ref_sequence.id, ref_position, qry_name=contig_name)

        if nucmer_match is not None:
            # work out contig position. Needs indels variants to correct the position
            ref_nts.append(cluster.ref_sequence[ref_position])
            contig_position, in_indel = nucmer_match.qry_coords_from_ref_coord(ref_position, variant_list)
            contig_positions.append(contig_position)
            bases, total_depth, base_depths = cluster.samtools_vars.get_depths_at_position(contig_name, contig_position)
            ctg_nts.append(cluster.assembly.sequences[contig_name][contig_position])
            smtls_nts.append(bases)
            smtls_total_depths.append(total_depth)
            smtls_depths.append(base_depths)

    ctg_nts = ';'.join(ctg_nts) if len(ctg_nts) else '.'
    ref_nts = ';'.join(ref_nts) if len(ref_nts) else '.'
    smtls_nts = ';'.join(smtls_nts) if len(smtls_nts) else '.'
    smtls_total_depths = ';'.join([str(x) for x in smtls_total_depths]) if len(smtls_total_depths) else '.'
    smtls_depths = ';'.join([str(x) for x in smtls_depths]) if len(smtls_depths) else '.'
    ctg_start = str(min(contig_positions) + 1) if contig_positions is not None else '.'
    ctg_end = str(max(contig_positions) + 1) if contig_positions is not None else '.'

    return [str(x) for x in [
        ref_nuc_range[0] + 1,
        ref_nuc_range[1] + 1,
        ref_nts,
        ctg_start,
        ctg_end,
        ctg_nts,
        smtls_total_depths,
        smtls_nts,
        smtls_depths
    ]]
python
def _samtools_depths_at_known_snps_all_wild(sequence_meta, contig_name, cluster, variant_list):
    '''Input is a known variant, as a sequence_metadata object. The assumption is that
    both the reference and the assembly have the variant type, not wild type. The list
    variant_list should be a list of pymummer.variant.Variant objects, only containing
    variants to the relevant query contig'''
    ref_nuc_range = sequence_meta.variant.nucleotide_range()

    if ref_nuc_range is None:
        return None

    bases = []
    ctg_nts = []
    ref_nts = []
    smtls_total_depths = []
    smtls_nts = []
    smtls_depths = []
    contig_positions = []

    for ref_position in range(ref_nuc_range[0], ref_nuc_range[1]+1, 1):
        nucmer_match = cluster.assembly_compare.nucmer_hit_containing_reference_position(cluster.assembly_compare.nucmer_hits, cluster.ref_sequence.id, ref_position, qry_name=contig_name)

        if nucmer_match is not None:
            # work out contig position. Needs indels variants to correct the position
            ref_nts.append(cluster.ref_sequence[ref_position])
            contig_position, in_indel = nucmer_match.qry_coords_from_ref_coord(ref_position, variant_list)
            contig_positions.append(contig_position)
            bases, total_depth, base_depths = cluster.samtools_vars.get_depths_at_position(contig_name, contig_position)
            ctg_nts.append(cluster.assembly.sequences[contig_name][contig_position])
            smtls_nts.append(bases)
            smtls_total_depths.append(total_depth)
            smtls_depths.append(base_depths)

    ctg_nts = ';'.join(ctg_nts) if len(ctg_nts) else '.'
    ref_nts = ';'.join(ref_nts) if len(ref_nts) else '.'
    smtls_nts = ';'.join(smtls_nts) if len(smtls_nts) else '.'
    smtls_total_depths = ';'.join([str(x) for x in smtls_total_depths]) if len(smtls_total_depths) else '.'
    smtls_depths = ';'.join([str(x) for x in smtls_depths]) if len(smtls_depths) else '.'
    ctg_start = str(min(contig_positions) + 1) if contig_positions is not None else '.'
    ctg_end = str(max(contig_positions) + 1) if contig_positions is not None else '.'

    return [str(x) for x in [
        ref_nuc_range[0] + 1,
        ref_nuc_range[1] + 1,
        ref_nts,
        ctg_start,
        ctg_end,
        ctg_nts,
        smtls_total_depths,
        smtls_nts,
        smtls_depths
    ]]
[ "def", "_samtools_depths_at_known_snps_all_wild", "(", "sequence_meta", ",", "contig_name", ",", "cluster", ",", "variant_list", ")", ":", "ref_nuc_range", "=", "sequence_meta", ".", "variant", ".", "nucleotide_range", "(", ")", "if", "ref_nuc_range", "is", "None", ":", "return", "None", "bases", "=", "[", "]", "ctg_nts", "=", "[", "]", "ref_nts", "=", "[", "]", "smtls_total_depths", "=", "[", "]", "smtls_nts", "=", "[", "]", "smtls_depths", "=", "[", "]", "contig_positions", "=", "[", "]", "for", "ref_position", "in", "range", "(", "ref_nuc_range", "[", "0", "]", ",", "ref_nuc_range", "[", "1", "]", "+", "1", ",", "1", ")", ":", "nucmer_match", "=", "cluster", ".", "assembly_compare", ".", "nucmer_hit_containing_reference_position", "(", "cluster", ".", "assembly_compare", ".", "nucmer_hits", ",", "cluster", ".", "ref_sequence", ".", "id", ",", "ref_position", ",", "qry_name", "=", "contig_name", ")", "if", "nucmer_match", "is", "not", "None", ":", "# work out contig position. Needs indels variants to correct the position", "ref_nts", ".", "append", "(", "cluster", ".", "ref_sequence", "[", "ref_position", "]", ")", "contig_position", ",", "in_indel", "=", "nucmer_match", ".", "qry_coords_from_ref_coord", "(", "ref_position", ",", "variant_list", ")", "contig_positions", ".", "append", "(", "contig_position", ")", "bases", ",", "total_depth", ",", "base_depths", "=", "cluster", ".", "samtools_vars", ".", "get_depths_at_position", "(", "contig_name", ",", "contig_position", ")", "ctg_nts", ".", "append", "(", "cluster", ".", "assembly", ".", "sequences", "[", "contig_name", "]", "[", "contig_position", "]", ")", "smtls_nts", ".", "append", "(", "bases", ")", "smtls_total_depths", ".", "append", "(", "total_depth", ")", "smtls_depths", ".", "append", "(", "base_depths", ")", "ctg_nts", "=", "';'", ".", "join", "(", "ctg_nts", ")", "if", "len", "(", "ctg_nts", ")", "else", "'.'", "ref_nts", "=", "';'", ".", "join", "(", "ref_nts", ")", "if", "len", "(", "ref_nts", ")", "else", "'.'", "smtls_nts", "=", "';'", ".", "join", "(", "smtls_nts", ")", "if", "len", "(", "smtls_nts", ")", "else", "'.'", "smtls_total_depths", "=", "';'", ".", "join", "(", "[", "str", "(", "x", ")", "for", "x", "in", "smtls_total_depths", "]", ")", "if", "len", "(", "smtls_total_depths", ")", "else", "'.'", "smtls_depths", "=", "';'", ".", "join", "(", "[", "str", "(", "x", ")", "for", "x", "in", "smtls_depths", "]", ")", "if", "len", "(", "smtls_depths", ")", "else", "'.'", "ctg_start", "=", "str", "(", "min", "(", "contig_positions", ")", "+", "1", ")", "if", "contig_positions", "is", "not", "None", "else", "'.'", "ctg_end", "=", "str", "(", "max", "(", "contig_positions", ")", "+", "1", ")", "if", "contig_positions", "is", "not", "None", "else", "'.'", "return", "[", "str", "(", "x", ")", "for", "x", "in", "[", "ref_nuc_range", "[", "0", "]", "+", "1", ",", "ref_nuc_range", "[", "1", "]", "+", "1", ",", "ref_nts", ",", "ctg_start", ",", "ctg_end", ",", "ctg_nts", ",", "smtls_total_depths", ",", "smtls_nts", ",", "smtls_depths", "]", "]" ]
Input is a known variant, as a sequence_metadata object. The assumption is that both the reference and the assembly have the variant type, not wild type. The list variant_list should be a list of pymummer.variant.Variant objects, only containing variants to the relevant query contig
[ "Input", "is", "a", "known", "variants", "as", "sequence_metadata", "object", ".", "The", "assumption", "is", "that", "both", "the", "reference", "and", "the", "assembly", "have", "the", "variant", "type", "not", "wild", "type", ".", "The", "list", "variant_list", "should", "be", "a", "list", "of", "pymummer", ".", "variant", ".", "Variant", "objects", "only", "contaning", "variants", "to", "the", "relevant", "query", "contig" ]
16a0b1916ce0e886bd22550ba2d648542977001b
https://github.com/sanger-pathogens/ariba/blob/16a0b1916ce0e886bd22550ba2d648542977001b/ariba/report.py#L85-L136
2,239
ethereum/eth-abi
eth_abi/utils/string.py
abbr
def abbr(value: Any, limit: int=20) -> str:
    """
    Converts a value into its string representation and abbreviates that
    representation based on the given length `limit` if necessary.
    """
    rep = repr(value)

    if len(rep) > limit:
        if limit < 3:
            raise ValueError('Abbreviation limit may not be less than 3')

        rep = rep[:limit - 3] + '...'

    return rep
python
def abbr(value: Any, limit: int=20) -> str:
    """
    Converts a value into its string representation and abbreviates that
    representation based on the given length `limit` if necessary.
    """
    rep = repr(value)

    if len(rep) > limit:
        if limit < 3:
            raise ValueError('Abbreviation limit may not be less than 3')

        rep = rep[:limit - 3] + '...'

    return rep
[ "def", "abbr", "(", "value", ":", "Any", ",", "limit", ":", "int", "=", "20", ")", "->", "str", ":", "rep", "=", "repr", "(", "value", ")", "if", "len", "(", "rep", ")", ">", "limit", ":", "if", "limit", "<", "3", ":", "raise", "ValueError", "(", "'Abbreviation limit may not be less than 3'", ")", "rep", "=", "rep", "[", ":", "limit", "-", "3", "]", "+", "'...'", "return", "rep" ]
Converts a value into its string representation and abbreviates that representation based on the given length `limit` if necessary.
[ "Converts", "a", "value", "into", "its", "string", "representation", "and", "abbreviates", "that", "representation", "based", "on", "the", "given", "length", "limit", "if", "necessary", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/utils/string.py#L6-L19
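abbr is easy to exercise directly; a hint-free copy with two calls shows the truncation rule:

def abbr(value, limit=20):
    # Same truncation rule as eth_abi.utils.string.abbr, minus type hints.
    rep = repr(value)
    if len(rep) > limit:
        if limit < 3:
            raise ValueError('Abbreviation limit may not be less than 3')
        rep = rep[:limit - 3] + '...'
    return rep

print(abbr('short'))                # 'short'  (7-char repr, under the limit)
print(abbr(list(range(100)), 12))   # [0, 1, 2,...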
2,240
ethereum/eth-abi
eth_abi/encoding.py
BaseEncoder.invalidate_value
def invalidate_value(
        cls,
        value: Any,
        exc: Type[Exception]=EncodingTypeError,
        msg: Optional[str]=None,
) -> None:
    """
    Throws a standard exception for when a value is not encodable by an
    encoder.
    """
    raise exc(
        "Value `{rep}` of type {typ} cannot be encoded by {cls}{msg}".format(
            rep=abbr(value),
            typ=type(value),
            cls=cls.__name__,
            msg="" if msg is None else (": " + msg),
        )
    )
python
def invalidate_value(
        cls,
        value: Any,
        exc: Type[Exception]=EncodingTypeError,
        msg: Optional[str]=None,
) -> None:
    """
    Throws a standard exception for when a value is not encodable by an
    encoder.
    """
    raise exc(
        "Value `{rep}` of type {typ} cannot be encoded by {cls}{msg}".format(
            rep=abbr(value),
            typ=type(value),
            cls=cls.__name__,
            msg="" if msg is None else (": " + msg),
        )
    )
[ "def", "invalidate_value", "(", "cls", ",", "value", ":", "Any", ",", "exc", ":", "Type", "[", "Exception", "]", "=", "EncodingTypeError", ",", "msg", ":", "Optional", "[", "str", "]", "=", "None", ",", ")", "->", "None", ":", "raise", "exc", "(", "\"Value `{rep}` of type {typ} cannot be encoded by {cls}{msg}\"", ".", "format", "(", "rep", "=", "abbr", "(", "value", ")", ",", "typ", "=", "type", "(", "value", ")", ",", "cls", "=", "cls", ".", "__name__", ",", "msg", "=", "\"\"", "if", "msg", "is", "None", "else", "(", "\": \"", "+", "msg", ")", ",", ")", ")" ]
Throws a standard exception for when a value is not encodable by an encoder.
[ "Throws", "a", "standard", "exception", "for", "when", "a", "value", "is", "not", "encodable", "by", "an", "encoder", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/encoding.py#L78-L95
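Usage note: a minimal sketch of how a BaseEncoder subclass might call invalidate_value from its validation hook. The subclass below is hypothetical and is not part of eth-abi; ValueOutOfBounds is assumed to be importable from eth_abi.exceptions.

from eth_abi.encoding import BaseEncoder
from eth_abi.exceptions import ValueOutOfBounds

class NonNegativeIntEncoder(BaseEncoder):   # hypothetical subclass for illustration
    @classmethod
    def validate_value(cls, value):
        if not isinstance(value, int):
            cls.invalidate_value(value)     # raises EncodingTypeError with the standard message
        if value < 0:
            cls.invalidate_value(value, exc=ValueOutOfBounds, msg='must be non-negative')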
2,241
ethereum/eth-abi
eth_abi/base.py
parse_tuple_type_str
def parse_tuple_type_str(old_from_type_str): """ Used by BaseCoder subclasses as a convenience for implementing the ``from_type_str`` method required by ``ABIRegistry``. Useful if normalizing then parsing a tuple type string is required in that method. """ @functools.wraps(old_from_type_str) def new_from_type_str(cls, type_str, registry): normalized_type_str = normalize(type_str) abi_type = parse(normalized_type_str) type_str_repr = repr(type_str) if type_str != normalized_type_str: type_str_repr = '{} (normalized to {})'.format( type_str_repr, repr(normalized_type_str), ) if not isinstance(abi_type, TupleType): raise ValueError( 'Cannot create {} for non-tuple type {}'.format( cls.__name__, type_str_repr, ) ) abi_type.validate() return old_from_type_str(cls, abi_type, registry) return classmethod(new_from_type_str)
python
def parse_tuple_type_str(old_from_type_str): """ Used by BaseCoder subclasses as a convenience for implementing the ``from_type_str`` method required by ``ABIRegistry``. Useful if normalizing then parsing a tuple type string is required in that method. """ @functools.wraps(old_from_type_str) def new_from_type_str(cls, type_str, registry): normalized_type_str = normalize(type_str) abi_type = parse(normalized_type_str) type_str_repr = repr(type_str) if type_str != normalized_type_str: type_str_repr = '{} (normalized to {})'.format( type_str_repr, repr(normalized_type_str), ) if not isinstance(abi_type, TupleType): raise ValueError( 'Cannot create {} for non-tuple type {}'.format( cls.__name__, type_str_repr, ) ) abi_type.validate() return old_from_type_str(cls, abi_type, registry) return classmethod(new_from_type_str)
[ "def", "parse_tuple_type_str", "(", "old_from_type_str", ")", ":", "@", "functools", ".", "wraps", "(", "old_from_type_str", ")", "def", "new_from_type_str", "(", "cls", ",", "type_str", ",", "registry", ")", ":", "normalized_type_str", "=", "normalize", "(", "type_str", ")", "abi_type", "=", "parse", "(", "normalized_type_str", ")", "type_str_repr", "=", "repr", "(", "type_str", ")", "if", "type_str", "!=", "normalized_type_str", ":", "type_str_repr", "=", "'{} (normalized to {})'", ".", "format", "(", "type_str_repr", ",", "repr", "(", "normalized_type_str", ")", ",", ")", "if", "not", "isinstance", "(", "abi_type", ",", "TupleType", ")", ":", "raise", "ValueError", "(", "'Cannot create {} for non-tuple type {}'", ".", "format", "(", "cls", ".", "__name__", ",", "type_str_repr", ",", ")", ")", "abi_type", ".", "validate", "(", ")", "return", "old_from_type_str", "(", "cls", ",", "abi_type", ",", "registry", ")", "return", "classmethod", "(", "new_from_type_str", ")" ]
Used by BaseCoder subclasses as a convenience for implementing the ``from_type_str`` method required by ``ABIRegistry``. Useful if normalizing then parsing a tuple type string is required in that method.
[ "Used", "by", "BaseCoder", "subclasses", "as", "a", "convenience", "for", "implementing", "the", "from_type_str", "method", "required", "by", "ABIRegistry", ".", "Useful", "if", "normalizing", "then", "parsing", "a", "tuple", "type", "string", "is", "required", "in", "that", "method", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/base.py#L80-L110
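Usage note: a sketch of the decorator in use. Because parse_tuple_type_str returns a classmethod, the decorated function is written as a plain function taking cls first; the coder class and its constructor below are hypothetical.

from eth_abi.base import parse_tuple_type_str

class MyTupleCoder:                              # hypothetical BaseCoder subclass
    def __init__(self, components):
        self.components = components

    @parse_tuple_type_str
    def from_type_str(cls, abi_type, registry):
        # abi_type arrives here already normalized, parsed and validated as a
        # TupleType; non-tuple type strings raise ValueError before this point
        return cls(components=abi_type.components)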
2,242
ethereum/eth-abi
eth_abi/decoding.py
ContextFramesBytesIO.seek_in_frame
def seek_in_frame(self, pos, *args, **kwargs): """ Seeks relative to the total offset of the current contextual frames. """ super().seek(self._total_offset + pos, *args, **kwargs)
python
def seek_in_frame(self, pos, *args, **kwargs): """ Seeks relative to the total offset of the current contextual frames. """ super().seek(self._total_offset + pos, *args, **kwargs)
[ "def", "seek_in_frame", "(", "self", ",", "pos", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "super", "(", ")", ".", "seek", "(", "self", ".", "_total_offset", "+", "pos", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Seeks relative to the total offset of the current contextual frames.
[ "Seeks", "relative", "to", "the", "total", "offset", "of", "the", "current", "contextual", "frames", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/decoding.py#L80-L84
2,243
ethereum/eth-abi
eth_abi/decoding.py
ContextFramesBytesIO.push_frame
def push_frame(self, offset): """ Pushes a new contextual frame onto the stack with the given offset and a return position at the current cursor position then seeks to the new total offset. """ self._frames.append((offset, self.tell())) self._total_offset += offset self.seek_in_frame(0)
python
def push_frame(self, offset): """ Pushes a new contextual frame onto the stack with the given offset and a return position at the current cursor position then seeks to the new total offset. """ self._frames.append((offset, self.tell())) self._total_offset += offset self.seek_in_frame(0)
[ "def", "push_frame", "(", "self", ",", "offset", ")", ":", "self", ".", "_frames", ".", "append", "(", "(", "offset", ",", "self", ".", "tell", "(", ")", ")", ")", "self", ".", "_total_offset", "+=", "offset", "self", ".", "seek_in_frame", "(", "0", ")" ]
Pushes a new contextual frame onto the stack with the given offset and a return position at the current cursor position then seeks to the new total offset.
[ "Pushes", "a", "new", "contextual", "frame", "onto", "the", "stack", "with", "the", "given", "offset", "and", "a", "return", "position", "at", "the", "current", "cursor", "position", "then", "seeks", "to", "the", "new", "total", "offset", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/decoding.py#L86-L95
2,244
ethereum/eth-abi
eth_abi/decoding.py
ContextFramesBytesIO.pop_frame
def pop_frame(self): """ Pops the current contextual frame off of the stack and returns the cursor to the frame's return position. """ try: offset, return_pos = self._frames.pop() except IndexError: raise IndexError('no frames to pop') self._total_offset -= offset self.seek(return_pos)
python
def pop_frame(self): """ Pops the current contextual frame off of the stack and returns the cursor to the frame's return position. """ try: offset, return_pos = self._frames.pop() except IndexError: raise IndexError('no frames to pop') self._total_offset -= offset self.seek(return_pos)
[ "def", "pop_frame", "(", "self", ")", ":", "try", ":", "offset", ",", "return_pos", "=", "self", ".", "_frames", ".", "pop", "(", ")", "except", "IndexError", ":", "raise", "IndexError", "(", "'no frames to pop'", ")", "self", ".", "_total_offset", "-=", "offset", "self", ".", "seek", "(", "return_pos", ")" ]
Pops the current contextual frame off of the stack and returns the cursor to the frame's return position.
[ "Pops", "the", "current", "contextual", "frame", "off", "of", "the", "stack", "and", "returns", "the", "cursor", "to", "the", "frame", "s", "return", "position", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/decoding.py#L97-L108
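Usage note: a small demonstration of the three frame operations above, relying only on the behavior shown (ContextFramesBytesIO extends BytesIO, so read and tell work as usual).

from eth_abi.decoding import ContextFramesBytesIO

stream = ContextFramesBytesIO(b'\x00' * 32 + b'\xff' * 32)
stream.push_frame(32)             # seeks to total offset 32, remembers return position 0
assert stream.read(1) == b'\xff'  # reads relative to the new frame
stream.seek_in_frame(0)           # back to the start of the current frame (absolute 32)
stream.pop_frame()                # restores the cursor to the pre-push position
assert stream.tell() == 0
try:
    stream.pop_frame()
except IndexError:
    pass                          # no frames left to pop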
2,245
ethereum/eth-abi
eth_abi/registry.py
has_arrlist
def has_arrlist(type_str): """ A predicate that matches a type string with an array dimension list. """ try: abi_type = grammar.parse(type_str) except exceptions.ParseError: return False return abi_type.arrlist is not None
python
def has_arrlist(type_str): """ A predicate that matches a type string with an array dimension list. """ try: abi_type = grammar.parse(type_str) except exceptions.ParseError: return False return abi_type.arrlist is not None
[ "def", "has_arrlist", "(", "type_str", ")", ":", "try", ":", "abi_type", "=", "grammar", ".", "parse", "(", "type_str", ")", "except", "exceptions", ".", "ParseError", ":", "return", "False", "return", "abi_type", ".", "arrlist", "is", "not", "None" ]
A predicate that matches a type string with an array dimension list.
[ "A", "predicate", "that", "matches", "a", "type", "string", "with", "an", "array", "dimension", "list", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/registry.py#L258-L267
2,246
ethereum/eth-abi
eth_abi/registry.py
is_base_tuple
def is_base_tuple(type_str): """ A predicate that matches a tuple type with no array dimension list. """ try: abi_type = grammar.parse(type_str) except exceptions.ParseError: return False return isinstance(abi_type, grammar.TupleType) and abi_type.arrlist is None
python
def is_base_tuple(type_str): """ A predicate that matches a tuple type with no array dimension list. """ try: abi_type = grammar.parse(type_str) except exceptions.ParseError: return False return isinstance(abi_type, grammar.TupleType) and abi_type.arrlist is None
[ "def", "is_base_tuple", "(", "type_str", ")", ":", "try", ":", "abi_type", "=", "grammar", ".", "parse", "(", "type_str", ")", "except", "exceptions", ".", "ParseError", ":", "return", "False", "return", "isinstance", "(", "abi_type", ",", "grammar", ".", "TupleType", ")", "and", "abi_type", ".", "arrlist", "is", "None" ]
A predicate that matches a tuple type with no array dimension list.
[ "A", "predicate", "that", "matches", "a", "tuple", "type", "with", "no", "array", "dimension", "list", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/registry.py#L270-L279
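Usage note: the two predicates above in action; all inputs are plain ABI type strings.

from eth_abi.registry import has_arrlist, is_base_tuple

assert has_arrlist('uint256[2]')
assert not has_arrlist('uint256')
assert is_base_tuple('(int128,bool)')
assert not is_base_tuple('(int128,bool)[2]')   # tuple, but with an array dimension list
assert not is_base_tuple('uint256[')           # unparsable strings never match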
2,247
ethereum/eth-abi
eth_abi/registry.py
ABIRegistry.register
def register(self, lookup: Lookup, encoder: Encoder, decoder: Decoder, label: str=None) -> None: """ Registers the given ``encoder`` and ``decoder`` under the given ``lookup``. A unique string label may be optionally provided that can be used to refer to the registration by name. :param lookup: A type string or type string matcher function (predicate). When the registry is queried with a type string ``query`` to determine which encoder or decoder to use, ``query`` will be checked against every registration in the registry. If a registration was created with a type string for ``lookup``, it will be considered a match if ``lookup == query``. If a registration was created with a matcher function for ``lookup``, it will be considered a match if ``lookup(query) is True``. If more than one registration is found to be a match, then an exception is raised. :param encoder: An encoder callable or class to use if ``lookup`` matches a query. If ``encoder`` is a callable, it must accept a python value and return a ``bytes`` value. If ``encoder`` is a class, it must be a valid subclass of :any:`encoding.BaseEncoder` and must also implement the :any:`from_type_str` method on :any:`base.BaseCoder`. :param decoder: A decoder callable or class to use if ``lookup`` matches a query. If ``decoder`` is a callable, it must accept a stream-like object of bytes and return a python value. If ``decoder`` is a class, it must be a valid subclass of :any:`decoding.BaseDecoder` and must also implement the :any:`from_type_str` method on :any:`base.BaseCoder`. :param label: An optional label that can be used to refer to this registration by name. This label can be used to unregister an entry in the registry via the :any:`unregister` method and its variants. """ self.register_encoder(lookup, encoder, label=label) self.register_decoder(lookup, decoder, label=label)
python
def register(self, lookup: Lookup, encoder: Encoder, decoder: Decoder, label: str=None) -> None: """ Registers the given ``encoder`` and ``decoder`` under the given ``lookup``. A unique string label may be optionally provided that can be used to refer to the registration by name. :param lookup: A type string or type string matcher function (predicate). When the registry is queried with a type string ``query`` to determine which encoder or decoder to use, ``query`` will be checked against every registration in the registry. If a registration was created with a type string for ``lookup``, it will be considered a match if ``lookup == query``. If a registration was created with a matcher function for ``lookup``, it will be considered a match if ``lookup(query) is True``. If more than one registration is found to be a match, then an exception is raised. :param encoder: An encoder callable or class to use if ``lookup`` matches a query. If ``encoder`` is a callable, it must accept a python value and return a ``bytes`` value. If ``encoder`` is a class, it must be a valid subclass of :any:`encoding.BaseEncoder` and must also implement the :any:`from_type_str` method on :any:`base.BaseCoder`. :param decoder: A decoder callable or class to use if ``lookup`` matches a query. If ``decoder`` is a callable, it must accept a stream-like object of bytes and return a python value. If ``decoder`` is a class, it must be a valid subclass of :any:`decoding.BaseDecoder` and must also implement the :any:`from_type_str` method on :any:`base.BaseCoder`. :param label: An optional label that can be used to refer to this registration by name. This label can be used to unregister an entry in the registry via the :any:`unregister` method and its variants. """ self.register_encoder(lookup, encoder, label=label) self.register_decoder(lookup, decoder, label=label)
[ "def", "register", "(", "self", ",", "lookup", ":", "Lookup", ",", "encoder", ":", "Encoder", ",", "decoder", ":", "Decoder", ",", "label", ":", "str", "=", "None", ")", "->", "None", ":", "self", ".", "register_encoder", "(", "lookup", ",", "encoder", ",", "label", "=", "label", ")", "self", ".", "register_decoder", "(", "lookup", ",", "decoder", ",", "label", "=", "label", ")" ]
Registers the given ``encoder`` and ``decoder`` under the given ``lookup``. A unique string label may be optionally provided that can be used to refer to the registration by name. :param lookup: A type string or type string matcher function (predicate). When the registry is queried with a type string ``query`` to determine which encoder or decoder to use, ``query`` will be checked against every registration in the registry. If a registration was created with a type string for ``lookup``, it will be considered a match if ``lookup == query``. If a registration was created with a matcher function for ``lookup``, it will be considered a match if ``lookup(query) is True``. If more than one registration is found to be a match, then an exception is raised. :param encoder: An encoder callable or class to use if ``lookup`` matches a query. If ``encoder`` is a callable, it must accept a python value and return a ``bytes`` value. If ``encoder`` is a class, it must be a valid subclass of :any:`encoding.BaseEncoder` and must also implement the :any:`from_type_str` method on :any:`base.BaseCoder`. :param decoder: A decoder callable or class to use if ``lookup`` matches a query. If ``decoder`` is a callable, it must accept a stream-like object of bytes and return a python value. If ``decoder`` is a class, it must be a valid subclass of :any:`decoding.BaseDecoder` and must also implement the :any:`from_type_str` method on :any:`base.BaseCoder`. :param label: An optional label that can be used to refer to this registration by name. This label can be used to unregister an entry in the registry via the :any:`unregister` method and its variants.
[ "Registers", "the", "given", "encoder", "and", "decoder", "under", "the", "given", "lookup", ".", "A", "unique", "string", "label", "may", "be", "optionally", "provided", "that", "can", "be", "used", "to", "refer", "to", "the", "registration", "by", "name", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/registry.py#L395-L431
2,248
ethereum/eth-abi
eth_abi/registry.py
ABIRegistry.unregister
def unregister(self, label: str) -> None: """ Unregisters the entries in the encoder and decoder registries which have the label ``label``. """ self.unregister_encoder(label) self.unregister_decoder(label)
python
def unregister(self, label: str) -> None: """ Unregisters the entries in the encoder and decoder registries which have the label ``label``. """ self.unregister_encoder(label) self.unregister_decoder(label)
[ "def", "unregister", "(", "self", ",", "label", ":", "str", ")", "->", "None", ":", "self", ".", "unregister_encoder", "(", "label", ")", "self", ".", "unregister_decoder", "(", "label", ")" ]
Unregisters the entries in the encoder and decoder registries which have the label ``label``.
[ "Unregisters", "the", "entries", "in", "the", "encoder", "and", "decoder", "registries", "which", "have", "the", "label", "label", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/registry.py#L433-L439
2,249
ethereum/eth-abi
eth_abi/registry.py
ABIRegistry.copy
def copy(self): """ Copies a registry such that new registrations can be made or existing registrations can be unregistered without affecting any instance from which a copy was obtained. This is useful if an existing registry fulfills most of a user's needs but requires one or two modifications. In that case, a copy of that registry can be obtained and the necessary changes made without affecting the original registry. """ cpy = type(self)() cpy._encoders = copy.copy(self._encoders) cpy._decoders = copy.copy(self._decoders) return cpy
python
def copy(self): """ Copies a registry such that new registrations can be made or existing registrations can be unregistered without affecting any instance from which a copy was obtained. This is useful if an existing registry fulfills most of a user's needs but requires one or two modifications. In that case, a copy of that registry can be obtained and the necessary changes made without affecting the original registry. """ cpy = type(self)() cpy._encoders = copy.copy(self._encoders) cpy._decoders = copy.copy(self._decoders) return cpy
[ "def", "copy", "(", "self", ")", ":", "cpy", "=", "type", "(", "self", ")", "(", ")", "cpy", ".", "_encoders", "=", "copy", ".", "copy", "(", "self", ".", "_encoders", ")", "cpy", ".", "_decoders", "=", "copy", ".", "copy", "(", "self", ".", "_decoders", ")", "return", "cpy" ]
Copies a registry such that new registrations can be made or existing registrations can be unregistered without affecting any instance from which a copy was obtained. This is useful if an existing registry fulfills most of a user's needs but requires one or two modifications. In that case, a copy of that registry can be obtained and the necessary changes made without affecting the original registry.
[ "Copies", "a", "registry", "such", "that", "new", "registrations", "can", "be", "made", "or", "existing", "registrations", "can", "be", "unregistered", "without", "affecting", "any", "instance", "from", "which", "a", "copy", "was", "obtained", ".", "This", "is", "useful", "if", "an", "existing", "registry", "fulfills", "most", "of", "a", "user", "s", "needs", "but", "requires", "one", "or", "two", "modifications", ".", "In", "that", "case", "a", "copy", "of", "that", "registry", "can", "be", "obtained", "and", "the", "necessary", "changes", "made", "without", "affecting", "the", "original", "registry", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/registry.py#L463-L477
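Usage note: a sketch tying together the register, unregister and copy methods above. The 'null' type and the two callables are hypothetical, and registry is assumed to be the module-level default ABIRegistry instance in eth_abi.registry.

from eth_abi.registry import registry

def encode_null(value):        # callable encoder: python value -> bytes
    return b'\x00' * 32

def decode_null(stream):       # callable decoder: byte stream -> python value
    stream.read(32)
    return None

my_registry = registry.copy()  # changes below do not affect the default registry
my_registry.register('null', encode_null, decode_null, label='null')
# ... use my_registry with a codec ...
my_registry.unregister('null')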
2,250
ethereum/eth-abi
eth_abi/codec.py
ABIEncoder.encode_single
def encode_single(self, typ: TypeStr, arg: Any) -> bytes: """ Encodes the python value ``arg`` as a binary value of the ABI type ``typ``. :param typ: The string representation of the ABI type that will be used for encoding e.g. ``'uint256'``, ``'bytes[]'``, ``'(int,int)'``, etc. :param arg: The python value to be encoded. :returns: The binary representation of the python value ``arg`` as a value of the ABI type ``typ``. """ encoder = self._registry.get_encoder(typ) return encoder(arg)
python
def encode_single(self, typ: TypeStr, arg: Any) -> bytes: """ Encodes the python value ``arg`` as a binary value of the ABI type ``typ``. :param typ: The string representation of the ABI type that will be used for encoding e.g. ``'uint256'``, ``'bytes[]'``, ``'(int,int)'``, etc. :param arg: The python value to be encoded. :returns: The binary representation of the python value ``arg`` as a value of the ABI type ``typ``. """ encoder = self._registry.get_encoder(typ) return encoder(arg)
[ "def", "encode_single", "(", "self", ",", "typ", ":", "TypeStr", ",", "arg", ":", "Any", ")", "->", "bytes", ":", "encoder", "=", "self", ".", "_registry", ".", "get_encoder", "(", "typ", ")", "return", "encoder", "(", "arg", ")" ]
Encodes the python value ``arg`` as a binary value of the ABI type ``typ``. :param typ: The string representation of the ABI type that will be used for encoding e.g. ``'uint256'``, ``'bytes[]'``, ``'(int,int)'``, etc. :param arg: The python value to be encoded. :returns: The binary representation of the python value ``arg`` as a value of the ABI type ``typ``.
[ "Encodes", "the", "python", "value", "arg", "as", "a", "binary", "value", "of", "the", "ABI", "type", "typ", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/codec.py#L50-L65
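Usage note: a worked example, assuming the top-level encode_single helper re-exported from the package root (eth-abi 2.x style), which delegates to a default codec with the method signature above.

from eth_abi import encode_single

encoded = encode_single('uint256', 12345)
assert len(encoded) == 32               # one static 32-byte word
assert encoded.hex().endswith('3039')   # 12345 == 0x3039, left-padded with zeros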
2,251
ethereum/eth-abi
eth_abi/codec.py
ABIEncoder.encode_abi
def encode_abi(self, types: Iterable[TypeStr], args: Iterable[Any]) -> bytes: """ Encodes the python values in ``args`` as a sequence of binary values of the ABI types in ``types`` via the head-tail mechanism. :param types: An iterable of string representations of the ABI types that will be used for encoding e.g. ``('uint256', 'bytes[]', '(int,int)')`` :param args: An iterable of python values to be encoded. :returns: The head-tail encoded binary representation of the python values in ``args`` as values of the ABI types in ``types``. """ encoders = [ self._registry.get_encoder(type_str) for type_str in types ] encoder = TupleEncoder(encoders=encoders) return encoder(args)
python
def encode_abi(self, types: Iterable[TypeStr], args: Iterable[Any]) -> bytes: """ Encodes the python values in ``args`` as a sequence of binary values of the ABI types in ``types`` via the head-tail mechanism. :param types: An iterable of string representations of the ABI types that will be used for encoding e.g. ``('uint256', 'bytes[]', '(int,int)')`` :param args: An iterable of python values to be encoded. :returns: The head-tail encoded binary representation of the python values in ``args`` as values of the ABI types in ``types``. """ encoders = [ self._registry.get_encoder(type_str) for type_str in types ] encoder = TupleEncoder(encoders=encoders) return encoder(args)
[ "def", "encode_abi", "(", "self", ",", "types", ":", "Iterable", "[", "TypeStr", "]", ",", "args", ":", "Iterable", "[", "Any", "]", ")", "->", "bytes", ":", "encoders", "=", "[", "self", ".", "_registry", ".", "get_encoder", "(", "type_str", ")", "for", "type_str", "in", "types", "]", "encoder", "=", "TupleEncoder", "(", "encoders", "=", "encoders", ")", "return", "encoder", "(", "args", ")" ]
Encodes the python values in ``args`` as a sequence of binary values of the ABI types in ``types`` via the head-tail mechanism. :param types: An iterable of string representations of the ABI types that will be used for encoding e.g. ``('uint256', 'bytes[]', '(int,int)')`` :param args: An iterable of python values to be encoded. :returns: The head-tail encoded binary representation of the python values in ``args`` as values of the ABI types in ``types``.
[ "Encodes", "the", "python", "values", "in", "args", "as", "a", "sequence", "of", "binary", "values", "of", "the", "ABI", "types", "in", "types", "via", "the", "head", "-", "tail", "mechanism", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/codec.py#L67-L87
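Usage note: for static types the head-tail mechanism degenerates to simple concatenation of 32-byte heads, so the sizes below are fully determined (top-level helper assumed, as above).

from eth_abi import encode_abi

assert len(encode_abi(['uint256', 'bool'], [12345, True])) == 64       # two heads, no tail
# a dynamic type adds a 32-byte offset head plus a length-prefixed, padded tail
assert len(encode_abi(['uint256', 'bytes'], [1, b'xy'])) == 32 + 32 + 64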
2,252
ethereum/eth-abi
eth_abi/codec.py
ABIEncoder.is_encodable
def is_encodable(self, typ: TypeStr, arg: Any) -> bool: """ Determines if the python value ``arg`` is encodable as a value of the ABI type ``typ``. :param typ: A string representation for the ABI type against which the python value ``arg`` will be checked e.g. ``'uint256'``, ``'bytes[]'``, ``'(int,int)'``, etc. :param arg: The python value whose encodability should be checked. :returns: ``True`` if ``arg`` is encodable as a value of the ABI type ``typ``. Otherwise, ``False``. """ encoder = self._registry.get_encoder(typ) try: encoder.validate_value(arg) except EncodingError: return False except AttributeError: try: encoder(arg) except EncodingError: return False return True
python
def is_encodable(self, typ: TypeStr, arg: Any) -> bool: """ Determines if the python value ``arg`` is encodable as a value of the ABI type ``typ``. :param typ: A string representation for the ABI type against which the python value ``arg`` will be checked e.g. ``'uint256'``, ``'bytes[]'``, ``'(int,int)'``, etc. :param arg: The python value whose encodability should be checked. :returns: ``True`` if ``arg`` is encodable as a value of the ABI type ``typ``. Otherwise, ``False``. """ encoder = self._registry.get_encoder(typ) try: encoder.validate_value(arg) except EncodingError: return False except AttributeError: try: encoder(arg) except EncodingError: return False return True
[ "def", "is_encodable", "(", "self", ",", "typ", ":", "TypeStr", ",", "arg", ":", "Any", ")", "->", "bool", ":", "encoder", "=", "self", ".", "_registry", ".", "get_encoder", "(", "typ", ")", "try", ":", "encoder", ".", "validate_value", "(", "arg", ")", "except", "EncodingError", ":", "return", "False", "except", "AttributeError", ":", "try", ":", "encoder", "(", "arg", ")", "except", "EncodingError", ":", "return", "False", "return", "True" ]
Determines if the python value ``arg`` is encodable as a value of the ABI type ``typ``. :param typ: A string representation for the ABI type against which the python value ``arg`` will be checked e.g. ``'uint256'``, ``'bytes[]'``, ``'(int,int)'``, etc. :param arg: The python value whose encodability should be checked. :returns: ``True`` if ``arg`` is encodable as a value of the ABI type ``typ``. Otherwise, ``False``.
[ "Determines", "if", "the", "python", "value", "arg", "is", "encodable", "as", "a", "value", "of", "the", "ABI", "type", "typ", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/codec.py#L89-L114
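Usage note: is_encodable turns the two validation paths above (validate_value, or a trial encode) into a boolean (top-level helper assumed, as above).

from eth_abi import is_encodable

assert is_encodable('uint8', 255)
assert not is_encodable('uint8', 256)    # out of range for uint8
assert not is_encodable('bool', 'yes')   # wrong python type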
2,253
ethereum/eth-abi
eth_abi/codec.py
ABIDecoder.decode_single
def decode_single(self, typ: TypeStr, data: Decodable) -> Any: """ Decodes the binary value ``data`` of the ABI type ``typ`` into its equivalent python value. :param typ: The string representation of the ABI type that will be used for decoding e.g. ``'uint256'``, ``'bytes[]'``, ``'(int,int)'``, etc. :param data: The binary value to be decoded. :returns: The equivalent python value of the ABI value represented in ``data``. """ if not is_bytes(data): raise TypeError("The `data` value must be of bytes type. Got {0}".format(type(data))) decoder = self._registry.get_decoder(typ) stream = ContextFramesBytesIO(data) return decoder(stream)
python
def decode_single(self, typ: TypeStr, data: Decodable) -> Any: """ Decodes the binary value ``data`` of the ABI type ``typ`` into its equivalent python value. :param typ: The string representation of the ABI type that will be used for decoding e.g. ``'uint256'``, ``'bytes[]'``, ``'(int,int)'``, etc. :param data: The binary value to be decoded. :returns: The equivalent python value of the ABI value represented in ``data``. """ if not is_bytes(data): raise TypeError("The `data` value must be of bytes type. Got {0}".format(type(data))) decoder = self._registry.get_decoder(typ) stream = ContextFramesBytesIO(data) return decoder(stream)
[ "def", "decode_single", "(", "self", ",", "typ", ":", "TypeStr", ",", "data", ":", "Decodable", ")", "->", "Any", ":", "if", "not", "is_bytes", "(", "data", ")", ":", "raise", "TypeError", "(", "\"The `data` value must be of bytes type. Got {0}\"", ".", "format", "(", "type", "(", "data", ")", ")", ")", "decoder", "=", "self", ".", "_registry", ".", "get_decoder", "(", "typ", ")", "stream", "=", "ContextFramesBytesIO", "(", "data", ")", "return", "decoder", "(", "stream", ")" ]
Decodes the binary value ``data`` of the ABI type ``typ`` into its equivalent python value. :param typ: The string representation of the ABI type that will be used for decoding e.g. ``'uint256'``, ``'bytes[]'``, ``'(int,int)'``, etc. :param data: The binary value to be decoded. :returns: The equivalent python value of the ABI value represented in ``data``.
[ "Decodes", "the", "binary", "value", "data", "of", "the", "ABI", "type", "typ", "into", "its", "equivalent", "python", "value", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/codec.py#L135-L153
2,254
ethereum/eth-abi
eth_abi/codec.py
ABIDecoder.decode_abi
def decode_abi(self, types: Iterable[TypeStr], data: Decodable) -> Tuple[Any, ...]: """ Decodes the binary value ``data`` as a sequence of values of the ABI types in ``types`` via the head-tail mechanism into a tuple of equivalent python values. :param types: An iterable of string representations of the ABI types that will be used for decoding e.g. ``('uint256', 'bytes[]', '(int,int)')`` :param data: The binary value to be decoded. :returns: A tuple of equivalent python values for the ABI values represented in ``data``. """ if not is_bytes(data): raise TypeError("The `data` value must be of bytes type. Got {0}".format(type(data))) decoders = [ self._registry.get_decoder(type_str) for type_str in types ] decoder = TupleDecoder(decoders=decoders) stream = ContextFramesBytesIO(data) return decoder(stream)
python
def decode_abi(self, types: Iterable[TypeStr], data: Decodable) -> Tuple[Any, ...]: """ Decodes the binary value ``data`` as a sequence of values of the ABI types in ``types`` via the head-tail mechanism into a tuple of equivalent python values. :param types: An iterable of string representations of the ABI types that will be used for decoding e.g. ``('uint256', 'bytes[]', '(int,int)')`` :param data: The binary value to be decoded. :returns: A tuple of equivalent python values for the ABI values represented in ``data``. """ if not is_bytes(data): raise TypeError("The `data` value must be of bytes type. Got {0}".format(type(data))) decoders = [ self._registry.get_decoder(type_str) for type_str in types ] decoder = TupleDecoder(decoders=decoders) stream = ContextFramesBytesIO(data) return decoder(stream)
[ "def", "decode_abi", "(", "self", ",", "types", ":", "Iterable", "[", "TypeStr", "]", ",", "data", ":", "Decodable", ")", "->", "Tuple", "[", "Any", ",", "...", "]", ":", "if", "not", "is_bytes", "(", "data", ")", ":", "raise", "TypeError", "(", "\"The `data` value must be of bytes type. Got {0}\"", ".", "format", "(", "type", "(", "data", ")", ")", ")", "decoders", "=", "[", "self", ".", "_registry", ".", "get_decoder", "(", "type_str", ")", "for", "type_str", "in", "types", "]", "decoder", "=", "TupleDecoder", "(", "decoders", "=", "decoders", ")", "stream", "=", "ContextFramesBytesIO", "(", "data", ")", "return", "decoder", "(", "stream", ")" ]
Decodes the binary value ``data`` as a sequence of values of the ABI types in ``types`` via the head-tail mechanism into a tuple of equivalent python values. :param types: An iterable of string representations of the ABI types that will be used for decoding e.g. ``('uint256', 'bytes[]', '(int,int)')`` :param data: The binary value to be decoded. :returns: A tuple of equivalent python values for the ABI values represented in ``data``.
[ "Decodes", "the", "binary", "value", "data", "as", "a", "sequence", "of", "values", "of", "the", "ABI", "types", "in", "types", "via", "the", "head", "-", "tail", "mechanism", "into", "a", "tuple", "of", "equivalent", "python", "values", "." ]
0a5cab0bdeae30b77efa667379427581784f1707
https://github.com/ethereum/eth-abi/blob/0a5cab0bdeae30b77efa667379427581784f1707/eth_abi/codec.py#L155-L179
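Usage note: round-tripping through the encoders above; decode_abi returns a tuple, and both decoders insist on bytes input (top-level helpers assumed, as above).

from eth_abi import decode_abi, decode_single, encode_abi, encode_single

assert decode_single('uint256', encode_single('uint256', 12345)) == 12345
types = ['uint256', 'bool']
assert decode_abi(types, encode_abi(types, [12345, True])) == (12345, True)

try:
    decode_single('uint256', '0x3039')   # str, not bytes
except TypeError:
    pass                                 # rejected by the is_bytes guard above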
2,255
aiortc/aioice
aioice/turn.py
create_turn_endpoint
async def create_turn_endpoint(protocol_factory, server_addr, username, password, lifetime=600, ssl=False, transport='udp'): """ Create datagram connection relayed over TURN. """ loop = asyncio.get_event_loop() if transport == 'tcp': _, inner_protocol = await loop.create_connection( lambda: TurnClientTcpProtocol(server_addr, username=username, password=password, lifetime=lifetime), host=server_addr[0], port=server_addr[1], ssl=ssl) else: _, inner_protocol = await loop.create_datagram_endpoint( lambda: TurnClientUdpProtocol(server_addr, username=username, password=password, lifetime=lifetime), remote_addr=server_addr) protocol = protocol_factory() transport = TurnTransport(protocol, inner_protocol) await transport._connect() return transport, protocol
python
async def create_turn_endpoint(protocol_factory, server_addr, username, password, lifetime=600, ssl=False, transport='udp'): """ Create datagram connection relayed over TURN. """ loop = asyncio.get_event_loop() if transport == 'tcp': _, inner_protocol = await loop.create_connection( lambda: TurnClientTcpProtocol(server_addr, username=username, password=password, lifetime=lifetime), host=server_addr[0], port=server_addr[1], ssl=ssl) else: _, inner_protocol = await loop.create_datagram_endpoint( lambda: TurnClientUdpProtocol(server_addr, username=username, password=password, lifetime=lifetime), remote_addr=server_addr) protocol = protocol_factory() transport = TurnTransport(protocol, inner_protocol) await transport._connect() return transport, protocol
[ "async", "def", "create_turn_endpoint", "(", "protocol_factory", ",", "server_addr", ",", "username", ",", "password", ",", "lifetime", "=", "600", ",", "ssl", "=", "False", ",", "transport", "=", "'udp'", ")", ":", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "if", "transport", "==", "'tcp'", ":", "_", ",", "inner_protocol", "=", "await", "loop", ".", "create_connection", "(", "lambda", ":", "TurnClientTcpProtocol", "(", "server_addr", ",", "username", "=", "username", ",", "password", "=", "password", ",", "lifetime", "=", "lifetime", ")", ",", "host", "=", "server_addr", "[", "0", "]", ",", "port", "=", "server_addr", "[", "1", "]", ",", "ssl", "=", "ssl", ")", "else", ":", "_", ",", "inner_protocol", "=", "await", "loop", ".", "create_datagram_endpoint", "(", "lambda", ":", "TurnClientUdpProtocol", "(", "server_addr", ",", "username", "=", "username", ",", "password", "=", "password", ",", "lifetime", "=", "lifetime", ")", ",", "remote_addr", "=", "server_addr", ")", "protocol", "=", "protocol_factory", "(", ")", "transport", "=", "TurnTransport", "(", "protocol", ",", "inner_protocol", ")", "await", "transport", ".", "_connect", "(", ")", "return", "transport", ",", "protocol" ]
Create datagram connection relayed over TURN.
[ "Create", "datagram", "connection", "relayed", "over", "TURN", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/turn.py#L276-L303
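Usage note: a minimal sketch of allocating a relay and sending data through it. The server address, credentials and peer address are placeholders, and EchoProtocol is a hypothetical application protocol.

import asyncio
from aioice.turn import create_turn_endpoint

class EchoProtocol(asyncio.DatagramProtocol):          # hypothetical
    def datagram_received(self, data, addr):
        print('received %r from %s' % (data, addr))

async def main():
    transport, protocol = await create_turn_endpoint(
        EchoProtocol,
        server_addr=('turn.example.com', 3478),        # placeholder TURN server
        username='user', password='secret')
    print('relayed address:', transport.get_extra_info('sockname'))
    transport.sendto(b'ping', ('203.0.113.1', 40000))  # placeholder peer

asyncio.get_event_loop().run_until_complete(main())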
2,256
aiortc/aioice
aioice/turn.py
TurnClientMixin.connect
async def connect(self): """ Create a TURN allocation. """ request = stun.Message(message_method=stun.Method.ALLOCATE, message_class=stun.Class.REQUEST) request.attributes['LIFETIME'] = self.lifetime request.attributes['REQUESTED-TRANSPORT'] = UDP_TRANSPORT try: response, _ = await self.request(request) except exceptions.TransactionFailed as e: response = e.response if response.attributes['ERROR-CODE'][0] == 401: # update long-term credentials self.nonce = response.attributes['NONCE'] self.realm = response.attributes['REALM'] self.integrity_key = make_integrity_key(self.username, self.realm, self.password) # retry request with authentication request.transaction_id = random_transaction_id() response, _ = await self.request(request) self.relayed_address = response.attributes['XOR-RELAYED-ADDRESS'] logger.info('TURN allocation created %s', self.relayed_address) # periodically refresh allocation self.refresh_handle = asyncio.ensure_future(self.refresh()) return self.relayed_address
python
async def connect(self): """ Create a TURN allocation. """ request = stun.Message(message_method=stun.Method.ALLOCATE, message_class=stun.Class.REQUEST) request.attributes['LIFETIME'] = self.lifetime request.attributes['REQUESTED-TRANSPORT'] = UDP_TRANSPORT try: response, _ = await self.request(request) except exceptions.TransactionFailed as e: response = e.response if response.attributes['ERROR-CODE'][0] == 401: # update long-term credentials self.nonce = response.attributes['NONCE'] self.realm = response.attributes['REALM'] self.integrity_key = make_integrity_key(self.username, self.realm, self.password) # retry request with authentication request.transaction_id = random_transaction_id() response, _ = await self.request(request) self.relayed_address = response.attributes['XOR-RELAYED-ADDRESS'] logger.info('TURN allocation created %s', self.relayed_address) # periodically refresh allocation self.refresh_handle = asyncio.ensure_future(self.refresh()) return self.relayed_address
[ "async", "def", "connect", "(", "self", ")", ":", "request", "=", "stun", ".", "Message", "(", "message_method", "=", "stun", ".", "Method", ".", "ALLOCATE", ",", "message_class", "=", "stun", ".", "Class", ".", "REQUEST", ")", "request", ".", "attributes", "[", "'LIFETIME'", "]", "=", "self", ".", "lifetime", "request", ".", "attributes", "[", "'REQUESTED-TRANSPORT'", "]", "=", "UDP_TRANSPORT", "try", ":", "response", ",", "_", "=", "await", "self", ".", "request", "(", "request", ")", "except", "exceptions", ".", "TransactionFailed", "as", "e", ":", "response", "=", "e", ".", "response", "if", "response", ".", "attributes", "[", "'ERROR-CODE'", "]", "[", "0", "]", "==", "401", ":", "# update long-term credentials", "self", ".", "nonce", "=", "response", ".", "attributes", "[", "'NONCE'", "]", "self", ".", "realm", "=", "response", ".", "attributes", "[", "'REALM'", "]", "self", ".", "integrity_key", "=", "make_integrity_key", "(", "self", ".", "username", ",", "self", ".", "realm", ",", "self", ".", "password", ")", "# retry request with authentication", "request", ".", "transaction_id", "=", "random_transaction_id", "(", ")", "response", ",", "_", "=", "await", "self", ".", "request", "(", "request", ")", "self", ".", "relayed_address", "=", "response", ".", "attributes", "[", "'XOR-RELAYED-ADDRESS'", "]", "logger", ".", "info", "(", "'TURN allocation created %s'", ",", "self", ".", "relayed_address", ")", "# periodically refresh allocation", "self", ".", "refresh_handle", "=", "asyncio", ".", "ensure_future", "(", "self", ".", "refresh", "(", ")", ")", "return", "self", ".", "relayed_address" ]
Create a TURN allocation.
[ "Create", "a", "TURN", "allocation", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/turn.py#L70-L99
2,257
aiortc/aioice
aioice/turn.py
TurnClientMixin.delete
async def delete(self): """ Delete the TURN allocation. """ if self.refresh_handle: self.refresh_handle.cancel() self.refresh_handle = None request = stun.Message(message_method=stun.Method.REFRESH, message_class=stun.Class.REQUEST) request.attributes['LIFETIME'] = 0 await self.request(request) logger.info('TURN allocation deleted %s', self.relayed_address) if self.receiver: self.receiver.connection_lost(None)
python
async def delete(self): """ Delete the TURN allocation. """ if self.refresh_handle: self.refresh_handle.cancel() self.refresh_handle = None request = stun.Message(message_method=stun.Method.REFRESH, message_class=stun.Class.REQUEST) request.attributes['LIFETIME'] = 0 await self.request(request) logger.info('TURN allocation deleted %s', self.relayed_address) if self.receiver: self.receiver.connection_lost(None)
[ "async", "def", "delete", "(", "self", ")", ":", "if", "self", ".", "refresh_handle", ":", "self", ".", "refresh_handle", ".", "cancel", "(", ")", "self", ".", "refresh_handle", "=", "None", "request", "=", "stun", ".", "Message", "(", "message_method", "=", "stun", ".", "Method", ".", "REFRESH", ",", "message_class", "=", "stun", ".", "Class", ".", "REQUEST", ")", "request", ".", "attributes", "[", "'LIFETIME'", "]", "=", "0", "await", "self", ".", "request", "(", "request", ")", "logger", ".", "info", "(", "'TURN allocation deleted %s'", ",", "self", ".", "relayed_address", ")", "if", "self", ".", "receiver", ":", "self", ".", "receiver", ".", "connection_lost", "(", "None", ")" ]
Delete the TURN allocation.
[ "Delete", "the", "TURN", "allocation", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/turn.py#L130-L145
2,258
aiortc/aioice
aioice/turn.py
TurnClientMixin.refresh
async def refresh(self): """ Periodically refresh the TURN allocation. """ while True: await asyncio.sleep(5/6 * self.lifetime) request = stun.Message(message_method=stun.Method.REFRESH, message_class=stun.Class.REQUEST) request.attributes['LIFETIME'] = self.lifetime await self.request(request) logger.info('TURN allocation refreshed %s', self.relayed_address)
python
async def refresh(self): """ Periodically refresh the TURN allocation. """ while True: await asyncio.sleep(5/6 * self.lifetime) request = stun.Message(message_method=stun.Method.REFRESH, message_class=stun.Class.REQUEST) request.attributes['LIFETIME'] = self.lifetime await self.request(request) logger.info('TURN allocation refreshed %s', self.relayed_address)
[ "async", "def", "refresh", "(", "self", ")", ":", "while", "True", ":", "await", "asyncio", ".", "sleep", "(", "5", "/", "6", "*", "self", ".", "lifetime", ")", "request", "=", "stun", ".", "Message", "(", "message_method", "=", "stun", ".", "Method", ".", "REFRESH", ",", "message_class", "=", "stun", ".", "Class", ".", "REQUEST", ")", "request", ".", "attributes", "[", "'LIFETIME'", "]", "=", "self", ".", "lifetime", "await", "self", ".", "request", "(", "request", ")", "logger", ".", "info", "(", "'TURN allocation refreshed %s'", ",", "self", ".", "relayed_address", ")" ]
Periodically refresh the TURN allocation.
[ "Periodically", "refresh", "the", "TURN", "allocation", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/turn.py#L147-L159
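Usage note: the 5/6 factor keeps a safety margin before the allocation expires; with the default lifetime this works out as:

lifetime = 600                       # seconds, the create_turn_endpoint default
refresh_interval = 5 / 6 * lifetime
assert refresh_interval == 500.0     # refresh 100 seconds before expiry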
2,259
aiortc/aioice
aioice/turn.py
TurnClientMixin.send_data
async def send_data(self, data, addr): """ Send data to a remote host via the TURN server. """ channel = self.peer_to_channel.get(addr) if channel is None: channel = self.channel_number self.channel_number += 1 self.channel_to_peer[channel] = addr self.peer_to_channel[addr] = channel # bind channel await self.channel_bind(channel, addr) header = struct.pack('!HH', channel, len(data)) self._send(header + data)
python
async def send_data(self, data, addr): """ Send data to a remote host via the TURN server. """ channel = self.peer_to_channel.get(addr) if channel is None: channel = self.channel_number self.channel_number += 1 self.channel_to_peer[channel] = addr self.peer_to_channel[addr] = channel # bind channel await self.channel_bind(channel, addr) header = struct.pack('!HH', channel, len(data)) self._send(header + data)
[ "async", "def", "send_data", "(", "self", ",", "data", ",", "addr", ")", ":", "channel", "=", "self", ".", "peer_to_channel", ".", "get", "(", "addr", ")", "if", "channel", "is", "None", ":", "channel", "=", "self", ".", "channel_number", "self", ".", "channel_number", "+=", "1", "self", ".", "channel_to_peer", "[", "channel", "]", "=", "addr", "self", ".", "peer_to_channel", "[", "addr", "]", "=", "channel", "# bind channel", "await", "self", ".", "channel_bind", "(", "channel", ",", "addr", ")", "header", "=", "struct", ".", "pack", "(", "'!HH'", ",", "channel", ",", "len", "(", "data", ")", ")", "self", ".", "_send", "(", "header", "+", "data", ")" ]
Send data to a remote host via the TURN server.
[ "Send", "data", "to", "a", "remote", "host", "via", "the", "TURN", "server", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/turn.py#L177-L192
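Usage note: the 4-byte header packed by send_data is TURN ChannelData framing. The initial value of channel_number is not shown above; 0x4000 used here is the lowest channel number RFC 5766 allows.

import struct

channel, payload = 0x4000, b'hello'
frame = struct.pack('!HH', channel, len(payload)) + payload
assert frame == b'\x40\x00\x00\x05hello'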
2,260
aiortc/aioice
aioice/turn.py
TurnClientMixin.send_stun
def send_stun(self, message, addr): """ Send a STUN message to the TURN server. """ logger.debug('%s > %s %s', self, addr, message) self._send(bytes(message))
python
def send_stun(self, message, addr): """ Send a STUN message to the TURN server. """ logger.debug('%s > %s %s', self, addr, message) self._send(bytes(message))
[ "def", "send_stun", "(", "self", ",", "message", ",", "addr", ")", ":", "logger", ".", "debug", "(", "'%s > %s %s'", ",", "self", ",", "addr", ",", "message", ")", "self", ".", "_send", "(", "bytes", "(", "message", ")", ")" ]
Send a STUN message to the TURN server.
[ "Send", "a", "STUN", "message", "to", "the", "TURN", "server", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/turn.py#L194-L199
2,261
aiortc/aioice
aioice/turn.py
TurnTransport.get_extra_info
def get_extra_info(self, name, default=None): """ Return optional transport information. - `'related_address'`: the related address - `'sockname'`: the relayed address """ if name == 'related_address': return self.__inner_protocol.transport.get_extra_info('sockname') elif name == 'sockname': return self.__relayed_address return default
python
def get_extra_info(self, name, default=None): """ Return optional transport information. - `'related_address'`: the related address - `'sockname'`: the relayed address """ if name == 'related_address': return self.__inner_protocol.transport.get_extra_info('sockname') elif name == 'sockname': return self.__relayed_address return default
[ "def", "get_extra_info", "(", "self", ",", "name", ",", "default", "=", "None", ")", ":", "if", "name", "==", "'related_address'", ":", "return", "self", ".", "__inner_protocol", ".", "transport", ".", "get_extra_info", "(", "'sockname'", ")", "elif", "name", "==", "'sockname'", ":", "return", "self", ".", "__relayed_address", "return", "default" ]
Return optional transport information. - `'related_address'`: the related address - `'sockname'`: the relayed address
[ "Return", "optional", "transport", "information", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/turn.py#L250-L261
2,262
aiortc/aioice
aioice/turn.py
TurnTransport.sendto
def sendto(self, data, addr): """ Sends the `data` bytes to the remote peer given `addr`. This will bind a TURN channel as necessary. """ asyncio.ensure_future(self.__inner_protocol.send_data(data, addr))
python
def sendto(self, data, addr): """ Sends the `data` bytes to the remote peer given `addr`. This will bind a TURN channel as necessary. """ asyncio.ensure_future(self.__inner_protocol.send_data(data, addr))
[ "def", "sendto", "(", "self", ",", "data", ",", "addr", ")", ":", "asyncio", ".", "ensure_future", "(", "self", ".", "__inner_protocol", ".", "send_data", "(", "data", ",", "addr", ")", ")" ]
Sends the `data` bytes to the remote peer given `addr`. This will bind a TURN channel as necessary.
[ "Sends", "the", "data", "bytes", "to", "the", "remote", "peer", "given", "addr", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/turn.py#L263-L269
2,263
aiortc/aioice
aioice/candidate.py
candidate_foundation
def candidate_foundation(candidate_type, candidate_transport, base_address): """ See RFC 5245 - 4.1.1.3. Computing Foundations """ key = '%s|%s|%s' % (candidate_type, candidate_transport, base_address) return hashlib.md5(key.encode('ascii')).hexdigest()
python
def candidate_foundation(candidate_type, candidate_transport, base_address): """ See RFC 5245 - 4.1.1.3. Computing Foundations """ key = '%s|%s|%s' % (candidate_type, candidate_transport, base_address) return hashlib.md5(key.encode('ascii')).hexdigest()
[ "def", "candidate_foundation", "(", "candidate_type", ",", "candidate_transport", ",", "base_address", ")", ":", "key", "=", "'%s|%s|%s'", "%", "(", "candidate_type", ",", "candidate_transport", ",", "base_address", ")", "return", "hashlib", ".", "md5", "(", "key", ".", "encode", "(", "'ascii'", ")", ")", ".", "hexdigest", "(", ")" ]
See RFC 5245 - 4.1.1.3. Computing Foundations
[ "See", "RFC", "5245", "-", "4", ".", "1", ".", "1", ".", "3", ".", "Computing", "Foundations" ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/candidate.py#L5-L10
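Usage note: the foundation is a pure function of (type, transport, base address), so equal inputs always produce the same value and differing inputs (almost surely) do not:

from aioice.candidate import candidate_foundation

args = ('host', 'udp', '192.168.1.10')
assert candidate_foundation(*args) == candidate_foundation(*args)
assert candidate_foundation('srflx', 'udp', '192.168.1.10') != candidate_foundation(*args)
assert len(candidate_foundation(*args)) == 32   # hex MD5 digest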
2,264
aiortc/aioice
aioice/candidate.py
candidate_priority
def candidate_priority(candidate_component, candidate_type, local_pref=65535): """ See RFC 5245 - 4.1.2.1. Recommended Formula """ if candidate_type == 'host': type_pref = 126 elif candidate_type == 'prflx': type_pref = 110 elif candidate_type == 'srflx': type_pref = 100 else: type_pref = 0 return (1 << 24) * type_pref + \ (1 << 8) * local_pref + \ (256 - candidate_component)
python
def candidate_priority(candidate_component, candidate_type, local_pref=65535): """ See RFC 5245 - 4.1.2.1. Recommended Formula """ if candidate_type == 'host': type_pref = 126 elif candidate_type == 'prflx': type_pref = 110 elif candidate_type == 'srflx': type_pref = 100 else: type_pref = 0 return (1 << 24) * type_pref + \ (1 << 8) * local_pref + \ (256 - candidate_component)
[ "def", "candidate_priority", "(", "candidate_component", ",", "candidate_type", ",", "local_pref", "=", "65535", ")", ":", "if", "candidate_type", "==", "'host'", ":", "type_pref", "=", "126", "elif", "candidate_type", "==", "'prflx'", ":", "type_pref", "=", "110", "elif", "candidate_type", "==", "'srflx'", ":", "type_pref", "=", "100", "else", ":", "type_pref", "=", "0", "return", "(", "1", "<<", "24", ")", "*", "type_pref", "+", "(", "1", "<<", "8", ")", "*", "local_pref", "+", "(", "256", "-", "candidate_component", ")" ]
See RFC 5245 - 4.1.2.1. Recommended Formula
[ "See", "RFC", "5245", "-", "4", ".", "1", ".", "2", ".", "1", ".", "Recommended", "Formula" ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/candidate.py#L13-L28
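Usage note: a worked instance of the formula for a host candidate on component 1 with the default local preference:

from aioice.candidate import candidate_priority

assert candidate_priority(1, 'host') == (1 << 24) * 126 + (1 << 8) * 65535 + (256 - 1)
assert candidate_priority(1, 'host') == 2130706431            # i.e. 0x7EFFFFFF
assert candidate_priority(1, 'srflx') < candidate_priority(1, 'host')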
2,265
aiortc/aioice
aioice/candidate.py
Candidate.to_sdp
def to_sdp(self): """ Return a string representation suitable for SDP. """ sdp = '%s %d %s %d %s %d typ %s' % ( self.foundation, self.component, self.transport, self.priority, self.host, self.port, self.type) if self.related_address is not None: sdp += ' raddr %s' % self.related_address if self.related_port is not None: sdp += ' rport %s' % self.related_port if self.tcptype is not None: sdp += ' tcptype %s' % self.tcptype if self.generation is not None: sdp += ' generation %d' % self.generation return sdp
python
def to_sdp(self): """ Return a string representation suitable for SDP. """ sdp = '%s %d %s %d %s %d typ %s' % ( self.foundation, self.component, self.transport, self.priority, self.host, self.port, self.type) if self.related_address is not None: sdp += ' raddr %s' % self.related_address if self.related_port is not None: sdp += ' rport %s' % self.related_port if self.tcptype is not None: sdp += ' tcptype %s' % self.tcptype if self.generation is not None: sdp += ' generation %d' % self.generation return sdp
[ "def", "to_sdp", "(", "self", ")", ":", "sdp", "=", "'%s %d %s %d %s %d typ %s'", "%", "(", "self", ".", "foundation", ",", "self", ".", "component", ",", "self", ".", "transport", ",", "self", ".", "priority", ",", "self", ".", "host", ",", "self", ".", "port", ",", "self", ".", "type", ")", "if", "self", ".", "related_address", "is", "not", "None", ":", "sdp", "+=", "' raddr %s'", "%", "self", ".", "related_address", "if", "self", ".", "related_port", "is", "not", "None", ":", "sdp", "+=", "' rport %s'", "%", "self", ".", "related_port", "if", "self", ".", "tcptype", "is", "not", "None", ":", "sdp", "+=", "' tcptype %s'", "%", "self", ".", "tcptype", "if", "self", ".", "generation", "is", "not", "None", ":", "sdp", "+=", "' generation %d'", "%", "self", ".", "generation", "return", "sdp" ]
Return a string representation suitable for SDP.
[ "Return", "a", "string", "representation", "suitable", "for", "SDP", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/candidate.py#L85-L105
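Usage note: a sketch of the SDP rendering, assuming the Candidate constructor accepts these fields as keyword arguments (the constructor itself is not shown above) and defaults the optional attributes to None:

from aioice.candidate import Candidate

c = Candidate(foundation='abcd', component=1, transport='udp',
              priority=2130706431, host='192.168.1.10', port=40000, type='host')
assert c.to_sdp() == 'abcd 1 udp 2130706431 192.168.1.10 40000 typ host'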
2,266
aiortc/aioice
aioice/candidate.py
Candidate.can_pair_with
def can_pair_with(self, other): """ A local candidate is paired with a remote candidate if and only if the two candidates have the same component ID and have the same IP address version. """ a = ipaddress.ip_address(self.host) b = ipaddress.ip_address(other.host) return ( self.component == other.component and self.transport.lower() == other.transport.lower() and a.version == b.version )
python
def can_pair_with(self, other): """ A local candidate is paired with a remote candidate if and only if the two candidates have the same component ID and have the same IP address version. """ a = ipaddress.ip_address(self.host) b = ipaddress.ip_address(other.host) return ( self.component == other.component and self.transport.lower() == other.transport.lower() and a.version == b.version )
[ "def", "can_pair_with", "(", "self", ",", "other", ")", ":", "a", "=", "ipaddress", ".", "ip_address", "(", "self", ".", "host", ")", "b", "=", "ipaddress", ".", "ip_address", "(", "other", ".", "host", ")", "return", "(", "self", ".", "component", "==", "other", ".", "component", "and", "self", ".", "transport", ".", "lower", "(", ")", "==", "other", ".", "transport", ".", "lower", "(", ")", "and", "a", ".", "version", "==", "b", ".", "version", ")" ]
A local candidate is paired with a remote candidate if and only if the two candidates have the same component ID and have the same IP address version.
[ "A", "local", "candidate", "is", "paired", "with", "a", "remote", "candidate", "if", "and", "only", "if", "the", "two", "candidates", "have", "the", "same", "component", "ID", "and", "have", "the", "same", "IP", "address", "version", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/candidate.py#L107-L119
2,267
aiortc/aioice
aioice/ice.py
candidate_pair_priority
def candidate_pair_priority(local, remote, ice_controlling): """ See RFC 5245 - 5.7.2. Computing Pair Priority and Ordering Pairs """ G = ice_controlling and local.priority or remote.priority D = ice_controlling and remote.priority or local.priority return (1 << 32) * min(G, D) + 2 * max(G, D) + (G > D and 1 or 0)
python
def candidate_pair_priority(local, remote, ice_controlling): """ See RFC 5245 - 5.7.2. Computing Pair Priority and Ordering Pairs """ G = ice_controlling and local.priority or remote.priority D = ice_controlling and remote.priority or local.priority return (1 << 32) * min(G, D) + 2 * max(G, D) + (G > D and 1 or 0)
[ "def", "candidate_pair_priority", "(", "local", ",", "remote", ",", "ice_controlling", ")", ":", "G", "=", "ice_controlling", "and", "local", ".", "priority", "or", "remote", ".", "priority", "D", "=", "ice_controlling", "and", "remote", ".", "priority", "or", "local", ".", "priority", "return", "(", "1", "<<", "32", ")", "*", "min", "(", "G", ",", "D", ")", "+", "2", "*", "max", "(", "G", ",", "D", ")", "+", "(", "G", ">", "D", "and", "1", "or", "0", ")" ]
See RFC 5245 - 5.7.2. Computing Pair Priority and Ordering Pairs
[ "See", "RFC", "5245", "-", "5", ".", "7", ".", "2", ".", "Computing", "Pair", "Priority", "and", "Ordering", "Pairs" ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L24-L30
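Usage note: a worked instance of the pair formula. The stand-in objects below only need a priority attribute, which is all the function reads:

from aioice.ice import candidate_pair_priority

class Stub:                                      # minimal stand-in for a Candidate
    def __init__(self, priority):
        self.priority = priority

local, remote = Stub(2130706431), Stub(1862270975)
G, D = local.priority, remote.priority                 # controlling side: G = local
expected = (1 << 32) * min(G, D) + 2 * max(G, D) + 1   # +1 because G > D
assert candidate_pair_priority(local, remote, ice_controlling=True) == expected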
2,268
aiortc/aioice
aioice/ice.py
get_host_addresses
def get_host_addresses(use_ipv4, use_ipv6):
    """
    Get local IP addresses.
    """
    addresses = []
    for interface in netifaces.interfaces():
        ifaddresses = netifaces.ifaddresses(interface)
        for address in ifaddresses.get(socket.AF_INET, []):
            if use_ipv4 and address['addr'] != '127.0.0.1':
                addresses.append(address['addr'])
        for address in ifaddresses.get(socket.AF_INET6, []):
            if use_ipv6 and address['addr'] != '::1' and '%' not in address['addr']:
                addresses.append(address['addr'])
    return addresses
python
def get_host_addresses(use_ipv4, use_ipv6):
    """
    Get local IP addresses.
    """
    addresses = []
    for interface in netifaces.interfaces():
        ifaddresses = netifaces.ifaddresses(interface)
        for address in ifaddresses.get(socket.AF_INET, []):
            if use_ipv4 and address['addr'] != '127.0.0.1':
                addresses.append(address['addr'])
        for address in ifaddresses.get(socket.AF_INET6, []):
            if use_ipv6 and address['addr'] != '::1' and '%' not in address['addr']:
                addresses.append(address['addr'])
    return addresses
[ "def", "get_host_addresses", "(", "use_ipv4", ",", "use_ipv6", ")", ":", "addresses", "=", "[", "]", "for", "interface", "in", "netifaces", ".", "interfaces", "(", ")", ":", "ifaddresses", "=", "netifaces", ".", "ifaddresses", "(", "interface", ")", "for", "address", "in", "ifaddresses", ".", "get", "(", "socket", ".", "AF_INET", ",", "[", "]", ")", ":", "if", "use_ipv4", "and", "address", "[", "'addr'", "]", "!=", "'127.0.0.1'", ":", "addresses", ".", "append", "(", "address", "[", "'addr'", "]", ")", "for", "address", "in", "ifaddresses", ".", "get", "(", "socket", ".", "AF_INET6", ",", "[", "]", ")", ":", "if", "use_ipv6", "and", "address", "[", "'addr'", "]", "!=", "'::1'", "and", "'%'", "not", "in", "address", "[", "'addr'", "]", ":", "addresses", ".", "append", "(", "address", "[", "'addr'", "]", ")", "return", "addresses" ]
Get local IP addresses.
[ "Get", "local", "IP", "addresses", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L33-L46
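For a rough sense of what this returns, the sketch below assumes aioice (and its netifaces dependency) is installed; the exact addresses naturally depend on the machine. IPv6 link-local addresses are excluded because netifaces reports them with a %scope suffix, which the '%' check filters out.

# Assumes the aioice package is importable; output depends on your interfaces.
from aioice.ice import get_host_addresses

print(get_host_addresses(use_ipv4=True, use_ipv6=False))   # e.g. ['192.168.1.23']
print(get_host_addresses(use_ipv4=False, use_ipv6=True))   # global IPv6 addresses only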
2,269
aiortc/aioice
aioice/ice.py
server_reflexive_candidate
async def server_reflexive_candidate(protocol, stun_server):
    """
    Query STUN server to obtain a server-reflexive candidate.
    """
    # lookup address
    loop = asyncio.get_event_loop()
    stun_server = (
        await loop.run_in_executor(None, socket.gethostbyname, stun_server[0]),
        stun_server[1])

    # perform STUN query
    request = stun.Message(message_method=stun.Method.BINDING,
                           message_class=stun.Class.REQUEST)
    response, _ = await protocol.request(request, stun_server)

    local_candidate = protocol.local_candidate
    return Candidate(
        foundation=candidate_foundation('srflx', 'udp', local_candidate.host),
        component=local_candidate.component,
        transport=local_candidate.transport,
        priority=candidate_priority(local_candidate.component, 'srflx'),
        host=response.attributes['XOR-MAPPED-ADDRESS'][0],
        port=response.attributes['XOR-MAPPED-ADDRESS'][1],
        type='srflx',
        related_address=local_candidate.host,
        related_port=local_candidate.port)
python
async def server_reflexive_candidate(protocol, stun_server):
    """
    Query STUN server to obtain a server-reflexive candidate.
    """
    # lookup address
    loop = asyncio.get_event_loop()
    stun_server = (
        await loop.run_in_executor(None, socket.gethostbyname, stun_server[0]),
        stun_server[1])

    # perform STUN query
    request = stun.Message(message_method=stun.Method.BINDING,
                           message_class=stun.Class.REQUEST)
    response, _ = await protocol.request(request, stun_server)

    local_candidate = protocol.local_candidate
    return Candidate(
        foundation=candidate_foundation('srflx', 'udp', local_candidate.host),
        component=local_candidate.component,
        transport=local_candidate.transport,
        priority=candidate_priority(local_candidate.component, 'srflx'),
        host=response.attributes['XOR-MAPPED-ADDRESS'][0],
        port=response.attributes['XOR-MAPPED-ADDRESS'][1],
        type='srflx',
        related_address=local_candidate.host,
        related_port=local_candidate.port)
[ "async", "def", "server_reflexive_candidate", "(", "protocol", ",", "stun_server", ")", ":", "# lookup address", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "stun_server", "=", "(", "await", "loop", ".", "run_in_executor", "(", "None", ",", "socket", ".", "gethostbyname", ",", "stun_server", "[", "0", "]", ")", ",", "stun_server", "[", "1", "]", ")", "# perform STUN query", "request", "=", "stun", ".", "Message", "(", "message_method", "=", "stun", ".", "Method", ".", "BINDING", ",", "message_class", "=", "stun", ".", "Class", ".", "REQUEST", ")", "response", ",", "_", "=", "await", "protocol", ".", "request", "(", "request", ",", "stun_server", ")", "local_candidate", "=", "protocol", ".", "local_candidate", "return", "Candidate", "(", "foundation", "=", "candidate_foundation", "(", "'srflx'", ",", "'udp'", ",", "local_candidate", ".", "host", ")", ",", "component", "=", "local_candidate", ".", "component", ",", "transport", "=", "local_candidate", ".", "transport", ",", "priority", "=", "candidate_priority", "(", "local_candidate", ".", "component", ",", "'srflx'", ")", ",", "host", "=", "response", ".", "attributes", "[", "'XOR-MAPPED-ADDRESS'", "]", "[", "0", "]", ",", "port", "=", "response", ".", "attributes", "[", "'XOR-MAPPED-ADDRESS'", "]", "[", "1", "]", ",", "type", "=", "'srflx'", ",", "related_address", "=", "local_candidate", ".", "host", ",", "related_port", "=", "local_candidate", ".", "port", ")" ]
Query STUN server to obtain a server-reflexive candidate.
[ "Query", "STUN", "server", "to", "obtain", "a", "server", "-", "reflexive", "candidate", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L49-L74
2,270
aiortc/aioice
aioice/ice.py
sort_candidate_pairs
def sort_candidate_pairs(pairs, ice_controlling):
    """
    Sort a list of candidate pairs.
    """
    def pair_priority(pair):
        return -candidate_pair_priority(pair.local_candidate,
                                        pair.remote_candidate,
                                        ice_controlling)
    pairs.sort(key=pair_priority)
python
def sort_candidate_pairs(pairs, ice_controlling):
    """
    Sort a list of candidate pairs.
    """
    def pair_priority(pair):
        return -candidate_pair_priority(pair.local_candidate,
                                        pair.remote_candidate,
                                        ice_controlling)
    pairs.sort(key=pair_priority)
[ "def", "sort_candidate_pairs", "(", "pairs", ",", "ice_controlling", ")", ":", "def", "pair_priority", "(", "pair", ")", ":", "return", "-", "candidate_pair_priority", "(", "pair", ".", "local_candidate", ",", "pair", ".", "remote_candidate", ",", "ice_controlling", ")", "pairs", ".", "sort", "(", "key", "=", "pair_priority", ")" ]
Sort a list of candidate pairs.
[ "Sort", "a", "list", "of", "candidate", "pairs", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L77-L86
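The negation inside pair_priority is what turns Python's ascending sort into the descending order the check list needs. A self-contained sketch with stub objects (not the library's classes) shows the highest-priority pair ending up first:

from collections import namedtuple

Cand = namedtuple('Cand', 'priority')
Pair = namedtuple('Pair', 'local_candidate remote_candidate')

def candidate_pair_priority(local, remote, ice_controlling):
    G = ice_controlling and local.priority or remote.priority
    D = ice_controlling and remote.priority or local.priority
    return (1 << 32) * min(G, D) + 2 * max(G, D) + (G > D and 1 or 0)

pairs = [Pair(Cand(100), Cand(50)), Pair(Cand(300), Cand(200))]
pairs.sort(key=lambda p: -candidate_pair_priority(
    p.local_candidate, p.remote_candidate, ice_controlling=True))
print([p.local_candidate.priority for p in pairs])  # [300, 100] -- highest first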
2,271
aiortc/aioice
aioice/ice.py
StunProtocol.send_stun
def send_stun(self, message, addr):
        """
        Send a STUN message.
        """
        self.__log_debug('> %s %s', addr, message)
        self.transport.sendto(bytes(message), addr)
python
def send_stun(self, message, addr):
        """
        Send a STUN message.
        """
        self.__log_debug('> %s %s', addr, message)
        self.transport.sendto(bytes(message), addr)
[ "def", "send_stun", "(", "self", ",", "message", ",", "addr", ")", ":", "self", ".", "__log_debug", "(", "'> %s %s'", ",", "addr", ",", "message", ")", "self", ".", "transport", ".", "sendto", "(", "bytes", "(", "message", ")", ",", "addr", ")" ]
Send a STUN message.
[ "Send", "a", "STUN", "message", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L200-L205
2,272
aiortc/aioice
aioice/ice.py
Connection.add_remote_candidate
def add_remote_candidate(self, remote_candidate):
        """
        Add a remote candidate or signal end-of-candidates.

        To signal end-of-candidates, pass `None`.
        """
        if self._remote_candidates_end:
            raise ValueError('Cannot add remote candidate after end-of-candidates.')

        if remote_candidate is None:
            self._prune_components()
            self._remote_candidates_end = True
            return

        self._remote_candidates.append(remote_candidate)
        for protocol in self._protocols:
            if (protocol.local_candidate.can_pair_with(remote_candidate) and
                    not self._find_pair(protocol, remote_candidate)):
                pair = CandidatePair(protocol, remote_candidate)
                self._check_list.append(pair)
        self.sort_check_list()
python
def add_remote_candidate(self, remote_candidate):
        """
        Add a remote candidate or signal end-of-candidates.

        To signal end-of-candidates, pass `None`.
        """
        if self._remote_candidates_end:
            raise ValueError('Cannot add remote candidate after end-of-candidates.')

        if remote_candidate is None:
            self._prune_components()
            self._remote_candidates_end = True
            return

        self._remote_candidates.append(remote_candidate)
        for protocol in self._protocols:
            if (protocol.local_candidate.can_pair_with(remote_candidate) and
                    not self._find_pair(protocol, remote_candidate)):
                pair = CandidatePair(protocol, remote_candidate)
                self._check_list.append(pair)
        self.sort_check_list()
[ "def", "add_remote_candidate", "(", "self", ",", "remote_candidate", ")", ":", "if", "self", ".", "_remote_candidates_end", ":", "raise", "ValueError", "(", "'Cannot add remote candidate after end-of-candidates.'", ")", "if", "remote_candidate", "is", "None", ":", "self", ".", "_prune_components", "(", ")", "self", ".", "_remote_candidates_end", "=", "True", "return", "self", ".", "_remote_candidates", ".", "append", "(", "remote_candidate", ")", "for", "protocol", "in", "self", ".", "_protocols", ":", "if", "(", "protocol", ".", "local_candidate", ".", "can_pair_with", "(", "remote_candidate", ")", "and", "not", "self", ".", "_find_pair", "(", "protocol", ",", "remote_candidate", ")", ")", ":", "pair", "=", "CandidatePair", "(", "protocol", ",", "remote_candidate", ")", "self", ".", "_check_list", ".", "append", "(", "pair", ")", "self", ".", "sort_check_list", "(", ")" ]
Add a remote candidate or signal end-of-candidates.

To signal end-of-candidates, pass `None`.
[ "Add", "a", "remote", "candidate", "or", "signal", "end", "-", "of", "-", "candidates", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L299-L319
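A typical trickle-ICE call pattern, sketched against a hypothetical application: remote_candidates stands in for whatever iterable of aioice Candidate objects your signalling channel delivers, and connection is an assumed aioice Connection.

# Hypothetical usage: candidates trickle in over signalling, one at a time.
for candidate in remote_candidates:
    connection.add_remote_candidate(candidate)

# Signal end-of-candidates; components with no remote candidates are pruned.
connection.add_remote_candidate(None)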
2,273
aiortc/aioice
aioice/ice.py
Connection.gather_candidates
async def gather_candidates(self):
        """
        Gather local candidates.

        You **must** call this coroutine before calling :meth:`connect`.
        """
        if not self._local_candidates_start:
            self._local_candidates_start = True
            addresses = get_host_addresses(use_ipv4=self._use_ipv4, use_ipv6=self._use_ipv6)
            for component in self._components:
                self._local_candidates += await self.get_component_candidates(
                    component=component,
                    addresses=addresses)
            self._local_candidates_end = True
python
async def gather_candidates(self):
        """
        Gather local candidates.

        You **must** call this coroutine before calling :meth:`connect`.
        """
        if not self._local_candidates_start:
            self._local_candidates_start = True
            addresses = get_host_addresses(use_ipv4=self._use_ipv4, use_ipv6=self._use_ipv6)
            for component in self._components:
                self._local_candidates += await self.get_component_candidates(
                    component=component,
                    addresses=addresses)
            self._local_candidates_end = True
[ "async", "def", "gather_candidates", "(", "self", ")", ":", "if", "not", "self", ".", "_local_candidates_start", ":", "self", ".", "_local_candidates_start", "=", "True", "addresses", "=", "get_host_addresses", "(", "use_ipv4", "=", "self", ".", "_use_ipv4", ",", "use_ipv6", "=", "self", ".", "_use_ipv6", ")", "for", "component", "in", "self", ".", "_components", ":", "self", ".", "_local_candidates", "+=", "await", "self", ".", "get_component_candidates", "(", "component", "=", "component", ",", "addresses", "=", "addresses", ")", "self", ".", "_local_candidates_end", "=", "True" ]
Gather local candidates.

You **must** call this coroutine before calling :meth:`connect`.
[ "Gather", "local", "candidates", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L321-L334
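Putting the pieces together, the usual call order is: gather, exchange candidates and credentials out of band, then connect. A skeleton under those assumptions (not runnable end-to-end, since the signalling exchange is elided):

import asyncio
import aioice

async def run():
    conn = aioice.Connection(ice_controlling=True)
    await conn.gather_candidates()    # must complete before connect()
    # ... send conn.local_candidates, conn.local_username and
    # conn.local_password to the peer; set conn.remote_username,
    # conn.remote_password and call conn.add_remote_candidate() ...
    await conn.connect()              # raises ConnectionError on failure

asyncio.get_event_loop().run_until_complete(run())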
2,274
aiortc/aioice
aioice/ice.py
Connection.get_default_candidate
def get_default_candidate(self, component):
        """
        Gets the default local candidate for the specified component.
        """
        for candidate in sorted(self._local_candidates, key=lambda x: x.priority):
            if candidate.component == component:
                return candidate
python
def get_default_candidate(self, component):
        """
        Gets the default local candidate for the specified component.
        """
        for candidate in sorted(self._local_candidates, key=lambda x: x.priority):
            if candidate.component == component:
                return candidate
[ "def", "get_default_candidate", "(", "self", ",", "component", ")", ":", "for", "candidate", "in", "sorted", "(", "self", ".", "_local_candidates", ",", "key", "=", "lambda", "x", ":", "x", ".", "priority", ")", ":", "if", "candidate", ".", "component", "==", "component", ":", "return", "candidate" ]
Gets the default local candidate for the specified component.
[ "Gets", "the", "default", "local", "candidate", "for", "the", "specified", "component", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L336-L342
2,275
aiortc/aioice
aioice/ice.py
Connection.connect
async def connect(self):
        """
        Perform ICE handshake.

        This coroutine returns if a candidate pair was successfully nominated
        and raises an exception otherwise.
        """
        if not self._local_candidates_end:
            raise ConnectionError('Local candidates gathering was not performed')
        if (self.remote_username is None or
                self.remote_password is None):
            raise ConnectionError('Remote username or password is missing')

        # 5.7.1. Forming Candidate Pairs
        for remote_candidate in self._remote_candidates:
            for protocol in self._protocols:
                if (protocol.local_candidate.can_pair_with(remote_candidate) and
                        not self._find_pair(protocol, remote_candidate)):
                    pair = CandidatePair(protocol, remote_candidate)
                    self._check_list.append(pair)
        self.sort_check_list()

        self._unfreeze_initial()

        # handle early checks
        for check in self._early_checks:
            self.check_incoming(*check)
        self._early_checks = []

        # perform checks
        while True:
            if not self.check_periodic():
                break
            await asyncio.sleep(0.02)

        # wait for completion
        if self._check_list:
            res = await self._check_list_state.get()
        else:
            res = ICE_FAILED

        # cancel remaining checks
        for check in self._check_list:
            if check.handle:
                check.handle.cancel()

        if res != ICE_COMPLETED:
            raise ConnectionError('ICE negotiation failed')

        # start consent freshness tests
        self._query_consent_handle = asyncio.ensure_future(self.query_consent())
python
async def connect(self):
        """
        Perform ICE handshake.

        This coroutine returns if a candidate pair was successfully nominated
        and raises an exception otherwise.
        """
        if not self._local_candidates_end:
            raise ConnectionError('Local candidates gathering was not performed')
        if (self.remote_username is None or
                self.remote_password is None):
            raise ConnectionError('Remote username or password is missing')

        # 5.7.1. Forming Candidate Pairs
        for remote_candidate in self._remote_candidates:
            for protocol in self._protocols:
                if (protocol.local_candidate.can_pair_with(remote_candidate) and
                        not self._find_pair(protocol, remote_candidate)):
                    pair = CandidatePair(protocol, remote_candidate)
                    self._check_list.append(pair)
        self.sort_check_list()

        self._unfreeze_initial()

        # handle early checks
        for check in self._early_checks:
            self.check_incoming(*check)
        self._early_checks = []

        # perform checks
        while True:
            if not self.check_periodic():
                break
            await asyncio.sleep(0.02)

        # wait for completion
        if self._check_list:
            res = await self._check_list_state.get()
        else:
            res = ICE_FAILED

        # cancel remaining checks
        for check in self._check_list:
            if check.handle:
                check.handle.cancel()

        if res != ICE_COMPLETED:
            raise ConnectionError('ICE negotiation failed')

        # start consent freshness tests
        self._query_consent_handle = asyncio.ensure_future(self.query_consent())
[ "async", "def", "connect", "(", "self", ")", ":", "if", "not", "self", ".", "_local_candidates_end", ":", "raise", "ConnectionError", "(", "'Local candidates gathering was not performed'", ")", "if", "(", "self", ".", "remote_username", "is", "None", "or", "self", ".", "remote_password", "is", "None", ")", ":", "raise", "ConnectionError", "(", "'Remote username or password is missing'", ")", "# 5.7.1. Forming Candidate Pairs", "for", "remote_candidate", "in", "self", ".", "_remote_candidates", ":", "for", "protocol", "in", "self", ".", "_protocols", ":", "if", "(", "protocol", ".", "local_candidate", ".", "can_pair_with", "(", "remote_candidate", ")", "and", "not", "self", ".", "_find_pair", "(", "protocol", ",", "remote_candidate", ")", ")", ":", "pair", "=", "CandidatePair", "(", "protocol", ",", "remote_candidate", ")", "self", ".", "_check_list", ".", "append", "(", "pair", ")", "self", ".", "sort_check_list", "(", ")", "self", ".", "_unfreeze_initial", "(", ")", "# handle early checks", "for", "check", "in", "self", ".", "_early_checks", ":", "self", ".", "check_incoming", "(", "*", "check", ")", "self", ".", "_early_checks", "=", "[", "]", "# perform checks", "while", "True", ":", "if", "not", "self", ".", "check_periodic", "(", ")", ":", "break", "await", "asyncio", ".", "sleep", "(", "0.02", ")", "# wait for completion", "if", "self", ".", "_check_list", ":", "res", "=", "await", "self", ".", "_check_list_state", ".", "get", "(", ")", "else", ":", "res", "=", "ICE_FAILED", "# cancel remaining checks", "for", "check", "in", "self", ".", "_check_list", ":", "if", "check", ".", "handle", ":", "check", ".", "handle", ".", "cancel", "(", ")", "if", "res", "!=", "ICE_COMPLETED", ":", "raise", "ConnectionError", "(", "'ICE negotiation failed'", ")", "# start consent freshness tests", "self", ".", "_query_consent_handle", "=", "asyncio", ".", "ensure_future", "(", "self", ".", "query_consent", "(", ")", ")" ]
Perform ICE handshake.

This coroutine returns if a candidate pair was successfully nominated
and raises an exception otherwise.
[ "Perform", "ICE", "handshake", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L344-L395
2,276
aiortc/aioice
aioice/ice.py
Connection.recvfrom
async def recvfrom(self):
        """
        Receive the next datagram.

        The return value is a `(bytes, component)` tuple where `bytes` is a
        bytes object representing the data received and `component` is the
        component on which the data was received.

        If the connection is not established, a `ConnectionError` is raised.
        """
        if not len(self._nominated):
            raise ConnectionError('Cannot receive data, not connected')

        result = await self._queue.get()
        if result[0] is None:
            raise ConnectionError('Connection lost while receiving data')
        return result
python
async def recvfrom(self):
        """
        Receive the next datagram.

        The return value is a `(bytes, component)` tuple where `bytes` is a
        bytes object representing the data received and `component` is the
        component on which the data was received.

        If the connection is not established, a `ConnectionError` is raised.
        """
        if not len(self._nominated):
            raise ConnectionError('Cannot receive data, not connected')

        result = await self._queue.get()
        if result[0] is None:
            raise ConnectionError('Connection lost while receiving data')
        return result
[ "async", "def", "recvfrom", "(", "self", ")", ":", "if", "not", "len", "(", "self", ".", "_nominated", ")", ":", "raise", "ConnectionError", "(", "'Cannot receive data, not connected'", ")", "result", "=", "await", "self", ".", "_queue", ".", "get", "(", ")", "if", "result", "[", "0", "]", "is", "None", ":", "raise", "ConnectionError", "(", "'Connection lost while receiving data'", ")", "return", "result" ]
Receive the next datagram.

The return value is a `(bytes, component)` tuple where `bytes` is a
bytes object representing the data received and `component` is the
component on which the data was received.

If the connection is not established, a `ConnectionError` is raised.
[ "Receive", "the", "next", "datagram", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L430-L446
2,277
aiortc/aioice
aioice/ice.py
Connection.sendto
async def sendto(self, data, component):
        """
        Send a datagram on the specified component.

        If the connection is not established, a `ConnectionError` is raised.
        """
        active_pair = self._nominated.get(component)
        if active_pair:
            await active_pair.protocol.send_data(data, active_pair.remote_addr)
        else:
            raise ConnectionError('Cannot send data, not connected')
python
async def sendto(self, data, component):
        """
        Send a datagram on the specified component.

        If the connection is not established, a `ConnectionError` is raised.
        """
        active_pair = self._nominated.get(component)
        if active_pair:
            await active_pair.protocol.send_data(data, active_pair.remote_addr)
        else:
            raise ConnectionError('Cannot send data, not connected')
[ "async", "def", "sendto", "(", "self", ",", "data", ",", "component", ")", ":", "active_pair", "=", "self", ".", "_nominated", ".", "get", "(", "component", ")", "if", "active_pair", ":", "await", "active_pair", ".", "protocol", ".", "send_data", "(", "data", ",", "active_pair", ".", "remote_addr", ")", "else", ":", "raise", "ConnectionError", "(", "'Cannot send data, not connected'", ")" ]
Send a datagram on the specified component.

If the connection is not established, a `ConnectionError` is raised.
[ "Send", "a", "datagram", "on", "the", "specified", "component", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L456-L466
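Once connect() succeeds, these two coroutines form the whole datagram path. A minimal echo loop, assuming conn is an aioice Connection on which connect() has already completed:

# Echo whatever arrives back on the same component.
async def echo(conn):
    while True:
        data, component = await conn.recvfrom()
        await conn.sendto(data, component)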
2,278
aiortc/aioice
aioice/ice.py
Connection.set_selected_pair
def set_selected_pair(self, component, local_foundation, remote_foundation):
        """
        Force the selected candidate pair.

        If the remote party does not support ICE, you should use this
        instead of calling :meth:`connect`.
        """
        # find local candidate
        protocol = None
        for p in self._protocols:
            if (p.local_candidate.component == component and
                    p.local_candidate.foundation == local_foundation):
                protocol = p
                break

        # find remote candidate
        remote_candidate = None
        for c in self._remote_candidates:
            if c.component == component and c.foundation == remote_foundation:
                remote_candidate = c

        assert (protocol and remote_candidate)
        self._nominated[component] = CandidatePair(protocol, remote_candidate)
python
def set_selected_pair(self, component, local_foundation, remote_foundation):
        """
        Force the selected candidate pair.

        If the remote party does not support ICE, you should use this
        instead of calling :meth:`connect`.
        """
        # find local candidate
        protocol = None
        for p in self._protocols:
            if (p.local_candidate.component == component and
                    p.local_candidate.foundation == local_foundation):
                protocol = p
                break

        # find remote candidate
        remote_candidate = None
        for c in self._remote_candidates:
            if c.component == component and c.foundation == remote_foundation:
                remote_candidate = c

        assert (protocol and remote_candidate)
        self._nominated[component] = CandidatePair(protocol, remote_candidate)
[ "def", "set_selected_pair", "(", "self", ",", "component", ",", "local_foundation", ",", "remote_foundation", ")", ":", "# find local candidate", "protocol", "=", "None", "for", "p", "in", "self", ".", "_protocols", ":", "if", "(", "p", ".", "local_candidate", ".", "component", "==", "component", "and", "p", ".", "local_candidate", ".", "foundation", "==", "local_foundation", ")", ":", "protocol", "=", "p", "break", "# find remote candidate", "remote_candidate", "=", "None", "for", "c", "in", "self", ".", "_remote_candidates", ":", "if", "c", ".", "component", "==", "component", "and", "c", ".", "foundation", "==", "remote_foundation", ":", "remote_candidate", "=", "c", "assert", "(", "protocol", "and", "remote_candidate", ")", "self", ".", "_nominated", "[", "component", "]", "=", "CandidatePair", "(", "protocol", ",", "remote_candidate", ")" ]
Force the selected candidate pair.

If the remote party does not support ICE, you should use this
instead of calling :meth:`connect`.
[ "Force", "the", "selected", "candidate", "pair", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L468-L490
2,279
aiortc/aioice
aioice/ice.py
Connection.check_incoming
def check_incoming(self, message, addr, protocol):
        """
        Handle a successful incoming check.
        """
        component = protocol.local_candidate.component

        # find remote candidate
        remote_candidate = None
        for c in self._remote_candidates:
            if c.host == addr[0] and c.port == addr[1]:
                remote_candidate = c
                assert remote_candidate.component == component
                break
        if remote_candidate is None:
            # 7.2.1.3. Learning Peer Reflexive Candidates
            remote_candidate = Candidate(
                foundation=random_string(10),
                component=component,
                transport='udp',
                priority=message.attributes['PRIORITY'],
                host=addr[0],
                port=addr[1],
                type='prflx')
            self._remote_candidates.append(remote_candidate)
            self.__log_info('Discovered peer reflexive candidate %s', remote_candidate)

        # find pair
        pair = self._find_pair(protocol, remote_candidate)
        if pair is None:
            pair = CandidatePair(protocol, remote_candidate)
            pair.state = CandidatePair.State.WAITING
            self._check_list.append(pair)
            self.sort_check_list()

        # triggered check
        if pair.state in [CandidatePair.State.WAITING, CandidatePair.State.FAILED]:
            pair.handle = asyncio.ensure_future(self.check_start(pair))

        # 7.2.1.5. Updating the Nominated Flag
        if 'USE-CANDIDATE' in message.attributes and not self.ice_controlling:
            pair.remote_nominated = True

            if pair.state == CandidatePair.State.SUCCEEDED:
                pair.nominated = True
                self.check_complete(pair)
python
def check_incoming(self, message, addr, protocol):
        """
        Handle a successful incoming check.
        """
        component = protocol.local_candidate.component

        # find remote candidate
        remote_candidate = None
        for c in self._remote_candidates:
            if c.host == addr[0] and c.port == addr[1]:
                remote_candidate = c
                assert remote_candidate.component == component
                break
        if remote_candidate is None:
            # 7.2.1.3. Learning Peer Reflexive Candidates
            remote_candidate = Candidate(
                foundation=random_string(10),
                component=component,
                transport='udp',
                priority=message.attributes['PRIORITY'],
                host=addr[0],
                port=addr[1],
                type='prflx')
            self._remote_candidates.append(remote_candidate)
            self.__log_info('Discovered peer reflexive candidate %s', remote_candidate)

        # find pair
        pair = self._find_pair(protocol, remote_candidate)
        if pair is None:
            pair = CandidatePair(protocol, remote_candidate)
            pair.state = CandidatePair.State.WAITING
            self._check_list.append(pair)
            self.sort_check_list()

        # triggered check
        if pair.state in [CandidatePair.State.WAITING, CandidatePair.State.FAILED]:
            pair.handle = asyncio.ensure_future(self.check_start(pair))

        # 7.2.1.5. Updating the Nominated Flag
        if 'USE-CANDIDATE' in message.attributes and not self.ice_controlling:
            pair.remote_nominated = True

            if pair.state == CandidatePair.State.SUCCEEDED:
                pair.nominated = True
                self.check_complete(pair)
[ "def", "check_incoming", "(", "self", ",", "message", ",", "addr", ",", "protocol", ")", ":", "component", "=", "protocol", ".", "local_candidate", ".", "component", "# find remote candidate", "remote_candidate", "=", "None", "for", "c", "in", "self", ".", "_remote_candidates", ":", "if", "c", ".", "host", "==", "addr", "[", "0", "]", "and", "c", ".", "port", "==", "addr", "[", "1", "]", ":", "remote_candidate", "=", "c", "assert", "remote_candidate", ".", "component", "==", "component", "break", "if", "remote_candidate", "is", "None", ":", "# 7.2.1.3. Learning Peer Reflexive Candidates", "remote_candidate", "=", "Candidate", "(", "foundation", "=", "random_string", "(", "10", ")", ",", "component", "=", "component", ",", "transport", "=", "'udp'", ",", "priority", "=", "message", ".", "attributes", "[", "'PRIORITY'", "]", ",", "host", "=", "addr", "[", "0", "]", ",", "port", "=", "addr", "[", "1", "]", ",", "type", "=", "'prflx'", ")", "self", ".", "_remote_candidates", ".", "append", "(", "remote_candidate", ")", "self", ".", "__log_info", "(", "'Discovered peer reflexive candidate %s'", ",", "remote_candidate", ")", "# find pair", "pair", "=", "self", ".", "_find_pair", "(", "protocol", ",", "remote_candidate", ")", "if", "pair", "is", "None", ":", "pair", "=", "CandidatePair", "(", "protocol", ",", "remote_candidate", ")", "pair", ".", "state", "=", "CandidatePair", ".", "State", ".", "WAITING", "self", ".", "_check_list", ".", "append", "(", "pair", ")", "self", ".", "sort_check_list", "(", ")", "# triggered check", "if", "pair", ".", "state", "in", "[", "CandidatePair", ".", "State", ".", "WAITING", ",", "CandidatePair", ".", "State", ".", "FAILED", "]", ":", "pair", ".", "handle", "=", "asyncio", ".", "ensure_future", "(", "self", ".", "check_start", "(", "pair", ")", ")", "# 7.2.1.5. Updating the Nominated Flag", "if", "'USE-CANDIDATE'", "in", "message", ".", "attributes", "and", "not", "self", ".", "ice_controlling", ":", "pair", ".", "remote_nominated", "=", "True", "if", "pair", ".", "state", "==", "CandidatePair", ".", "State", ".", "SUCCEEDED", ":", "pair", ".", "nominated", "=", "True", "self", ".", "check_complete", "(", "pair", ")" ]
Handle a successful incoming check.
[ "Handle", "a", "succesful", "incoming", "check", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L554-L598
2,280
aiortc/aioice
aioice/ice.py
Connection.check_start
async def check_start(self, pair):
        """
        Starts a check.
        """
        self.check_state(pair, CandidatePair.State.IN_PROGRESS)

        request = self.build_request(pair)
        try:
            response, addr = await pair.protocol.request(
                request, pair.remote_addr,
                integrity_key=self.remote_password.encode('utf8'))
        except exceptions.TransactionError as exc:
            # 7.1.3.1. Failure Cases
            if exc.response and exc.response.attributes.get('ERROR-CODE', (None, None))[0] == 487:
                if 'ICE-CONTROLLING' in request.attributes:
                    self.switch_role(ice_controlling=False)
                elif 'ICE-CONTROLLED' in request.attributes:
                    self.switch_role(ice_controlling=True)
                return await self.check_start(pair)
            else:
                self.check_state(pair, CandidatePair.State.FAILED)
                self.check_complete(pair)
                return

        # check remote address matches
        if addr != pair.remote_addr:
            self.__log_info('Check %s failed : source address mismatch', pair)
            self.check_state(pair, CandidatePair.State.FAILED)
            self.check_complete(pair)
            return

        # success
        self.check_state(pair, CandidatePair.State.SUCCEEDED)
        if self.ice_controlling or pair.remote_nominated:
            pair.nominated = True
        self.check_complete(pair)
python
async def check_start(self, pair):
        """
        Starts a check.
        """
        self.check_state(pair, CandidatePair.State.IN_PROGRESS)

        request = self.build_request(pair)
        try:
            response, addr = await pair.protocol.request(
                request, pair.remote_addr,
                integrity_key=self.remote_password.encode('utf8'))
        except exceptions.TransactionError as exc:
            # 7.1.3.1. Failure Cases
            if exc.response and exc.response.attributes.get('ERROR-CODE', (None, None))[0] == 487:
                if 'ICE-CONTROLLING' in request.attributes:
                    self.switch_role(ice_controlling=False)
                elif 'ICE-CONTROLLED' in request.attributes:
                    self.switch_role(ice_controlling=True)
                return await self.check_start(pair)
            else:
                self.check_state(pair, CandidatePair.State.FAILED)
                self.check_complete(pair)
                return

        # check remote address matches
        if addr != pair.remote_addr:
            self.__log_info('Check %s failed : source address mismatch', pair)
            self.check_state(pair, CandidatePair.State.FAILED)
            self.check_complete(pair)
            return

        # success
        self.check_state(pair, CandidatePair.State.SUCCEEDED)
        if self.ice_controlling or pair.remote_nominated:
            pair.nominated = True
        self.check_complete(pair)
[ "async", "def", "check_start", "(", "self", ",", "pair", ")", ":", "self", ".", "check_state", "(", "pair", ",", "CandidatePair", ".", "State", ".", "IN_PROGRESS", ")", "request", "=", "self", ".", "build_request", "(", "pair", ")", "try", ":", "response", ",", "addr", "=", "await", "pair", ".", "protocol", ".", "request", "(", "request", ",", "pair", ".", "remote_addr", ",", "integrity_key", "=", "self", ".", "remote_password", ".", "encode", "(", "'utf8'", ")", ")", "except", "exceptions", ".", "TransactionError", "as", "exc", ":", "# 7.1.3.1. Failure Cases", "if", "exc", ".", "response", "and", "exc", ".", "response", ".", "attributes", ".", "get", "(", "'ERROR-CODE'", ",", "(", "None", ",", "None", ")", ")", "[", "0", "]", "==", "487", ":", "if", "'ICE-CONTROLLING'", "in", "request", ".", "attributes", ":", "self", ".", "switch_role", "(", "ice_controlling", "=", "False", ")", "elif", "'ICE-CONTROLLED'", "in", "request", ".", "attributes", ":", "self", ".", "switch_role", "(", "ice_controlling", "=", "True", ")", "return", "await", "self", ".", "check_start", "(", "pair", ")", "else", ":", "self", ".", "check_state", "(", "pair", ",", "CandidatePair", ".", "State", ".", "FAILED", ")", "self", ".", "check_complete", "(", "pair", ")", "return", "# check remote address matches", "if", "addr", "!=", "pair", ".", "remote_addr", ":", "self", ".", "__log_info", "(", "'Check %s failed : source address mismatch'", ",", "pair", ")", "self", ".", "check_state", "(", "pair", ",", "CandidatePair", ".", "State", ".", "FAILED", ")", "self", ".", "check_complete", "(", "pair", ")", "return", "# success", "self", ".", "check_state", "(", "pair", ",", "CandidatePair", ".", "State", ".", "SUCCEEDED", ")", "if", "self", ".", "ice_controlling", "or", "pair", ".", "remote_nominated", ":", "pair", ".", "nominated", "=", "True", "self", ".", "check_complete", "(", "pair", ")" ]
Starts a check.
[ "Starts", "a", "check", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L619-L654
2,281
aiortc/aioice
aioice/ice.py
Connection.check_state
def check_state(self, pair, state):
        """
        Updates the state of a check.
        """
        self.__log_info('Check %s %s -> %s', pair, pair.state, state)
        pair.state = state
python
def check_state(self, pair, state):
        """
        Updates the state of a check.
        """
        self.__log_info('Check %s %s -> %s', pair, pair.state, state)
        pair.state = state
[ "def", "check_state", "(", "self", ",", "pair", ",", "state", ")", ":", "self", ".", "__log_info", "(", "'Check %s %s -> %s'", ",", "pair", ",", "pair", ".", "state", ",", "state", ")", "pair", ".", "state", "=", "state" ]
Updates the state of a check.
[ "Updates", "the", "state", "of", "a", "check", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L656-L661
2,282
aiortc/aioice
aioice/ice.py
Connection._find_pair
def _find_pair(self, protocol, remote_candidate):
        """
        Find a candidate pair in the check list.
        """
        for pair in self._check_list:
            if (pair.protocol == protocol and pair.remote_candidate == remote_candidate):
                return pair
        return None
python
def _find_pair(self, protocol, remote_candidate):
        """
        Find a candidate pair in the check list.
        """
        for pair in self._check_list:
            if (pair.protocol == protocol and pair.remote_candidate == remote_candidate):
                return pair
        return None
[ "def", "_find_pair", "(", "self", ",", "protocol", ",", "remote_candidate", ")", ":", "for", "pair", "in", "self", ".", "_check_list", ":", "if", "(", "pair", ".", "protocol", "==", "protocol", "and", "pair", ".", "remote_candidate", "==", "remote_candidate", ")", ":", "return", "pair", "return", "None" ]
Find a candidate pair in the check list.
[ "Find", "a", "candidate", "pair", "in", "the", "check", "list", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L663-L670
2,283
aiortc/aioice
aioice/ice.py
Connection._prune_components
def _prune_components(self):
        """
        Remove components for which the remote party did not provide any
        candidates.

        This can only be determined after end-of-candidates.
        """
        seen_components = set(map(lambda x: x.component, self._remote_candidates))
        missing_components = self._components - seen_components
        if missing_components:
            self.__log_info('Components %s have no candidate pairs' % missing_components)
            self._components = seen_components
python
def _prune_components(self):
        """
        Remove components for which the remote party did not provide any
        candidates.

        This can only be determined after end-of-candidates.
        """
        seen_components = set(map(lambda x: x.component, self._remote_candidates))
        missing_components = self._components - seen_components
        if missing_components:
            self.__log_info('Components %s have no candidate pairs' % missing_components)
            self._components = seen_components
[ "def", "_prune_components", "(", "self", ")", ":", "seen_components", "=", "set", "(", "map", "(", "lambda", "x", ":", "x", ".", "component", ",", "self", ".", "_remote_candidates", ")", ")", "missing_components", "=", "self", ".", "_components", "-", "seen_components", "if", "missing_components", ":", "self", ".", "__log_info", "(", "'Components %s have no candidate pairs'", "%", "missing_components", ")", "self", ".", "_components", "=", "seen_components" ]
Remove components for which the remote party did not provide any
candidates.

This can only be determined after end-of-candidates.
[ "Remove", "components", "for", "which", "the", "remote", "party", "did", "not", "provide", "any", "candidates", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L740-L750
2,284
aiortc/aioice
aioice/stun.py
parse_message
def parse_message(data, integrity_key=None):
    """
    Parses a STUN message.

    If the ``integrity_key`` parameter is given, the message's HMAC will be
    verified.
    """
    if len(data) < HEADER_LENGTH:
        raise ValueError('STUN message length is less than 20 bytes')

    message_type, length, cookie, transaction_id = unpack('!HHI12s', data[0:HEADER_LENGTH])
    if len(data) != HEADER_LENGTH + length:
        raise ValueError('STUN message length does not match')

    attributes = OrderedDict()
    pos = HEADER_LENGTH
    while pos <= len(data) - 4:
        attr_type, attr_len = unpack('!HH', data[pos:pos + 4])
        v = data[pos + 4:pos + 4 + attr_len]
        pad_len = 4 * ((attr_len + 3) // 4) - attr_len
        if attr_type in ATTRIBUTES_BY_TYPE:
            _, attr_name, attr_pack, attr_unpack = ATTRIBUTES_BY_TYPE[attr_type]
            if attr_unpack == unpack_xor_address:
                attributes[attr_name] = attr_unpack(v, transaction_id=transaction_id)
            else:
                attributes[attr_name] = attr_unpack(v)

            if attr_name == 'FINGERPRINT':
                if attributes[attr_name] != message_fingerprint(data[0:pos]):
                    raise ValueError('STUN message fingerprint does not match')
            elif attr_name == 'MESSAGE-INTEGRITY':
                if (integrity_key is not None and
                        attributes[attr_name] != message_integrity(data[0:pos], integrity_key)):
                    raise ValueError('STUN message integrity does not match')

        pos += 4 + attr_len + pad_len
    return Message(
        message_method=message_type & 0x3eef,
        message_class=message_type & 0x0110,
        transaction_id=transaction_id,
        attributes=attributes)
python
def parse_message(data, integrity_key=None):
    """
    Parses a STUN message.

    If the ``integrity_key`` parameter is given, the message's HMAC will be
    verified.
    """
    if len(data) < HEADER_LENGTH:
        raise ValueError('STUN message length is less than 20 bytes')

    message_type, length, cookie, transaction_id = unpack('!HHI12s', data[0:HEADER_LENGTH])
    if len(data) != HEADER_LENGTH + length:
        raise ValueError('STUN message length does not match')

    attributes = OrderedDict()
    pos = HEADER_LENGTH
    while pos <= len(data) - 4:
        attr_type, attr_len = unpack('!HH', data[pos:pos + 4])
        v = data[pos + 4:pos + 4 + attr_len]
        pad_len = 4 * ((attr_len + 3) // 4) - attr_len
        if attr_type in ATTRIBUTES_BY_TYPE:
            _, attr_name, attr_pack, attr_unpack = ATTRIBUTES_BY_TYPE[attr_type]
            if attr_unpack == unpack_xor_address:
                attributes[attr_name] = attr_unpack(v, transaction_id=transaction_id)
            else:
                attributes[attr_name] = attr_unpack(v)

            if attr_name == 'FINGERPRINT':
                if attributes[attr_name] != message_fingerprint(data[0:pos]):
                    raise ValueError('STUN message fingerprint does not match')
            elif attr_name == 'MESSAGE-INTEGRITY':
                if (integrity_key is not None and
                        attributes[attr_name] != message_integrity(data[0:pos], integrity_key)):
                    raise ValueError('STUN message integrity does not match')

        pos += 4 + attr_len + pad_len
    return Message(
        message_method=message_type & 0x3eef,
        message_class=message_type & 0x0110,
        transaction_id=transaction_id,
        attributes=attributes)
[ "def", "parse_message", "(", "data", ",", "integrity_key", "=", "None", ")", ":", "if", "len", "(", "data", ")", "<", "HEADER_LENGTH", ":", "raise", "ValueError", "(", "'STUN message length is less than 20 bytes'", ")", "message_type", ",", "length", ",", "cookie", ",", "transaction_id", "=", "unpack", "(", "'!HHI12s'", ",", "data", "[", "0", ":", "HEADER_LENGTH", "]", ")", "if", "len", "(", "data", ")", "!=", "HEADER_LENGTH", "+", "length", ":", "raise", "ValueError", "(", "'STUN message length does not match'", ")", "attributes", "=", "OrderedDict", "(", ")", "pos", "=", "HEADER_LENGTH", "while", "pos", "<=", "len", "(", "data", ")", "-", "4", ":", "attr_type", ",", "attr_len", "=", "unpack", "(", "'!HH'", ",", "data", "[", "pos", ":", "pos", "+", "4", "]", ")", "v", "=", "data", "[", "pos", "+", "4", ":", "pos", "+", "4", "+", "attr_len", "]", "pad_len", "=", "4", "*", "(", "(", "attr_len", "+", "3", ")", "//", "4", ")", "-", "attr_len", "if", "attr_type", "in", "ATTRIBUTES_BY_TYPE", ":", "_", ",", "attr_name", ",", "attr_pack", ",", "attr_unpack", "=", "ATTRIBUTES_BY_TYPE", "[", "attr_type", "]", "if", "attr_unpack", "==", "unpack_xor_address", ":", "attributes", "[", "attr_name", "]", "=", "attr_unpack", "(", "v", ",", "transaction_id", "=", "transaction_id", ")", "else", ":", "attributes", "[", "attr_name", "]", "=", "attr_unpack", "(", "v", ")", "if", "attr_name", "==", "'FINGERPRINT'", ":", "if", "attributes", "[", "attr_name", "]", "!=", "message_fingerprint", "(", "data", "[", "0", ":", "pos", "]", ")", ":", "raise", "ValueError", "(", "'STUN message fingerprint does not match'", ")", "elif", "attr_name", "==", "'MESSAGE-INTEGRITY'", ":", "if", "(", "integrity_key", "is", "not", "None", "and", "attributes", "[", "attr_name", "]", "!=", "message_integrity", "(", "data", "[", "0", ":", "pos", "]", ",", "integrity_key", ")", ")", ":", "raise", "ValueError", "(", "'STUN message integrity does not match'", ")", "pos", "+=", "4", "+", "attr_len", "+", "pad_len", "return", "Message", "(", "message_method", "=", "message_type", "&", "0x3eef", ",", "message_class", "=", "message_type", "&", "0x0110", ",", "transaction_id", "=", "transaction_id", ",", "attributes", "=", "attributes", ")" ]
Parses a STUN message.

If the ``integrity_key`` parameter is given, the message's HMAC will be
verified.
[ "Parses", "a", "STUN", "message", "." ]
a04d810d94ec2d00eca9ce01eacca74b3b086616
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/stun.py#L268-L306
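The attribute loop advances by the 4-byte type/length header plus the value rounded up to a 32-bit boundary, per RFC 5389's TLV encoding. The padding arithmetic is worth seeing in isolation (lengths chosen purely for illustration):

# STUN attribute values are padded out to a 4-byte boundary.
for attr_len in (0, 1, 4, 7, 9):
    pad_len = 4 * ((attr_len + 3) // 4) - attr_len
    print(attr_len, '->', 4 + attr_len + pad_len)  # bytes consumed incl. header
# 0 -> 4, 1 -> 8, 4 -> 8, 7 -> 12, 9 -> 16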
2,285
cole/aiosmtplib
src/aiosmtplib/protocol.py
SMTPProtocol.connection_made
def connection_made(self, transport: asyncio.BaseTransport) -> None:
        """
        Modified ``connection_made`` that supports upgrading our transport in
        place using STARTTLS.

        We set the _transport directly on the StreamReader, rather than calling
        set_transport (which will raise an AssertionError on upgrade).
        """
        if self._stream_reader is None:
            raise SMTPServerDisconnected("Client not connected")

        self._stream_reader._transport = transport  # type: ignore
        self._over_ssl = transport.get_extra_info("sslcontext") is not None
        self._stream_writer = asyncio.StreamWriter(
            transport, self, self._stream_reader, self._loop
        )
        self._client_connected_cb(  # type: ignore
            self._stream_reader, self._stream_writer
        )
python
def connection_made(self, transport: asyncio.BaseTransport) -> None:
        """
        Modified ``connection_made`` that supports upgrading our transport in
        place using STARTTLS.

        We set the _transport directly on the StreamReader, rather than calling
        set_transport (which will raise an AssertionError on upgrade).
        """
        if self._stream_reader is None:
            raise SMTPServerDisconnected("Client not connected")

        self._stream_reader._transport = transport  # type: ignore
        self._over_ssl = transport.get_extra_info("sslcontext") is not None
        self._stream_writer = asyncio.StreamWriter(
            transport, self, self._stream_reader, self._loop
        )
        self._client_connected_cb(  # type: ignore
            self._stream_reader, self._stream_writer
        )
[ "def", "connection_made", "(", "self", ",", "transport", ":", "asyncio", ".", "BaseTransport", ")", "->", "None", ":", "if", "self", ".", "_stream_reader", "is", "None", ":", "raise", "SMTPServerDisconnected", "(", "\"Client not connected\"", ")", "self", ".", "_stream_reader", ".", "_transport", "=", "transport", "# type: ignore", "self", ".", "_over_ssl", "=", "transport", ".", "get_extra_info", "(", "\"sslcontext\"", ")", "is", "not", "None", "self", ".", "_stream_writer", "=", "asyncio", ".", "StreamWriter", "(", "transport", ",", "self", ",", "self", ".", "_stream_reader", ",", "self", ".", "_loop", ")", "self", ".", "_client_connected_cb", "(", "# type: ignore", "self", ".", "_stream_reader", ",", "self", ".", "_stream_writer", ")" ]
Modified ``connection_made`` that supports upgrading our transport in
place using STARTTLS.

We set the _transport directly on the StreamReader, rather than calling
set_transport (which will raise an AssertionError on upgrade).
[ "Modified", "connection_made", "that", "supports", "upgrading", "our", "transport", "in", "place", "using", "STARTTLS", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/protocol.py#L50-L68
2,286
cole/aiosmtplib
src/aiosmtplib/protocol.py
SMTPProtocol.upgrade_transport
def upgrade_transport(
        self,
        context: ssl.SSLContext,
        server_hostname: str = None,
        waiter: Awaitable = None,
    ) -> SSLProtocol:
        """
        Upgrade our transport to TLS in place.
        """
        if self._over_ssl:
            raise RuntimeError("Already using TLS.")

        if self._stream_reader is None or self._stream_writer is None:
            raise SMTPServerDisconnected("Client not connected")

        transport = self._stream_reader._transport  # type: ignore

        tls_protocol = SSLProtocol(
            self._loop,
            self,
            context,
            waiter,
            server_side=False,
            server_hostname=server_hostname,
        )

        app_transport = tls_protocol._app_transport
        # Use set_protocol if we can
        if hasattr(transport, "set_protocol"):
            transport.set_protocol(tls_protocol)
        else:
            transport._protocol = tls_protocol

        self._stream_reader._transport = app_transport  # type: ignore
        self._stream_writer._transport = app_transport  # type: ignore

        tls_protocol.connection_made(transport)
        self._over_ssl = True  # type: bool

        return tls_protocol
python
def upgrade_transport(
        self,
        context: ssl.SSLContext,
        server_hostname: str = None,
        waiter: Awaitable = None,
    ) -> SSLProtocol:
        """
        Upgrade our transport to TLS in place.
        """
        if self._over_ssl:
            raise RuntimeError("Already using TLS.")

        if self._stream_reader is None or self._stream_writer is None:
            raise SMTPServerDisconnected("Client not connected")

        transport = self._stream_reader._transport  # type: ignore

        tls_protocol = SSLProtocol(
            self._loop,
            self,
            context,
            waiter,
            server_side=False,
            server_hostname=server_hostname,
        )

        app_transport = tls_protocol._app_transport
        # Use set_protocol if we can
        if hasattr(transport, "set_protocol"):
            transport.set_protocol(tls_protocol)
        else:
            transport._protocol = tls_protocol

        self._stream_reader._transport = app_transport  # type: ignore
        self._stream_writer._transport = app_transport  # type: ignore

        tls_protocol.connection_made(transport)
        self._over_ssl = True  # type: bool

        return tls_protocol
[ "def", "upgrade_transport", "(", "self", ",", "context", ":", "ssl", ".", "SSLContext", ",", "server_hostname", ":", "str", "=", "None", ",", "waiter", ":", "Awaitable", "=", "None", ",", ")", "->", "SSLProtocol", ":", "if", "self", ".", "_over_ssl", ":", "raise", "RuntimeError", "(", "\"Already using TLS.\"", ")", "if", "self", ".", "_stream_reader", "is", "None", "or", "self", ".", "_stream_writer", "is", "None", ":", "raise", "SMTPServerDisconnected", "(", "\"Client not connected\"", ")", "transport", "=", "self", ".", "_stream_reader", ".", "_transport", "# type: ignore", "tls_protocol", "=", "SSLProtocol", "(", "self", ".", "_loop", ",", "self", ",", "context", ",", "waiter", ",", "server_side", "=", "False", ",", "server_hostname", "=", "server_hostname", ",", ")", "app_transport", "=", "tls_protocol", ".", "_app_transport", "# Use set_protocol if we can", "if", "hasattr", "(", "transport", ",", "\"set_protocol\"", ")", ":", "transport", ".", "set_protocol", "(", "tls_protocol", ")", "else", ":", "transport", ".", "_protocol", "=", "tls_protocol", "self", ".", "_stream_reader", ".", "_transport", "=", "app_transport", "# type: ignore", "self", ".", "_stream_writer", ".", "_transport", "=", "app_transport", "# type: ignore", "tls_protocol", ".", "connection_made", "(", "transport", ")", "self", ".", "_over_ssl", "=", "True", "# type: bool", "return", "tls_protocol" ]
Upgrade our transport to TLS in place.
[ "Upgrade", "our", "transport", "to", "TLS", "in", "place", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/protocol.py#L70-L109
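This in-place transport swap is the plumbing behind aiosmtplib's client-level STARTTLS flow; a hedged sketch of that flow using the public API (the hostname is a placeholder):

import asyncio
from aiosmtplib import SMTP

async def send_upgraded():
    client = SMTP(hostname="smtp.example.com", port=587, use_tls=False)
    await client.connect()
    await client.starttls()   # upgrade_transport swaps in the TLS transport
    await client.ehlo()       # re-identify over the now-encrypted channel
    await client.quit()

asyncio.get_event_loop().run_until_complete(send_upgraded())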
2,287
cole/aiosmtplib
src/aiosmtplib/protocol.py
SMTPProtocol.read_response
async def read_response(self, timeout: NumType = None) -> SMTPResponse:
        """
        Get a status response from the server.

        Returns an SMTPResponse namedtuple consisting of:

          - server response code (e.g. 250, or such, if all goes well)

          - server response string corresponding to response code (multiline
            responses are converted to a single, multiline string).
        """
        if self._stream_reader is None:
            raise SMTPServerDisconnected("Client not connected")

        code = None
        response_lines = []

        while True:
            async with self._io_lock:
                line = await self._readline(timeout=timeout)
            try:
                code = int(line[:3])
            except ValueError:
                pass

            message = line[4:].strip(b" \t\r\n").decode("utf-8", "surrogateescape")
            response_lines.append(message)

            if line[3:4] != b"-":
                break

        full_message = "\n".join(response_lines)

        if code is None:
            raise SMTPResponseException(
                SMTPStatus.invalid_response.value,
                "Malformed SMTP response: {}".format(full_message),
            )

        return SMTPResponse(code, full_message)
python
async def read_response(self, timeout: NumType = None) -> SMTPResponse:
        """
        Get a status response from the server.

        Returns an SMTPResponse namedtuple consisting of:

          - server response code (e.g. 250, or such, if all goes well)

          - server response string corresponding to response code (multiline
            responses are converted to a single, multiline string).
        """
        if self._stream_reader is None:
            raise SMTPServerDisconnected("Client not connected")

        code = None
        response_lines = []

        while True:
            async with self._io_lock:
                line = await self._readline(timeout=timeout)
            try:
                code = int(line[:3])
            except ValueError:
                pass

            message = line[4:].strip(b" \t\r\n").decode("utf-8", "surrogateescape")
            response_lines.append(message)

            if line[3:4] != b"-":
                break

        full_message = "\n".join(response_lines)

        if code is None:
            raise SMTPResponseException(
                SMTPStatus.invalid_response.value,
                "Malformed SMTP response: {}".format(full_message),
            )

        return SMTPResponse(code, full_message)
[ "async", "def", "read_response", "(", "self", ",", "timeout", ":", "NumType", "=", "None", ")", "->", "SMTPResponse", ":", "if", "self", ".", "_stream_reader", "is", "None", ":", "raise", "SMTPServerDisconnected", "(", "\"Client not connected\"", ")", "code", "=", "None", "response_lines", "=", "[", "]", "while", "True", ":", "async", "with", "self", ".", "_io_lock", ":", "line", "=", "await", "self", ".", "_readline", "(", "timeout", "=", "timeout", ")", "try", ":", "code", "=", "int", "(", "line", "[", ":", "3", "]", ")", "except", "ValueError", ":", "pass", "message", "=", "line", "[", "4", ":", "]", ".", "strip", "(", "b\" \\t\\r\\n\"", ")", ".", "decode", "(", "\"utf-8\"", ",", "\"surrogateescape\"", ")", "response_lines", ".", "append", "(", "message", ")", "if", "line", "[", "3", ":", "4", "]", "!=", "b\"-\"", ":", "break", "full_message", "=", "\"\\n\"", ".", "join", "(", "response_lines", ")", "if", "code", "is", "None", ":", "raise", "SMTPResponseException", "(", "SMTPStatus", ".", "invalid_response", ".", "value", ",", "\"Malformed SMTP response: {}\"", ".", "format", "(", "full_message", ")", ",", ")", "return", "SMTPResponse", "(", "code", ",", "full_message", ")" ]
Get a status response from the server.

Returns an SMTPResponse namedtuple consisting of:

  - server response code (e.g. 250, or such, if all goes well)

  - server response string corresponding to response code (multiline
    responses are converted to a single, multiline string).
[ "Get", "a", "status", "reponse", "from", "the", "server", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/protocol.py#L111-L148
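The line[3:4] != b"-" test implements the SMTP continuation convention: a hyphen after the status code means more lines follow, a space means the reply is complete. A stand-alone sketch of the same parse over a canned multiline reply:

# Parse a canned multiline EHLO-style reply the same way read_response does.
reply = [b"250-smtp.example.com\r\n", b"250-PIPELINING\r\n", b"250 STARTTLS\r\n"]

code, lines = None, []
for line in reply:
    code = int(line[:3])
    lines.append(line[4:].strip(b" \t\r\n").decode("utf-8"))
    if line[3:4] != b"-":
        break
print(code, "\n".join(lines))  # 250 smtp.example.com / PIPELINING / STARTTLS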
2,288
cole/aiosmtplib
src/aiosmtplib/protocol.py
SMTPProtocol.write_and_drain
async def write_and_drain(self, data: bytes, timeout: NumType = None) -> None:
        """
        Format a command and send it to the server.
        """
        if self._stream_writer is None:
            raise SMTPServerDisconnected("Client not connected")

        self._stream_writer.write(data)

        async with self._io_lock:
            await self._drain_writer(timeout)
python
async def write_and_drain(self, data: bytes, timeout: NumType = None) -> None:
        """
        Format a command and send it to the server.
        """
        if self._stream_writer is None:
            raise SMTPServerDisconnected("Client not connected")

        self._stream_writer.write(data)

        async with self._io_lock:
            await self._drain_writer(timeout)
[ "async", "def", "write_and_drain", "(", "self", ",", "data", ":", "bytes", ",", "timeout", ":", "NumType", "=", "None", ")", "->", "None", ":", "if", "self", ".", "_stream_writer", "is", "None", ":", "raise", "SMTPServerDisconnected", "(", "\"Client not connected\"", ")", "self", ".", "_stream_writer", ".", "write", "(", "data", ")", "async", "with", "self", ".", "_io_lock", ":", "await", "self", ".", "_drain_writer", "(", "timeout", ")" ]
Format a command and send it to the server.
[ "Format", "a", "command", "and", "send", "it", "to", "the", "server", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/protocol.py#L150-L160
2,289
cole/aiosmtplib
src/aiosmtplib/protocol.py
SMTPProtocol.write_message_data
async def write_message_data(self, data: bytes, timeout: NumType = None) -> None:
        """
        Encode and write email message data.

        Automatically quotes lines beginning with a period per RFC821.
        Lone \\\\r and \\\\n characters are converted to \\\\r\\\\n characters.
        """
        data = LINE_ENDINGS_REGEX.sub(b"\r\n", data)
        data = PERIOD_REGEX.sub(b"..", data)
        if not data.endswith(b"\r\n"):
            data += b"\r\n"
        data += b".\r\n"

        await self.write_and_drain(data, timeout=timeout)
python
async def write_message_data(self, data: bytes, timeout: NumType = None) -> None:
        """
        Encode and write email message data.

        Automatically quotes lines beginning with a period per RFC821.
        Lone \\\\r and \\\\n characters are converted to \\\\r\\\\n characters.
        """
        data = LINE_ENDINGS_REGEX.sub(b"\r\n", data)
        data = PERIOD_REGEX.sub(b"..", data)
        if not data.endswith(b"\r\n"):
            data += b"\r\n"
        data += b".\r\n"

        await self.write_and_drain(data, timeout=timeout)
[ "async", "def", "write_message_data", "(", "self", ",", "data", ":", "bytes", ",", "timeout", ":", "NumType", "=", "None", ")", "->", "None", ":", "data", "=", "LINE_ENDINGS_REGEX", ".", "sub", "(", "b\"\\r\\n\"", ",", "data", ")", "data", "=", "PERIOD_REGEX", ".", "sub", "(", "b\"..\"", ",", "data", ")", "if", "not", "data", ".", "endswith", "(", "b\"\\r\\n\"", ")", ":", "data", "+=", "b\"\\r\\n\"", "data", "+=", "b\".\\r\\n\"", "await", "self", ".", "write_and_drain", "(", "data", ",", "timeout", "=", "timeout", ")" ]
Encode and write email message data.

Automatically quotes lines beginning with a period per RFC821.
Lone \\\\r and \\\\n characters are converted to \\\\r\\\\n characters.
[ "Encode", "and", "write", "email", "message", "data", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/protocol.py#L162-L176
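Dot-stuffing in action: the two substitutions normalize bare CR/LF and double any line-leading period before the terminating "." line is appended. The pattern definitions below are assumed equivalents of the module-level regexes, written out so the sketch is self-contained:

import re

# Assumed equivalents of the module's LINE_ENDINGS_REGEX and PERIOD_REGEX.
LINE_ENDINGS_REGEX = re.compile(rb"(?:\r\n|\n|\r(?!\n))")
PERIOD_REGEX = re.compile(rb"(?m)^\.")

data = b"From: a@example.com\n.hidden line\r\nbody"
data = LINE_ENDINGS_REGEX.sub(b"\r\n", data)   # normalize bare \n and \r
data = PERIOD_REGEX.sub(b"..", data)           # dot-stuff leading periods
if not data.endswith(b"\r\n"):
    data += b"\r\n"
data += b".\r\n"                               # end-of-data marker
print(data)
# b'From: a@example.com\r\n..hidden line\r\nbody\r\n.\r\n'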
2,290
cole/aiosmtplib
src/aiosmtplib/protocol.py
SMTPProtocol.execute_command
async def execute_command(
        self, *args: bytes, timeout: NumType = None
    ) -> SMTPResponse:
        """
        Sends an SMTP command along with any args to the server, and returns
        a response.
        """
        command = b" ".join(args) + b"\r\n"

        await self.write_and_drain(command, timeout=timeout)
        response = await self.read_response(timeout=timeout)

        return response
python
async def execute_command(
        self, *args: bytes, timeout: NumType = None
    ) -> SMTPResponse:
        """
        Sends an SMTP command along with any args to the server, and returns
        a response.
        """
        command = b" ".join(args) + b"\r\n"

        await self.write_and_drain(command, timeout=timeout)
        response = await self.read_response(timeout=timeout)

        return response
[ "async", "def", "execute_command", "(", "self", ",", "*", "args", ":", "bytes", ",", "timeout", ":", "NumType", "=", "None", ")", "->", "SMTPResponse", ":", "command", "=", "b\" \"", ".", "join", "(", "args", ")", "+", "b\"\\r\\n\"", "await", "self", ".", "write_and_drain", "(", "command", ",", "timeout", "=", "timeout", ")", "response", "=", "await", "self", ".", "read_response", "(", "timeout", "=", "timeout", ")", "return", "response" ]
Sends an SMTP command along with any args to the server, and returns a response.
[ "Sends", "an", "SMTP", "command", "along", "with", "any", "args", "to", "the", "server", "and", "returns", "a", "response", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/protocol.py#L178-L190
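The wire format execute_command produces is simply the args joined with single spaces plus CRLF, after which the coroutine awaits one parsed response. A quick demonstration with illustrative argument values:

args = (b"MAIL", b"FROM:<alice@example.com>", b"SIZE=512")
command = b" ".join(args) + b"\r\n"
print(command)  # b'MAIL FROM:<alice@example.com> SIZE=512\r\n'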
2,291
cole/aiosmtplib
src/aiosmtplib/esmtp.py
ESMTP.last_ehlo_response
def last_ehlo_response(self, response: SMTPResponse) -> None: """ When setting the last EHLO response, parse the message for supported extensions and auth methods. """ extensions, auth_methods = parse_esmtp_extensions(response.message) self._last_ehlo_response = response self.esmtp_extensions = extensions self.server_auth_methods = auth_methods self.supports_esmtp = True
python
def last_ehlo_response(self, response: SMTPResponse) -> None: """ When setting the last EHLO response, parse the message for supported extensions and auth methods. """ extensions, auth_methods = parse_esmtp_extensions(response.message) self._last_ehlo_response = response self.esmtp_extensions = extensions self.server_auth_methods = auth_methods self.supports_esmtp = True
[ "def", "last_ehlo_response", "(", "self", ",", "response", ":", "SMTPResponse", ")", "->", "None", ":", "extensions", ",", "auth_methods", "=", "parse_esmtp_extensions", "(", "response", ".", "message", ")", "self", ".", "_last_ehlo_response", "=", "response", "self", ".", "esmtp_extensions", "=", "extensions", "self", ".", "server_auth_methods", "=", "auth_methods", "self", ".", "supports_esmtp", "=", "True" ]
When setting the last EHLO response, parse the message for supported extensions and auth methods.
[ "When", "setting", "the", "last", "EHLO", "response", "parse", "the", "message", "for", "supported", "extensions", "and", "auth", "methods", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/esmtp.py#L59-L68
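parse_esmtp_extensions is internal to aiosmtplib, but the shape of its output can be sketched with a minimal stand-in parser; the EHLO message below and the parsing rules are illustrative assumptions, not the library's implementation:

message = "smtp.example.com\nSIZE 35882577\n8BITMIME\nAUTH PLAIN LOGIN\nSTARTTLS"
extensions, auth_methods = {}, []
for line in message.splitlines()[1:]:        # first line is the server greeting
    keyword, _, params = line.partition(" ")
    extensions[keyword.lower()] = params
    if keyword.lower() == "auth":
        auth_methods = params.lower().split()
print(extensions.get("size"), auth_methods)  # 35882577 ['plain', 'login']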
2,292
cole/aiosmtplib
src/aiosmtplib/esmtp.py
ESMTP.helo
async def helo( self, hostname: str = None, timeout: DefaultNumType = _default ) -> SMTPResponse: """ Send the SMTP HELO command. Hostname to send for this command defaults to the FQDN of the local host. :raises SMTPHeloError: on unexpected server response code """ if hostname is None: hostname = self.source_address async with self._command_lock: response = await self.execute_command( b"HELO", hostname.encode("ascii"), timeout=timeout ) self.last_helo_response = response if response.code != SMTPStatus.completed: raise SMTPHeloError(response.code, response.message) return response
python
async def helo( self, hostname: str = None, timeout: DefaultNumType = _default ) -> SMTPResponse: """ Send the SMTP HELO command. Hostname to send for this command defaults to the FQDN of the local host. :raises SMTPHeloError: on unexpected server response code """ if hostname is None: hostname = self.source_address async with self._command_lock: response = await self.execute_command( b"HELO", hostname.encode("ascii"), timeout=timeout ) self.last_helo_response = response if response.code != SMTPStatus.completed: raise SMTPHeloError(response.code, response.message) return response
[ "async", "def", "helo", "(", "self", ",", "hostname", ":", "str", "=", "None", ",", "timeout", ":", "DefaultNumType", "=", "_default", ")", "->", "SMTPResponse", ":", "if", "hostname", "is", "None", ":", "hostname", "=", "self", ".", "source_address", "async", "with", "self", ".", "_command_lock", ":", "response", "=", "await", "self", ".", "execute_command", "(", "b\"HELO\"", ",", "hostname", ".", "encode", "(", "\"ascii\"", ")", ",", "timeout", "=", "timeout", ")", "self", ".", "last_helo_response", "=", "response", "if", "response", ".", "code", "!=", "SMTPStatus", ".", "completed", ":", "raise", "SMTPHeloError", "(", "response", ".", "code", ",", "response", ".", "message", ")", "return", "response" ]
Send the SMTP HELO command. Hostname to send for this command defaults to the FQDN of the local host. :raises SMTPHeloError: on unexpected server response code
[ "Send", "the", "SMTP", "HELO", "command", ".", "Hostname", "to", "send", "for", "this", "command", "defaults", "to", "the", "FQDN", "of", "the", "local", "host", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/esmtp.py#L86-L107
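A self-contained usage sketch for helo() through the public aiosmtplib.SMTP client, which mixes in these ESMTP methods; the server hostname is a placeholder:

import asyncio
import aiosmtplib

async def main() -> None:
    client = aiosmtplib.SMTP(hostname="smtp.example.com", port=25)  # placeholder server
    await client.connect()
    response = await client.helo()  # hostname arg defaults to the local FQDN
    print(response.code, response.message)
    await client.quit()

asyncio.run(main())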
2,293
cole/aiosmtplib
src/aiosmtplib/esmtp.py
ESMTP.help
async def help(self, timeout: DefaultNumType = _default) -> str: """ Send the SMTP HELP command, which responds with help text. :raises SMTPResponseException: on unexpected server response code """ await self._ehlo_or_helo_if_needed() async with self._command_lock: response = await self.execute_command(b"HELP", timeout=timeout) success_codes = ( SMTPStatus.system_status_ok, SMTPStatus.help_message, SMTPStatus.completed, ) if response.code not in success_codes: raise SMTPResponseException(response.code, response.message) return response.message
python
async def help(self, timeout: DefaultNumType = _default) -> str: """ Send the SMTP HELP command, which responds with help text. :raises SMTPResponseException: on unexpected server response code """ await self._ehlo_or_helo_if_needed() async with self._command_lock: response = await self.execute_command(b"HELP", timeout=timeout) success_codes = ( SMTPStatus.system_status_ok, SMTPStatus.help_message, SMTPStatus.completed, ) if response.code not in success_codes: raise SMTPResponseException(response.code, response.message) return response.message
[ "async", "def", "help", "(", "self", ",", "timeout", ":", "DefaultNumType", "=", "_default", ")", "->", "str", ":", "await", "self", ".", "_ehlo_or_helo_if_needed", "(", ")", "async", "with", "self", ".", "_command_lock", ":", "response", "=", "await", "self", ".", "execute_command", "(", "b\"HELP\"", ",", "timeout", "=", "timeout", ")", "success_codes", "=", "(", "SMTPStatus", ".", "system_status_ok", ",", "SMTPStatus", ".", "help_message", ",", "SMTPStatus", ".", "completed", ",", ")", "if", "response", ".", "code", "not", "in", "success_codes", ":", "raise", "SMTPResponseException", "(", "response", ".", "code", ",", "response", ".", "message", ")", "return", "response", ".", "message" ]
Send the SMTP HELP command, which responds with help text. :raises SMTPResponseException: on unexpected server response code
[ "Send", "the", "SMTP", "HELP", "command", "which", "responds", "with", "help", "text", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/esmtp.py#L109-L127
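Given a client connected as in the HELO sketch above, requesting help text is a single awaitable; per the record, 211, 214, and 250 all count as success, and anything else raises SMTPResponseException:

import aiosmtplib

async def show_help(client: aiosmtplib.SMTP) -> None:
    help_text = await client.help()  # returns the response message body as str
    print(help_text)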
2,294
cole/aiosmtplib
src/aiosmtplib/esmtp.py
ESMTP.noop
async def noop(self, timeout: DefaultNumType = _default) -> SMTPResponse: """ Send an SMTP NOOP command, which does nothing. :raises SMTPResponseException: on unexpected server response code """ await self._ehlo_or_helo_if_needed() async with self._command_lock: response = await self.execute_command(b"NOOP", timeout=timeout) if response.code != SMTPStatus.completed: raise SMTPResponseException(response.code, response.message) return response
python
async def noop(self, timeout: DefaultNumType = _default) -> SMTPResponse: """ Send an SMTP NOOP command, which does nothing. :raises SMTPResponseException: on unexpected server response code """ await self._ehlo_or_helo_if_needed() async with self._command_lock: response = await self.execute_command(b"NOOP", timeout=timeout) if response.code != SMTPStatus.completed: raise SMTPResponseException(response.code, response.message) return response
[ "async", "def", "noop", "(", "self", ",", "timeout", ":", "DefaultNumType", "=", "_default", ")", "->", "SMTPResponse", ":", "await", "self", ".", "_ehlo_or_helo_if_needed", "(", ")", "async", "with", "self", ".", "_command_lock", ":", "response", "=", "await", "self", ".", "execute_command", "(", "b\"NOOP\"", ",", "timeout", "=", "timeout", ")", "if", "response", ".", "code", "!=", "SMTPStatus", ".", "completed", ":", "raise", "SMTPResponseException", "(", "response", ".", "code", ",", "response", ".", "message", ")", "return", "response" ]
Send an SMTP NOOP command, which does nothing. :raises SMTPResponseException: on unexpected server response code
[ "Send", "an", "SMTP", "NOOP", "command", "which", "does", "nothing", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/esmtp.py#L145-L158
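Because NOOP does nothing server-side, it makes a cheap keep-alive for an idle connection; a sketch assuming an already-connected client:

import asyncio
import aiosmtplib

async def keep_alive(client: aiosmtplib.SMTP, interval: float = 30.0) -> None:
    while True:
        await client.noop()            # any non-250 reply raises
        await asyncio.sleep(interval)  # poke the server before idle timeouts hit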
2,295
cole/aiosmtplib
src/aiosmtplib/esmtp.py
ESMTP.vrfy
async def vrfy( self, address: str, timeout: DefaultNumType = _default ) -> SMTPResponse: """ Send an SMTP VRFY command, which tests an address for validity. Not many servers support this command. :raises SMTPResponseException: on unexpected server response code """ await self._ehlo_or_helo_if_needed() parsed_address = parse_address(address) async with self._command_lock: response = await self.execute_command( b"VRFY", parsed_address.encode("ascii"), timeout=timeout ) success_codes = ( SMTPStatus.completed, SMTPStatus.will_forward, SMTPStatus.cannot_vrfy, ) if response.code not in success_codes: raise SMTPResponseException(response.code, response.message) return response
python
async def vrfy( self, address: str, timeout: DefaultNumType = _default ) -> SMTPResponse: """ Send an SMTP VRFY command, which tests an address for validity. Not many servers support this command. :raises SMTPResponseException: on unexpected server response code """ await self._ehlo_or_helo_if_needed() parsed_address = parse_address(address) async with self._command_lock: response = await self.execute_command( b"VRFY", parsed_address.encode("ascii"), timeout=timeout ) success_codes = ( SMTPStatus.completed, SMTPStatus.will_forward, SMTPStatus.cannot_vrfy, ) if response.code not in success_codes: raise SMTPResponseException(response.code, response.message) return response
[ "async", "def", "vrfy", "(", "self", ",", "address", ":", "str", ",", "timeout", ":", "DefaultNumType", "=", "_default", ")", "->", "SMTPResponse", ":", "await", "self", ".", "_ehlo_or_helo_if_needed", "(", ")", "parsed_address", "=", "parse_address", "(", "address", ")", "async", "with", "self", ".", "_command_lock", ":", "response", "=", "await", "self", ".", "execute_command", "(", "b\"VRFY\"", ",", "parsed_address", ".", "encode", "(", "\"ascii\"", ")", ",", "timeout", "=", "timeout", ")", "success_codes", "=", "(", "SMTPStatus", ".", "completed", ",", "SMTPStatus", ".", "will_forward", ",", "SMTPStatus", ".", "cannot_vrfy", ",", ")", "if", "response", ".", "code", "not", "in", "success_codes", ":", "raise", "SMTPResponseException", "(", "response", ".", "code", ",", "response", ".", "message", ")", "return", "response" ]
Send an SMTP VRFY command, which tests an address for validity. Not many servers support this command. :raises SMTPResponseException: on unexpected server response code
[ "Send", "an", "SMTP", "VRFY", "command", "which", "tests", "an", "address", "for", "validity", ".", "Not", "many", "servers", "support", "this", "command", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/esmtp.py#L160-L187
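One way to wrap vrfy() as a boolean check, assuming a connected client. Note that 252 ("cannot VRFY") counts as success in the record above, so True may only mean the server declined to say no:

import aiosmtplib
from aiosmtplib import SMTPResponseException

async def address_is_accepted(client: aiosmtplib.SMTP, address: str) -> bool:
    try:
        await client.vrfy(address)  # 250/251/252 succeed; other codes raise
    except SMTPResponseException:
        return False
    return True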
2,296
cole/aiosmtplib
src/aiosmtplib/esmtp.py
ESMTP.expn
async def expn( self, address: str, timeout: DefaultNumType = _default ) -> SMTPResponse: """ Send an SMTP EXPN command, which expands a mailing list. Not many servers support this command. :raises SMTPResponseException: on unexpected server response code """ await self._ehlo_or_helo_if_needed() parsed_address = parse_address(address) async with self._command_lock: response = await self.execute_command( b"EXPN", parsed_address.encode("ascii"), timeout=timeout ) if response.code != SMTPStatus.completed: raise SMTPResponseException(response.code, response.message) return response
python
async def expn( self, address: str, timeout: DefaultNumType = _default ) -> SMTPResponse: """ Send an SMTP EXPN command, which expands a mailing list. Not many servers support this command. :raises SMTPResponseException: on unexpected server response code """ await self._ehlo_or_helo_if_needed() parsed_address = parse_address(address) async with self._command_lock: response = await self.execute_command( b"EXPN", parsed_address.encode("ascii"), timeout=timeout ) if response.code != SMTPStatus.completed: raise SMTPResponseException(response.code, response.message) return response
[ "async", "def", "expn", "(", "self", ",", "address", ":", "str", ",", "timeout", ":", "DefaultNumType", "=", "_default", ")", "->", "SMTPResponse", ":", "await", "self", ".", "_ehlo_or_helo_if_needed", "(", ")", "parsed_address", "=", "parse_address", "(", "address", ")", "async", "with", "self", ".", "_command_lock", ":", "response", "=", "await", "self", ".", "execute_command", "(", "b\"EXPN\"", ",", "parsed_address", ".", "encode", "(", "\"ascii\"", ")", ",", "timeout", "=", "timeout", ")", "if", "response", ".", "code", "!=", "SMTPStatus", ".", "completed", ":", "raise", "SMTPResponseException", "(", "response", ".", "code", ",", "response", ".", "message", ")", "return", "response" ]
Send an SMTP EXPN command, which expands a mailing list. Not many servers support this command. :raises SMTPResponseException: on unexpected server response code
[ "Send", "an", "SMTP", "EXPN", "command", "which", "expands", "a", "mailing", "list", ".", "Not", "many", "servers", "support", "this", "command", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/esmtp.py#L189-L210
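EXPN usage mirrors VRFY; the list address below is a placeholder, and since most servers disable the command, expect SMTPResponseException in practice:

import aiosmtplib

async def expand_list(client: aiosmtplib.SMTP) -> None:
    response = await client.expn("staff@example.com")  # placeholder list address
    print(response.message)  # servers that support EXPN return member lines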
2,297
cole/aiosmtplib
src/aiosmtplib/esmtp.py
ESMTP.quit
async def quit(self, timeout: DefaultNumType = _default) -> SMTPResponse: """ Send the SMTP QUIT command, which closes the connection. Also closes the connection from our side after a response is received. :raises SMTPResponseException: on unexpected server response code """ # Can't quit without HELO/EHLO await self._ehlo_or_helo_if_needed() async with self._command_lock: response = await self.execute_command(b"QUIT", timeout=timeout) if response.code != SMTPStatus.closing: raise SMTPResponseException(response.code, response.message) self.close() return response
python
async def quit(self, timeout: DefaultNumType = _default) -> SMTPResponse: """ Send the SMTP QUIT command, which closes the connection. Also closes the connection from our side after a response is received. :raises SMTPResponseException: on unexpected server response code """ # Can't quit without HELO/EHLO await self._ehlo_or_helo_if_needed() async with self._command_lock: response = await self.execute_command(b"QUIT", timeout=timeout) if response.code != SMTPStatus.closing: raise SMTPResponseException(response.code, response.message) self.close() return response
[ "async", "def", "quit", "(", "self", ",", "timeout", ":", "DefaultNumType", "=", "_default", ")", "->", "SMTPResponse", ":", "# Can't quit without HELO/EHLO", "await", "self", ".", "_ehlo_or_helo_if_needed", "(", ")", "async", "with", "self", ".", "_command_lock", ":", "response", "=", "await", "self", ".", "execute_command", "(", "b\"QUIT\"", ",", "timeout", "=", "timeout", ")", "if", "response", ".", "code", "!=", "SMTPStatus", ".", "closing", ":", "raise", "SMTPResponseException", "(", "response", ".", "code", ",", "response", ".", "message", ")", "self", ".", "close", "(", ")", "return", "response" ]
Send the SMTP QUIT command, which closes the connection. Also closes the connection from our side after a response is received. :raises SMTPResponseException: on unexpected server response code
[ "Send", "the", "SMTP", "QUIT", "command", "which", "closes", "the", "connection", ".", "Also", "closes", "the", "connection", "from", "our", "side", "after", "a", "response", "is", "received", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/esmtp.py#L212-L229
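After quit() the client also closes its own side of the connection, so further commands fail; a sketch of that behavior (the exact exception raised by the follow-up call is an assumption based on the protocol's disconnect handling):

import aiosmtplib
from aiosmtplib import SMTPServerDisconnected

async def shutdown(client: aiosmtplib.SMTP) -> None:
    await client.quit()  # expects a 221 reply, then closes locally
    try:
        await client.noop()
    except SMTPServerDisconnected:
        print("connection closed, as expected")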
2,298
cole/aiosmtplib
src/aiosmtplib/esmtp.py
ESMTP.rcpt
async def rcpt( self, recipient: str, options: Iterable[str] = None, timeout: DefaultNumType = _default, ) -> SMTPResponse: """ Send an SMTP RCPT command, which specifies a single recipient for the message. This command is sent once per recipient and must be preceded by 'MAIL'. :raises SMTPRecipientRefused: on unexpected server response code """ await self._ehlo_or_helo_if_needed() if options is None: options = [] options_bytes = [option.encode("ascii") for option in options] to = b"TO:" + quote_address(recipient).encode("ascii") async with self._command_lock: response = await self.execute_command( b"RCPT", to, *options_bytes, timeout=timeout ) success_codes = (SMTPStatus.completed, SMTPStatus.will_forward) if response.code not in success_codes: raise SMTPRecipientRefused(response.code, response.message, recipient) return response
python
async def rcpt( self, recipient: str, options: Iterable[str] = None, timeout: DefaultNumType = _default, ) -> SMTPResponse: """ Send an SMTP RCPT command, which specifies a single recipient for the message. This command is sent once per recipient and must be preceded by 'MAIL'. :raises SMTPRecipientRefused: on unexpected server response code """ await self._ehlo_or_helo_if_needed() if options is None: options = [] options_bytes = [option.encode("ascii") for option in options] to = b"TO:" + quote_address(recipient).encode("ascii") async with self._command_lock: response = await self.execute_command( b"RCPT", to, *options_bytes, timeout=timeout ) success_codes = (SMTPStatus.completed, SMTPStatus.will_forward) if response.code not in success_codes: raise SMTPRecipientRefused(response.code, response.message, recipient) return response
[ "async", "def", "rcpt", "(", "self", ",", "recipient", ":", "str", ",", "options", ":", "Iterable", "[", "str", "]", "=", "None", ",", "timeout", ":", "DefaultNumType", "=", "_default", ",", ")", "->", "SMTPResponse", ":", "await", "self", ".", "_ehlo_or_helo_if_needed", "(", ")", "if", "options", "is", "None", ":", "options", "=", "[", "]", "options_bytes", "=", "[", "option", ".", "encode", "(", "\"ascii\"", ")", "for", "option", "in", "options", "]", "to", "=", "b\"TO:\"", "+", "quote_address", "(", "recipient", ")", ".", "encode", "(", "\"ascii\"", ")", "async", "with", "self", ".", "_command_lock", ":", "response", "=", "await", "self", ".", "execute_command", "(", "b\"RCPT\"", ",", "to", ",", "*", "options_bytes", ",", "timeout", "=", "timeout", ")", "success_codes", "=", "(", "SMTPStatus", ".", "completed", ",", "SMTPStatus", ".", "will_forward", ")", "if", "response", ".", "code", "not", "in", "success_codes", ":", "raise", "SMTPRecipientRefused", "(", "response", ".", "code", ",", "response", ".", "message", ",", "recipient", ")", "return", "response" ]
Send an SMTP RCPT command, which specifies a single recipient for the message. This command is sent once per recipient and must be preceded by 'MAIL'. :raises SMTPRecipientRefused: on unexpected server response code
[ "Send", "an", "SMTP", "RCPT", "command", "which", "specifies", "a", "single", "recipient", "for", "the", "message", ".", "This", "command", "is", "sent", "once", "per", "recipient", "and", "must", "be", "preceded", "by", "MAIL", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/esmtp.py#L261-L291
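A typical MAIL/RCPT sequence that collects per-recipient failures instead of aborting on the first refusal; the addresses are placeholders and the client is assumed connected:

import aiosmtplib
from typing import List
from aiosmtplib import SMTPRecipientRefused

async def send_envelope(client: aiosmtplib.SMTP) -> List[str]:
    await client.mail("alice@example.com")  # RCPT must be preceded by MAIL
    accepted = []
    for recipient in ("bob@example.com", "carol@example.com"):
        try:
            await client.rcpt(recipient)
        except SMTPRecipientRefused as exc:
            print("refused:", exc.recipient, exc.code)
        else:
            accepted.append(recipient)
    return accepted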
2,299
cole/aiosmtplib
src/aiosmtplib/esmtp.py
ESMTP.data
async def data( self, message: Union[str, bytes], timeout: DefaultNumType = _default ) -> SMTPResponse: """ Send an SMTP DATA command, followed by the message given. This method transfers the actual email content to the server. :raises SMTPDataError: on unexpected server response code :raises SMTPServerDisconnected: connection lost """ await self._ehlo_or_helo_if_needed() # As data accesses protocol directly, some handling is required self._raise_error_if_disconnected() if timeout is _default: timeout = self.timeout # type: ignore if isinstance(message, str): message = message.encode("ascii") async with self._command_lock: start_response = await self.execute_command(b"DATA", timeout=timeout) if start_response.code != SMTPStatus.start_input: raise SMTPDataError(start_response.code, start_response.message) try: await self.protocol.write_message_data( # type: ignore message, timeout=timeout ) response = await self.protocol.read_response( # type: ignore timeout=timeout ) except SMTPServerDisconnected as exc: self.close() raise exc if response.code != SMTPStatus.completed: raise SMTPDataError(response.code, response.message) return response
python
async def data( self, message: Union[str, bytes], timeout: DefaultNumType = _default ) -> SMTPResponse: """ Send an SMTP DATA command, followed by the message given. This method transfers the actual email content to the server. :raises SMTPDataError: on unexpected server response code :raises SMTPServerDisconnected: connection lost """ await self._ehlo_or_helo_if_needed() # As data accesses protocol directly, some handling is required self._raise_error_if_disconnected() if timeout is _default: timeout = self.timeout # type: ignore if isinstance(message, str): message = message.encode("ascii") async with self._command_lock: start_response = await self.execute_command(b"DATA", timeout=timeout) if start_response.code != SMTPStatus.start_input: raise SMTPDataError(start_response.code, start_response.message) try: await self.protocol.write_message_data( # type: ignore message, timeout=timeout ) response = await self.protocol.read_response( # type: ignore timeout=timeout ) except SMTPServerDisconnected as exc: self.close() raise exc if response.code != SMTPStatus.completed: raise SMTPDataError(response.code, response.message) return response
[ "async", "def", "data", "(", "self", ",", "message", ":", "Union", "[", "str", ",", "bytes", "]", ",", "timeout", ":", "DefaultNumType", "=", "_default", ")", "->", "SMTPResponse", ":", "await", "self", ".", "_ehlo_or_helo_if_needed", "(", ")", "# As data accesses protocol directly, some handling is required", "self", ".", "_raise_error_if_disconnected", "(", ")", "if", "timeout", "is", "_default", ":", "timeout", "=", "self", ".", "timeout", "# type: ignore", "if", "isinstance", "(", "message", ",", "str", ")", ":", "message", "=", "message", ".", "encode", "(", "\"ascii\"", ")", "async", "with", "self", ".", "_command_lock", ":", "start_response", "=", "await", "self", ".", "execute_command", "(", "b\"DATA\"", ",", "timeout", "=", "timeout", ")", "if", "start_response", ".", "code", "!=", "SMTPStatus", ".", "start_input", ":", "raise", "SMTPDataError", "(", "start_response", ".", "code", ",", "start_response", ".", "message", ")", "try", ":", "await", "self", ".", "protocol", ".", "write_message_data", "(", "# type: ignore", "message", ",", "timeout", "=", "timeout", ")", "response", "=", "await", "self", ".", "protocol", ".", "read_response", "(", "# type: ignore", "timeout", "=", "timeout", ")", "except", "SMTPServerDisconnected", "as", "exc", ":", "self", ".", "close", "(", ")", "raise", "exc", "if", "response", ".", "code", "!=", "SMTPStatus", ".", "completed", ":", "raise", "SMTPDataError", "(", "response", ".", "code", ",", "response", ".", "message", ")", "return", "response" ]
Send an SMTP DATA command, followed by the message given. This method transfers the actual email content to the server. :raises SMTPDataError: on unexpected server response code :raises SMTPServerDisconnected: connection lost
[ "Send", "an", "SMTP", "DATA", "command", "followed", "by", "the", "message", "given", ".", "This", "method", "transfers", "the", "actual", "email", "content", "to", "the", "server", "." ]
0cd00e5059005371cbdfca995feff9183a16a51f
https://github.com/cole/aiosmtplib/blob/0cd00e5059005371cbdfca995feff9183a16a51f/src/aiosmtplib/esmtp.py#L293-L333
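Tying the envelope commands together, data() transfers the message body after MAIL and RCPT have succeeded; the headers and addresses below are placeholders, and a str body is encoded as ASCII exactly as in the record:

import aiosmtplib

async def send_message(client: aiosmtplib.SMTP) -> None:
    body = (
        "From: alice@example.com\r\n"
        "To: bob@example.com\r\n"
        "Subject: hello\r\n"
        "\r\n"
        "Hi Bob!\r\n"
    )
    await client.mail("alice@example.com")
    await client.rcpt("bob@example.com")
    response = await client.data(body)  # dot-stuffing happens in the protocol layer
    print(response.code)                # 250 on success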