Dataset columns (type, value or length range):
id: int32, 0 to 252k
repo: string, length 7 to 55
path: string, length 4 to 127
func_name: string, length 1 to 88
original_string: string, length 75 to 19.8k
language: string, 1 distinct value
code: string, length 75 to 19.8k
code_tokens: sequence
docstring: string, length 3 to 17.3k
docstring_tokens: sequence
sha: string, length 40
url: string, length 87 to 242
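The columns above describe one function-level record per row. A minimal sketch of iterating such a dump, assuming it has been exported as JSON Lines with these field names (the file name is hypothetical):

import json

# Minimal sketch: iterate a JSON Lines export of the rows below.
# Each line is assumed to be one JSON object with the columns listed above.
with open("code_functions.jsonl") as fh:
    for line in fh:
        row = json.loads(line)
        # "original_string" and "code" carry the same raw source; the *_tokens
        # columns hold pre-tokenized copies, and sha/url record provenance.
        print(row["id"], row["repo"], row["func_name"])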
1,100
simonvh/genomepy
genomepy/functions.py
Genome.get_random_sequences
def get_random_sequences(self, n=10, length=200, chroms=None, max_n=0.1): """Return random genomic sequences. Parameters ---------- n : int , optional Number of sequences to return. length : int , optional Length of sequences to return. chroms : list , optional Return sequences only from these chromosomes. max_n : float , optional Maximum fraction of Ns. Returns ------- coords : list List with [chrom, start, end] genomic coordinates. """ retries = 100 cutoff = length * max_n if not chroms: chroms = self.keys() try: gap_sizes = self.gap_sizes() except: gap_sizes = {} sizes = dict([(chrom, len(self[chrom]) - gap_sizes.get(chrom, 0)) for chrom in chroms]) l = [(sizes[x], x) for x in chroms if sizes[x] / len(self[x]) > 0.1 and sizes[x] > 10 * length] chroms = _weighted_selection(l, n) coords = [] count = {} for chrom in chroms: if chrom in count: count[chrom] += 1 else: count[chrom] = 1 for chrom in chroms: for i in range(retries): start = int(random.random() * (sizes[chrom] - length)) end = start + length count_n = self[chrom][start:end].seq.upper().count("N") if count_n <= cutoff: break if count_n > cutoff: raise ValueError("Failed to find suitable non-N sequence for {}".format(chrom)) coords.append([chrom, start, end]) return coords
python
def get_random_sequences(self, n=10, length=200, chroms=None, max_n=0.1): """Return random genomic sequences. Parameters ---------- n : int , optional Number of sequences to return. length : int , optional Length of sequences to return. chroms : list , optional Return sequences only from these chromosomes. max_n : float , optional Maximum fraction of Ns. Returns ------- coords : list List with [chrom, start, end] genomic coordinates. """ retries = 100 cutoff = length * max_n if not chroms: chroms = self.keys() try: gap_sizes = self.gap_sizes() except: gap_sizes = {} sizes = dict([(chrom, len(self[chrom]) - gap_sizes.get(chrom, 0)) for chrom in chroms]) l = [(sizes[x], x) for x in chroms if sizes[x] / len(self[x]) > 0.1 and sizes[x] > 10 * length] chroms = _weighted_selection(l, n) coords = [] count = {} for chrom in chroms: if chrom in count: count[chrom] += 1 else: count[chrom] = 1 for chrom in chroms: for i in range(retries): start = int(random.random() * (sizes[chrom] - length)) end = start + length count_n = self[chrom][start:end].seq.upper().count("N") if count_n <= cutoff: break if count_n > cutoff: raise ValueError("Failed to find suitable non-N sequence for {}".format(chrom)) coords.append([chrom, start, end]) return coords
[ "def", "get_random_sequences", "(", "self", ",", "n", "=", "10", ",", "length", "=", "200", ",", "chroms", "=", "None", ",", "max_n", "=", "0.1", ")", ":", "retries", "=", "100", "cutoff", "=", "length", "*", "max_n", "if", "not", "chroms", ":", "chroms", "=", "self", ".", "keys", "(", ")", "try", ":", "gap_sizes", "=", "self", ".", "gap_sizes", "(", ")", "except", ":", "gap_sizes", "=", "{", "}", "sizes", "=", "dict", "(", "[", "(", "chrom", ",", "len", "(", "self", "[", "chrom", "]", ")", "-", "gap_sizes", ".", "get", "(", "chrom", ",", "0", ")", ")", "for", "chrom", "in", "chroms", "]", ")", "l", "=", "[", "(", "sizes", "[", "x", "]", ",", "x", ")", "for", "x", "in", "chroms", "if", "sizes", "[", "x", "]", "/", "len", "(", "self", "[", "x", "]", ")", ">", "0.1", "and", "sizes", "[", "x", "]", ">", "10", "*", "length", "]", "chroms", "=", "_weighted_selection", "(", "l", ",", "n", ")", "coords", "=", "[", "]", "count", "=", "{", "}", "for", "chrom", "in", "chroms", ":", "if", "chrom", "in", "count", ":", "count", "[", "chrom", "]", "+=", "1", "else", ":", "count", "[", "chrom", "]", "=", "1", "for", "chrom", "in", "chroms", ":", "for", "i", "in", "range", "(", "retries", ")", ":", "start", "=", "int", "(", "random", ".", "random", "(", ")", "*", "(", "sizes", "[", "chrom", "]", "-", "length", ")", ")", "end", "=", "start", "+", "length", "count_n", "=", "self", "[", "chrom", "]", "[", "start", ":", "end", "]", ".", "seq", ".", "upper", "(", ")", ".", "count", "(", "\"N\"", ")", "if", "count_n", "<=", "cutoff", ":", "break", "if", "count_n", ">", "cutoff", ":", "raise", "ValueError", "(", "\"Failed to find suitable non-N sequence for {}\"", ".", "format", "(", "chrom", ")", ")", "coords", ".", "append", "(", "[", "chrom", ",", "start", ",", "end", "]", ")", "return", "coords" ]
Return random genomic sequences. Parameters ---------- n : int , optional Number of sequences to return. length : int , optional Length of sequences to return. chroms : list , optional Return sequences only from these chromosomes. max_n : float , optional Maximum fraction of Ns. Returns ------- coords : list List with [chrom, start, end] genomic coordinates.
[ "Return", "random", "genomic", "sequences", "." ]
abace2366511dbe855fe1430b1f7d9ec4cbf6d29
https://github.com/simonvh/genomepy/blob/abace2366511dbe855fe1430b1f7d9ec4cbf6d29/genomepy/functions.py#L455-L512
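A short usage sketch for the method above. It assumes a genome (here "hg38", purely illustrative) was installed beforehand and that the Genome class is exposed at the genomepy package level; treat both as assumptions rather than confirmed API details.

import genomepy

# Illustrative only: pick 5 random 300 bp regions with at most 5% Ns.
g = genomepy.Genome("hg38")
coords = g.get_random_sequences(n=5, length=300, max_n=0.05)
for chrom, start, end in coords:
    print("{}:{}-{}".format(chrom, start, end))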
1,101
simonvh/genomepy
genomepy/cli.py
search
def search(term, provider=None): """Search for genomes that contain TERM in their name or description.""" for row in genomepy.search(term, provider): print("\t".join([x.decode('utf-8', 'ignore') for x in row]))
python
def search(term, provider=None): """Search for genomes that contain TERM in their name or description.""" for row in genomepy.search(term, provider): print("\t".join([x.decode('utf-8', 'ignore') for x in row]))
[ "def", "search", "(", "term", ",", "provider", "=", "None", ")", ":", "for", "row", "in", "genomepy", ".", "search", "(", "term", ",", "provider", ")", ":", "print", "(", "\"\\t\"", ".", "join", "(", "[", "x", ".", "decode", "(", "'utf-8'", ",", "'ignore'", ")", "for", "x", "in", "row", "]", ")", ")" ]
Search for genomes that contain TERM in their name or description.
[ "Search", "for", "genomes", "that", "contain", "TERM", "in", "their", "name", "or", "description", "." ]
abace2366511dbe855fe1430b1f7d9ec4cbf6d29
https://github.com/simonvh/genomepy/blob/abace2366511dbe855fe1430b1f7d9ec4cbf6d29/genomepy/cli.py#L20-L23
1,102
simonvh/genomepy
genomepy/cli.py
install
def install(name, provider, genome_dir, localname, mask, regex, match, annotation): """Install genome NAME from provider PROVIDER in directory GENOME_DIR.""" genomepy.install_genome( name, provider, genome_dir=genome_dir, localname=localname, mask=mask, regex=regex, invert_match=not(match), annotation=annotation)
python
def install(name, provider, genome_dir, localname, mask, regex, match, annotation): """Install genome NAME from provider PROVIDER in directory GENOME_DIR.""" genomepy.install_genome( name, provider, genome_dir=genome_dir, localname=localname, mask=mask, regex=regex, invert_match=not(match), annotation=annotation)
[ "def", "install", "(", "name", ",", "provider", ",", "genome_dir", ",", "localname", ",", "mask", ",", "regex", ",", "match", ",", "annotation", ")", ":", "genomepy", ".", "install_genome", "(", "name", ",", "provider", ",", "genome_dir", "=", "genome_dir", ",", "localname", "=", "localname", ",", "mask", "=", "mask", ",", "regex", "=", "regex", ",", "invert_match", "=", "not", "(", "match", ")", ",", "annotation", "=", "annotation", ")" ]
Install genome NAME from provider PROVIDER in directory GENOME_DIR.
[ "Install", "genome", "NAME", "from", "provider", "PROVIDER", "in", "directory", "GENOME_DIR", "." ]
abace2366511dbe855fe1430b1f7d9ec4cbf6d29
https://github.com/simonvh/genomepy/blob/abace2366511dbe855fe1430b1f7d9ec4cbf6d29/genomepy/cli.py#L34-L38
1,103
simonvh/genomepy
genomepy/utils.py
generate_gap_bed
def generate_gap_bed(fname, outname): """ Generate a BED file with gap locations. Parameters ---------- fname : str Filename of input FASTA file. outname : str Filename of output BED file. """ f = Fasta(fname) with open(outname, "w") as bed: for chrom in f.keys(): for m in re.finditer(r'N+', f[chrom][:].seq): bed.write("{}\t{}\t{}\n".format(chrom, m.start(0), m.end(0)))
python
def generate_gap_bed(fname, outname): """ Generate a BED file with gap locations. Parameters ---------- fname : str Filename of input FASTA file. outname : str Filename of output BED file. """ f = Fasta(fname) with open(outname, "w") as bed: for chrom in f.keys(): for m in re.finditer(r'N+', f[chrom][:].seq): bed.write("{}\t{}\t{}\n".format(chrom, m.start(0), m.end(0)))
[ "def", "generate_gap_bed", "(", "fname", ",", "outname", ")", ":", "f", "=", "Fasta", "(", "fname", ")", "with", "open", "(", "outname", ",", "\"w\"", ")", "as", "bed", ":", "for", "chrom", "in", "f", ".", "keys", "(", ")", ":", "for", "m", "in", "re", ".", "finditer", "(", "r'N+'", ",", "f", "[", "chrom", "]", "[", ":", "]", ".", "seq", ")", ":", "bed", ".", "write", "(", "\"{}\\t{}\\t{}\\n\"", ".", "format", "(", "chrom", ",", "m", ".", "start", "(", "0", ")", ",", "m", ".", "end", "(", "0", ")", ")", ")" ]
Generate a BED file with gap locations. Parameters ---------- fname : str Filename of input FASTA file. outname : str Filename of output BED file.
[ "Generate", "a", "BED", "file", "with", "gap", "locations", "." ]
abace2366511dbe855fe1430b1f7d9ec4cbf6d29
https://github.com/simonvh/genomepy/blob/abace2366511dbe855fe1430b1f7d9ec4cbf6d29/genomepy/utils.py#L10-L25
1,104
simonvh/genomepy
genomepy/utils.py
generate_sizes
def generate_sizes(name, genome_dir): """Generate a sizes file with length of sequences in FASTA file.""" fa = os.path.join(genome_dir, name, "{}.fa".format(name)) sizes = fa + ".sizes" g = Fasta(fa) with open(sizes, "w") as f: for seqname in g.keys(): f.write("{}\t{}\n".format(seqname, len(g[seqname])))
python
def generate_sizes(name, genome_dir): """Generate a sizes file with length of sequences in FASTA file.""" fa = os.path.join(genome_dir, name, "{}.fa".format(name)) sizes = fa + ".sizes" g = Fasta(fa) with open(sizes, "w") as f: for seqname in g.keys(): f.write("{}\t{}\n".format(seqname, len(g[seqname])))
[ "def", "generate_sizes", "(", "name", ",", "genome_dir", ")", ":", "fa", "=", "os", ".", "path", ".", "join", "(", "genome_dir", ",", "name", ",", "\"{}.fa\"", ".", "format", "(", "name", ")", ")", "sizes", "=", "fa", "+", "\".sizes\"", "g", "=", "Fasta", "(", "fa", ")", "with", "open", "(", "sizes", ",", "\"w\"", ")", "as", "f", ":", "for", "seqname", "in", "g", ".", "keys", "(", ")", ":", "f", ".", "write", "(", "\"{}\\t{}\\n\"", ".", "format", "(", "seqname", ",", "len", "(", "g", "[", "seqname", "]", ")", ")", ")" ]
Generate a sizes file with length of sequences in FASTA file.
[ "Generate", "a", "sizes", "file", "with", "length", "of", "sequences", "in", "FASTA", "file", "." ]
abace2366511dbe855fe1430b1f7d9ec4cbf6d29
https://github.com/simonvh/genomepy/blob/abace2366511dbe855fe1430b1f7d9ec4cbf6d29/genomepy/utils.py#L27-L34
1,105
simonvh/genomepy
genomepy/utils.py
filter_fasta
def filter_fasta(infa, outfa, regex=".*", v=False, force=False): """Filter fasta file based on regex. Parameters ---------- infa : str Filename of input fasta file. outfa : str Filename of output fasta file. Cannot be the same as infa. regex : str, optional Regular expression used for selecting sequences. v : bool, optional If set to True, select all sequence *not* matching regex. force : bool, optional If set to True, overwrite outfa if it already exists. Returns ------- fasta : Fasta instance pyfaidx Fasta instance of newly created file """ if infa == outfa: raise ValueError("Input and output FASTA are the same file.") if os.path.exists(outfa): if force: os.unlink(outfa) if os.path.exists(outfa + ".fai"): os.unlink(outfa + ".fai") else: raise ValueError( "{} already exists, set force to True to overwrite".format(outfa)) filt_function = re.compile(regex).search fa = Fasta(infa, filt_function=filt_function) seqs = fa.keys() if v: original_fa = Fasta(infa) seqs = [s for s in original_fa.keys() if s not in seqs] fa = original_fa if len(seqs) == 0: raise ValueError("No sequences left after filtering!") with open(outfa, "w") as out: for chrom in seqs: out.write(">{}\n".format(fa[chrom].name)) out.write("{}\n".format(fa[chrom][:].seq)) return Fasta(outfa)
python
def filter_fasta(infa, outfa, regex=".*", v=False, force=False): """Filter fasta file based on regex. Parameters ---------- infa : str Filename of input fasta file. outfa : str Filename of output fasta file. Cannot be the same as infa. regex : str, optional Regular expression used for selecting sequences. v : bool, optional If set to True, select all sequence *not* matching regex. force : bool, optional If set to True, overwrite outfa if it already exists. Returns ------- fasta : Fasta instance pyfaidx Fasta instance of newly created file """ if infa == outfa: raise ValueError("Input and output FASTA are the same file.") if os.path.exists(outfa): if force: os.unlink(outfa) if os.path.exists(outfa + ".fai"): os.unlink(outfa + ".fai") else: raise ValueError( "{} already exists, set force to True to overwrite".format(outfa)) filt_function = re.compile(regex).search fa = Fasta(infa, filt_function=filt_function) seqs = fa.keys() if v: original_fa = Fasta(infa) seqs = [s for s in original_fa.keys() if s not in seqs] fa = original_fa if len(seqs) == 0: raise ValueError("No sequences left after filtering!") with open(outfa, "w") as out: for chrom in seqs: out.write(">{}\n".format(fa[chrom].name)) out.write("{}\n".format(fa[chrom][:].seq)) return Fasta(outfa)
[ "def", "filter_fasta", "(", "infa", ",", "outfa", ",", "regex", "=", "\".*\"", ",", "v", "=", "False", ",", "force", "=", "False", ")", ":", "if", "infa", "==", "outfa", ":", "raise", "ValueError", "(", "\"Input and output FASTA are the same file.\"", ")", "if", "os", ".", "path", ".", "exists", "(", "outfa", ")", ":", "if", "force", ":", "os", ".", "unlink", "(", "outfa", ")", "if", "os", ".", "path", ".", "exists", "(", "outfa", "+", "\".fai\"", ")", ":", "os", ".", "unlink", "(", "outfa", "+", "\".fai\"", ")", "else", ":", "raise", "ValueError", "(", "\"{} already exists, set force to True to overwrite\"", ".", "format", "(", "outfa", ")", ")", "filt_function", "=", "re", ".", "compile", "(", "regex", ")", ".", "search", "fa", "=", "Fasta", "(", "infa", ",", "filt_function", "=", "filt_function", ")", "seqs", "=", "fa", ".", "keys", "(", ")", "if", "v", ":", "original_fa", "=", "Fasta", "(", "infa", ")", "seqs", "=", "[", "s", "for", "s", "in", "original_fa", ".", "keys", "(", ")", "if", "s", "not", "in", "seqs", "]", "fa", "=", "original_fa", "if", "len", "(", "seqs", ")", "==", "0", ":", "raise", "ValueError", "(", "\"No sequences left after filtering!\"", ")", "with", "open", "(", "outfa", ",", "\"w\"", ")", "as", "out", ":", "for", "chrom", "in", "seqs", ":", "out", ".", "write", "(", "\">{}\\n\"", ".", "format", "(", "fa", "[", "chrom", "]", ".", "name", ")", ")", "out", ".", "write", "(", "\"{}\\n\"", ".", "format", "(", "fa", "[", "chrom", "]", "[", ":", "]", ".", "seq", ")", ")", "return", "Fasta", "(", "outfa", ")" ]
Filter fasta file based on regex. Parameters ---------- infa : str Filename of input fasta file. outfa : str Filename of output fasta file. Cannot be the same as infa. regex : str, optional Regular expression used for selecting sequences. v : bool, optional If set to True, select all sequence *not* matching regex. force : bool, optional If set to True, overwrite outfa if it already exists. Returns ------- fasta : Fasta instance pyfaidx Fasta instance of newly created file
[ "Filter", "fasta", "file", "based", "on", "regex", "." ]
abace2366511dbe855fe1430b1f7d9ec4cbf6d29
https://github.com/simonvh/genomepy/blob/abace2366511dbe855fe1430b1f7d9ec4cbf6d29/genomepy/utils.py#L36-L89
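A usage sketch for filter_fasta as defined above; the file names and regular expression are placeholders.

# Keep only sequences whose names match the regex; overwrite the output if present.
filtered = filter_fasta("genome.fa", "genome.filtered.fa",
                        regex=r"^chr[0-9XYM]+$", force=True)
print(list(filtered.keys()))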
1,106
simonvh/genomepy
genomepy/utils.py
cmd_ok
def cmd_ok(cmd): """Returns True if cmd can be run. """ try: sp.check_call(cmd, stderr=sp.PIPE, stdout=sp.PIPE) except sp.CalledProcessError: # bwa gives return code of 1 with no argument pass except: sys.stderr.write("{} not found, skipping\n".format(cmd)) return False return True
python
def cmd_ok(cmd): """Returns True if cmd can be run. """ try: sp.check_call(cmd, stderr=sp.PIPE, stdout=sp.PIPE) except sp.CalledProcessError: # bwa gives return code of 1 with no argument pass except: sys.stderr.write("{} not found, skipping\n".format(cmd)) return False return True
[ "def", "cmd_ok", "(", "cmd", ")", ":", "try", ":", "sp", ".", "check_call", "(", "cmd", ",", "stderr", "=", "sp", ".", "PIPE", ",", "stdout", "=", "sp", ".", "PIPE", ")", "except", "sp", ".", "CalledProcessError", ":", "# bwa gives return code of 1 with no argument", "pass", "except", ":", "sys", ".", "stderr", ".", "write", "(", "\"{} not found, skipping\\n\"", ".", "format", "(", "cmd", ")", ")", "return", "False", "return", "True" ]
Returns True if cmd can be run.
[ "Returns", "True", "if", "cmd", "can", "be", "run", "." ]
abace2366511dbe855fe1430b1f7d9ec4cbf6d29
https://github.com/simonvh/genomepy/blob/abace2366511dbe855fe1430b1f7d9ec4cbf6d29/genomepy/utils.py#L101-L112
1,107
simonvh/genomepy
genomepy/utils.py
run_index_cmd
def run_index_cmd(name, cmd): """Run command, show errors if the returncode is non-zero.""" sys.stderr.write("Creating {} index...\n".format(name)) # Create index p = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE) stdout, stderr = p.communicate() if p.returncode != 0: sys.stderr.write("Index for {} failed\n".format(name)) sys.stderr.write(stdout) sys.stderr.write(stderr)
python
def run_index_cmd(name, cmd): """Run command, show errors if the returncode is non-zero.""" sys.stderr.write("Creating {} index...\n".format(name)) # Create index p = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE) stdout, stderr = p.communicate() if p.returncode != 0: sys.stderr.write("Index for {} failed\n".format(name)) sys.stderr.write(stdout) sys.stderr.write(stderr)
[ "def", "run_index_cmd", "(", "name", ",", "cmd", ")", ":", "sys", ".", "stderr", ".", "write", "(", "\"Creating {} index...\\n\"", ".", "format", "(", "name", ")", ")", "# Create index", "p", "=", "sp", ".", "Popen", "(", "cmd", ",", "shell", "=", "True", ",", "stdout", "=", "sp", ".", "PIPE", ",", "stderr", "=", "sp", ".", "PIPE", ")", "stdout", ",", "stderr", "=", "p", ".", "communicate", "(", ")", "if", "p", ".", "returncode", "!=", "0", ":", "sys", ".", "stderr", ".", "write", "(", "\"Index for {} failed\\n\"", ".", "format", "(", "name", ")", ")", "sys", ".", "stderr", ".", "write", "(", "stdout", ")", "sys", ".", "stderr", ".", "write", "(", "stderr", ")" ]
Run command, show errors if the returncode is non-zero.
[ "Run", "command", "show", "errors", "if", "the", "returncode", "is", "non", "-", "zero", "." ]
abace2366511dbe855fe1430b1f7d9ec4cbf6d29
https://github.com/simonvh/genomepy/blob/abace2366511dbe855fe1430b1f7d9ec4cbf6d29/genomepy/utils.py#L114-L123
1,108
peo3/cgroup-utils
cgutils/cgroup.py
scan_cgroups
def scan_cgroups(subsys_name, filters=list()): """ It returns a control group hierarchy which belong to the subsys_name. When collecting cgroups, filters are applied to the cgroups. See pydoc of apply_filters method of CGroup for more information about the filters. """ status = SubsystemStatus() if subsys_name not in status.get_all(): raise NoSuchSubsystemError("No such subsystem found: " + subsys_name) if subsys_name not in status.get_available(): raise EnvironmentError("Disabled in the kernel: " + subsys_name) if subsys_name not in status.get_enabled(): raise EnvironmentError("Not enabled in the system: " + subsys_name) subsystem = _get_subsystem(subsys_name) mount_point = status.get_path(subsys_name) return _scan_cgroups_recursive(subsystem, mount_point, mount_point, filters)
python
def scan_cgroups(subsys_name, filters=list()): """ It returns a control group hierarchy which belong to the subsys_name. When collecting cgroups, filters are applied to the cgroups. See pydoc of apply_filters method of CGroup for more information about the filters. """ status = SubsystemStatus() if subsys_name not in status.get_all(): raise NoSuchSubsystemError("No such subsystem found: " + subsys_name) if subsys_name not in status.get_available(): raise EnvironmentError("Disabled in the kernel: " + subsys_name) if subsys_name not in status.get_enabled(): raise EnvironmentError("Not enabled in the system: " + subsys_name) subsystem = _get_subsystem(subsys_name) mount_point = status.get_path(subsys_name) return _scan_cgroups_recursive(subsystem, mount_point, mount_point, filters)
[ "def", "scan_cgroups", "(", "subsys_name", ",", "filters", "=", "list", "(", ")", ")", ":", "status", "=", "SubsystemStatus", "(", ")", "if", "subsys_name", "not", "in", "status", ".", "get_all", "(", ")", ":", "raise", "NoSuchSubsystemError", "(", "\"No such subsystem found: \"", "+", "subsys_name", ")", "if", "subsys_name", "not", "in", "status", ".", "get_available", "(", ")", ":", "raise", "EnvironmentError", "(", "\"Disabled in the kernel: \"", "+", "subsys_name", ")", "if", "subsys_name", "not", "in", "status", ".", "get_enabled", "(", ")", ":", "raise", "EnvironmentError", "(", "\"Not enabled in the system: \"", "+", "subsys_name", ")", "subsystem", "=", "_get_subsystem", "(", "subsys_name", ")", "mount_point", "=", "status", ".", "get_path", "(", "subsys_name", ")", "return", "_scan_cgroups_recursive", "(", "subsystem", ",", "mount_point", ",", "mount_point", ",", "filters", ")" ]
It returns a control group hierarchy which belong to the subsys_name. When collecting cgroups, filters are applied to the cgroups. See pydoc of apply_filters method of CGroup for more information about the filters.
[ "It", "returns", "a", "control", "group", "hierarchy", "which", "belong", "to", "the", "subsys_name", ".", "When", "collecting", "cgroups", "filters", "are", "applied", "to", "the", "cgroups", ".", "See", "pydoc", "of", "apply_filters", "method", "of", "CGroup", "for", "more", "information", "about", "the", "filters", "." ]
fd7e99f438ce334bac5669fba0d08a6502fd7a82
https://github.com/peo3/cgroup-utils/blob/fd7e99f438ce334bac5669fba0d08a6502fd7a82/cgutils/cgroup.py#L917-L935
1,109
peo3/cgroup-utils
cgutils/cgroup.py
walk_cgroups
def walk_cgroups(cgroup, action, opaque): """ The function applies the action function with the opaque object to each control group under the cgroup recursively. """ action(cgroup, opaque) for child in cgroup.childs: walk_cgroups(child, action, opaque)
python
def walk_cgroups(cgroup, action, opaque): """ The function applies the action function with the opaque object to each control group under the cgroup recursively. """ action(cgroup, opaque) for child in cgroup.childs: walk_cgroups(child, action, opaque)
[ "def", "walk_cgroups", "(", "cgroup", ",", "action", ",", "opaque", ")", ":", "action", "(", "cgroup", ",", "opaque", ")", "for", "child", "in", "cgroup", ".", "childs", ":", "walk_cgroups", "(", "child", ",", "action", ",", "opaque", ")" ]
The function applies the action function with the opaque object to each control group under the cgroup recursively.
[ "The", "function", "applies", "the", "action", "function", "with", "the", "opaque", "object", "to", "each", "control", "group", "under", "the", "cgroup", "recursively", "." ]
fd7e99f438ce334bac5669fba0d08a6502fd7a82
https://github.com/peo3/cgroup-utils/blob/fd7e99f438ce334bac5669fba0d08a6502fd7a82/cgutils/cgroup.py#L938-L945
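Based on the two helpers above, a sketch of collecting every control group in one hierarchy; 'cpu' is just an example subsystem name.

from cgutils.cgroup import scan_cgroups, walk_cgroups

# scan_cgroups raises if the subsystem is unknown, disabled, or not enabled.
root = scan_cgroups('cpu')
found = []
# walk_cgroups calls the action for the root and, recursively, every child.
walk_cgroups(root, lambda cgroup, acc: acc.append(cgroup), found)
print('{} cgroups found under the cpu hierarchy'.format(len(found)))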
1,110
peo3/cgroup-utils
cgutils/cgroup.py
get_cgroup
def get_cgroup(fullpath): """ It returns a CGroup object which is pointed by the fullpath. """ # Canonicalize symbolic links fullpath = os.path.realpath(fullpath) status = SubsystemStatus() name = None for name, path in status.paths.items(): if path in fullpath: break else: raise Exception('Invalid path: ' + fullpath) subsys = _get_subsystem(name) return CGroup(subsys, fullpath)
python
def get_cgroup(fullpath): """ It returns a CGroup object which is pointed by the fullpath. """ # Canonicalize symbolic links fullpath = os.path.realpath(fullpath) status = SubsystemStatus() name = None for name, path in status.paths.items(): if path in fullpath: break else: raise Exception('Invalid path: ' + fullpath) subsys = _get_subsystem(name) return CGroup(subsys, fullpath)
[ "def", "get_cgroup", "(", "fullpath", ")", ":", "# Canonicalize symbolic links", "fullpath", "=", "os", ".", "path", ".", "realpath", "(", "fullpath", ")", "status", "=", "SubsystemStatus", "(", ")", "name", "=", "None", "for", "name", ",", "path", "in", "status", ".", "paths", ".", "items", "(", ")", ":", "if", "path", "in", "fullpath", ":", "break", "else", ":", "raise", "Exception", "(", "'Invalid path: '", "+", "fullpath", ")", "subsys", "=", "_get_subsystem", "(", "name", ")", "return", "CGroup", "(", "subsys", ",", "fullpath", ")" ]
It returns a CGroup object which is pointed by the fullpath.
[ "It", "returns", "a", "CGroup", "object", "which", "is", "pointed", "by", "the", "fullpath", "." ]
fd7e99f438ce334bac5669fba0d08a6502fd7a82
https://github.com/peo3/cgroup-utils/blob/fd7e99f438ce334bac5669fba0d08a6502fd7a82/cgutils/cgroup.py#L948-L964
1,111
peo3/cgroup-utils
cgutils/cgroup.py
RdmaStat.parse
def parse(content): """ Parse rdma.curren and rdma.max Example contents: mlx4_0 hca_handle=2 hca_object=2000 ocrdma1 hca_handle=3 hca_object=max >>> RdmaStat.parse("mlx4_0 hca_handle=2 hca_object=2000\\nocrdma1 hca_handle=3 hca_object=max") {'mlx4_0': {'hca_handle': 2, 'hca_object': 2000}, 'ocrdma1': {'hca_handle': 3, 'hca_object': 'max'}} """ ret = {} lines = content.split('\n') for line in lines: m = RdmaStat._RE.match(line) if m is None: continue name = m.group('name') hca_handle = long(m.group('hca_handle')) hca_object = m.group('hca_object') if hca_object != "max": hca_object = long(hca_object) ret[name] = {"hca_handle": hca_handle, "hca_object": hca_object} return ret
python
def parse(content): """ Parse rdma.curren and rdma.max Example contents: mlx4_0 hca_handle=2 hca_object=2000 ocrdma1 hca_handle=3 hca_object=max >>> RdmaStat.parse("mlx4_0 hca_handle=2 hca_object=2000\\nocrdma1 hca_handle=3 hca_object=max") {'mlx4_0': {'hca_handle': 2, 'hca_object': 2000}, 'ocrdma1': {'hca_handle': 3, 'hca_object': 'max'}} """ ret = {} lines = content.split('\n') for line in lines: m = RdmaStat._RE.match(line) if m is None: continue name = m.group('name') hca_handle = long(m.group('hca_handle')) hca_object = m.group('hca_object') if hca_object != "max": hca_object = long(hca_object) ret[name] = {"hca_handle": hca_handle, "hca_object": hca_object} return ret
[ "def", "parse", "(", "content", ")", ":", "ret", "=", "{", "}", "lines", "=", "content", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "m", "=", "RdmaStat", ".", "_RE", ".", "match", "(", "line", ")", "if", "m", "is", "None", ":", "continue", "name", "=", "m", ".", "group", "(", "'name'", ")", "hca_handle", "=", "long", "(", "m", ".", "group", "(", "'hca_handle'", ")", ")", "hca_object", "=", "m", ".", "group", "(", "'hca_object'", ")", "if", "hca_object", "!=", "\"max\"", ":", "hca_object", "=", "long", "(", "hca_object", ")", "ret", "[", "name", "]", "=", "{", "\"hca_handle\"", ":", "hca_handle", ",", "\"hca_object\"", ":", "hca_object", "}", "return", "ret" ]
Parse rdma.curren and rdma.max Example contents: mlx4_0 hca_handle=2 hca_object=2000 ocrdma1 hca_handle=3 hca_object=max >>> RdmaStat.parse("mlx4_0 hca_handle=2 hca_object=2000\\nocrdma1 hca_handle=3 hca_object=max") {'mlx4_0': {'hca_handle': 2, 'hca_object': 2000}, 'ocrdma1': {'hca_handle': 3, 'hca_object': 'max'}}
[ "Parse", "rdma", ".", "curren", "and", "rdma", ".", "max" ]
fd7e99f438ce334bac5669fba0d08a6502fd7a82
https://github.com/peo3/cgroup-utils/blob/fd7e99f438ce334bac5669fba0d08a6502fd7a82/cgutils/cgroup.py#L313-L335
1,112
peo3/cgroup-utils
cgutils/cgroup.py
CGroup.apply_filters
def apply_filters(self, filters): """ It applies a specified filters. The filters are used to reduce the control groups which are accessed by get_confgs, get_stats, and get_defaults methods. """ _configs = self.configs _stats = self.stats self.configs = {} self.stats = {} for f in filters: if f in _configs: self.configs[f] = _configs[f] elif f in _stats: self.stats[f] = _stats[f] else: raise NoSuchControlFileError("%s for %s" % (f, self.subsystem.name))
python
def apply_filters(self, filters): """ It applies a specified filters. The filters are used to reduce the control groups which are accessed by get_confgs, get_stats, and get_defaults methods. """ _configs = self.configs _stats = self.stats self.configs = {} self.stats = {} for f in filters: if f in _configs: self.configs[f] = _configs[f] elif f in _stats: self.stats[f] = _stats[f] else: raise NoSuchControlFileError("%s for %s" % (f, self.subsystem.name))
[ "def", "apply_filters", "(", "self", ",", "filters", ")", ":", "_configs", "=", "self", ".", "configs", "_stats", "=", "self", ".", "stats", "self", ".", "configs", "=", "{", "}", "self", ".", "stats", "=", "{", "}", "for", "f", "in", "filters", ":", "if", "f", "in", "_configs", ":", "self", ".", "configs", "[", "f", "]", "=", "_configs", "[", "f", "]", "elif", "f", "in", "_stats", ":", "self", ".", "stats", "[", "f", "]", "=", "_stats", "[", "f", "]", "else", ":", "raise", "NoSuchControlFileError", "(", "\"%s for %s\"", "%", "(", "f", ",", "self", ".", "subsystem", ".", "name", ")", ")" ]
It applies a specified filters. The filters are used to reduce the control groups which are accessed by get_confgs, get_stats, and get_defaults methods.
[ "It", "applies", "a", "specified", "filters", ".", "The", "filters", "are", "used", "to", "reduce", "the", "control", "groups", "which", "are", "accessed", "by", "get_confgs", "get_stats", "and", "get_defaults", "methods", "." ]
fd7e99f438ce334bac5669fba0d08a6502fd7a82
https://github.com/peo3/cgroup-utils/blob/fd7e99f438ce334bac5669fba0d08a6502fd7a82/cgutils/cgroup.py#L722-L737
1,113
peo3/cgroup-utils
cgutils/cgroup.py
CGroup.get_configs
def get_configs(self): """ It returns a name and a current value pairs of control files which are categorised in the configs group. """ configs = {} for name, default in self.configs.items(): cls = default.__class__ path = self.paths[name] if os.path.exists(path): try: configs[name] = self._PARSERS[cls](fileops.read(path)) except IOError as e: if e.errno == errno.EOPNOTSUPP: # Since 3.5 memory.memsw.* are always created even if disabled. # If disabled we will get EOPNOTSUPP when read or write them. # See commit af36f906c0f4c2ffa0482ecdf856a33dc88ae8c5 of the kernel. pass else: raise return configs
python
def get_configs(self): """ It returns a name and a current value pairs of control files which are categorised in the configs group. """ configs = {} for name, default in self.configs.items(): cls = default.__class__ path = self.paths[name] if os.path.exists(path): try: configs[name] = self._PARSERS[cls](fileops.read(path)) except IOError as e: if e.errno == errno.EOPNOTSUPP: # Since 3.5 memory.memsw.* are always created even if disabled. # If disabled we will get EOPNOTSUPP when read or write them. # See commit af36f906c0f4c2ffa0482ecdf856a33dc88ae8c5 of the kernel. pass else: raise return configs
[ "def", "get_configs", "(", "self", ")", ":", "configs", "=", "{", "}", "for", "name", ",", "default", "in", "self", ".", "configs", ".", "items", "(", ")", ":", "cls", "=", "default", ".", "__class__", "path", "=", "self", ".", "paths", "[", "name", "]", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "try", ":", "configs", "[", "name", "]", "=", "self", ".", "_PARSERS", "[", "cls", "]", "(", "fileops", ".", "read", "(", "path", ")", ")", "except", "IOError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "EOPNOTSUPP", ":", "# Since 3.5 memory.memsw.* are always created even if disabled.", "# If disabled we will get EOPNOTSUPP when read or write them.", "# See commit af36f906c0f4c2ffa0482ecdf856a33dc88ae8c5 of the kernel.", "pass", "else", ":", "raise", "return", "configs" ]
It returns a name and a current value pairs of control files which are categorised in the configs group.
[ "It", "returns", "a", "name", "and", "a", "current", "value", "pairs", "of", "control", "files", "which", "are", "categorised", "in", "the", "configs", "group", "." ]
fd7e99f438ce334bac5669fba0d08a6502fd7a82
https://github.com/peo3/cgroup-utils/blob/fd7e99f438ce334bac5669fba0d08a6502fd7a82/cgutils/cgroup.py#L739-L759
1,114
peo3/cgroup-utils
cgutils/cgroup.py
CGroup.get_stats
def get_stats(self): """ It returns a name and a value pairs of control files which are categorised in the stats group. """ stats = {} for name, cls in self.stats.items(): path = self.paths[name] if os.path.exists(path): try: stats[name] = self._PARSERS[cls](fileops.read(path)) except IOError as e: # XXX: we have to distinguish unexpected errors from the expected ones if e.errno == errno.EOPNOTSUPP: # Since 3.5 memory.memsw.* are always created even if disabled. # If disabled we will get EOPNOTSUPP when read or write them. # See commit af36f906c0f4c2ffa0482ecdf856a33dc88ae8c5 of the kernel. pass if e.errno == errno.EIO: # memory.kmem.slabinfo throws EIO until limit_in_bytes is set. pass else: raise return stats
python
def get_stats(self): """ It returns a name and a value pairs of control files which are categorised in the stats group. """ stats = {} for name, cls in self.stats.items(): path = self.paths[name] if os.path.exists(path): try: stats[name] = self._PARSERS[cls](fileops.read(path)) except IOError as e: # XXX: we have to distinguish unexpected errors from the expected ones if e.errno == errno.EOPNOTSUPP: # Since 3.5 memory.memsw.* are always created even if disabled. # If disabled we will get EOPNOTSUPP when read or write them. # See commit af36f906c0f4c2ffa0482ecdf856a33dc88ae8c5 of the kernel. pass if e.errno == errno.EIO: # memory.kmem.slabinfo throws EIO until limit_in_bytes is set. pass else: raise return stats
[ "def", "get_stats", "(", "self", ")", ":", "stats", "=", "{", "}", "for", "name", ",", "cls", "in", "self", ".", "stats", ".", "items", "(", ")", ":", "path", "=", "self", ".", "paths", "[", "name", "]", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "try", ":", "stats", "[", "name", "]", "=", "self", ".", "_PARSERS", "[", "cls", "]", "(", "fileops", ".", "read", "(", "path", ")", ")", "except", "IOError", "as", "e", ":", "# XXX: we have to distinguish unexpected errors from the expected ones", "if", "e", ".", "errno", "==", "errno", ".", "EOPNOTSUPP", ":", "# Since 3.5 memory.memsw.* are always created even if disabled.", "# If disabled we will get EOPNOTSUPP when read or write them.", "# See commit af36f906c0f4c2ffa0482ecdf856a33dc88ae8c5 of the kernel.", "pass", "if", "e", ".", "errno", "==", "errno", ".", "EIO", ":", "# memory.kmem.slabinfo throws EIO until limit_in_bytes is set.", "pass", "else", ":", "raise", "return", "stats" ]
It returns a name and a value pairs of control files which are categorised in the stats group.
[ "It", "returns", "a", "name", "and", "a", "value", "pairs", "of", "control", "files", "which", "are", "categorised", "in", "the", "stats", "group", "." ]
fd7e99f438ce334bac5669fba0d08a6502fd7a82
https://github.com/peo3/cgroup-utils/blob/fd7e99f438ce334bac5669fba0d08a6502fd7a82/cgutils/cgroup.py#L768-L791
1,115
peo3/cgroup-utils
cgutils/cgroup.py
CGroup.update
def update(self): """It updates process information of the cgroup.""" pids = fileops.readlines(self.paths['cgroup.procs']) self.pids = [int(pid) for pid in pids if pid != ''] self.n_procs = len(pids)
python
def update(self): """It updates process information of the cgroup.""" pids = fileops.readlines(self.paths['cgroup.procs']) self.pids = [int(pid) for pid in pids if pid != ''] self.n_procs = len(pids)
[ "def", "update", "(", "self", ")", ":", "pids", "=", "fileops", ".", "readlines", "(", "self", ".", "paths", "[", "'cgroup.procs'", "]", ")", "self", ".", "pids", "=", "[", "int", "(", "pid", ")", "for", "pid", "in", "pids", "if", "pid", "!=", "''", "]", "self", ".", "n_procs", "=", "len", "(", "pids", ")" ]
It updates process information of the cgroup.
[ "It", "updates", "process", "information", "of", "the", "cgroup", "." ]
fd7e99f438ce334bac5669fba0d08a6502fd7a82
https://github.com/peo3/cgroup-utils/blob/fd7e99f438ce334bac5669fba0d08a6502fd7a82/cgutils/cgroup.py#L793-L797
1,116
peo3/cgroup-utils
cgutils/cgroup.py
EventListener.wait
def wait(self): """ It returns when an event which we have configured by set_threshold happens. Note that it blocks until then. """ ret = os.read(self.event_fd, 64 / 8) return struct.unpack('Q', ret)
python
def wait(self): """ It returns when an event which we have configured by set_threshold happens. Note that it blocks until then. """ ret = os.read(self.event_fd, 64 / 8) return struct.unpack('Q', ret)
[ "def", "wait", "(", "self", ")", ":", "ret", "=", "os", ".", "read", "(", "self", ".", "event_fd", ",", "64", "/", "8", ")", "return", "struct", ".", "unpack", "(", "'Q'", ",", "ret", ")" ]
It returns when an event which we have configured by set_threshold happens. Note that it blocks until then.
[ "It", "returns", "when", "an", "event", "which", "we", "have", "configured", "by", "set_threshold", "happens", ".", "Note", "that", "it", "blocks", "until", "then", "." ]
fd7e99f438ce334bac5669fba0d08a6502fd7a82
https://github.com/peo3/cgroup-utils/blob/fd7e99f438ce334bac5669fba0d08a6502fd7a82/cgutils/cgroup.py#L887-L893
1,117
peakwinter/python-nginx
nginx.py
dumpf
def dumpf(obj, path): """ Write an nginx configuration to file. :param obj obj: nginx object (Conf, Server, Container) :param str path: path to nginx configuration on disk :returns: path the configuration was written to """ with open(path, 'w') as f: dump(obj, f) return path
python
def dumpf(obj, path): """ Write an nginx configuration to file. :param obj obj: nginx object (Conf, Server, Container) :param str path: path to nginx configuration on disk :returns: path the configuration was written to """ with open(path, 'w') as f: dump(obj, f) return path
[ "def", "dumpf", "(", "obj", ",", "path", ")", ":", "with", "open", "(", "path", ",", "'w'", ")", "as", "f", ":", "dump", "(", "obj", ",", "f", ")", "return", "path" ]
Write an nginx configuration to file. :param obj obj: nginx object (Conf, Server, Container) :param str path: path to nginx configuration on disk :returns: path the configuration was written to
[ "Write", "an", "nginx", "configuration", "to", "file", "." ]
4ecd1cd2e1f11ffb633d188a578a004712eaae16
https://github.com/peakwinter/python-nginx/blob/4ecd1cd2e1f11ffb633d188a578a004712eaae16/nginx.py#L581-L591
1,118
peakwinter/python-nginx
nginx.py
Conf.as_strings
def as_strings(self): """Return the entire Conf as nginx config strings.""" ret = [] for x in self.children: if isinstance(x, (Key, Comment)): ret.append(x.as_strings) else: for y in x.as_strings: ret.append(y) if ret: ret[-1] = re.sub('}\n+$', '}\n', ret[-1]) return ret
python
def as_strings(self): """Return the entire Conf as nginx config strings.""" ret = [] for x in self.children: if isinstance(x, (Key, Comment)): ret.append(x.as_strings) else: for y in x.as_strings: ret.append(y) if ret: ret[-1] = re.sub('}\n+$', '}\n', ret[-1]) return ret
[ "def", "as_strings", "(", "self", ")", ":", "ret", "=", "[", "]", "for", "x", "in", "self", ".", "children", ":", "if", "isinstance", "(", "x", ",", "(", "Key", ",", "Comment", ")", ")", ":", "ret", ".", "append", "(", "x", ".", "as_strings", ")", "else", ":", "for", "y", "in", "x", ".", "as_strings", ":", "ret", ".", "append", "(", "y", ")", "if", "ret", ":", "ret", "[", "-", "1", "]", "=", "re", ".", "sub", "(", "'}\\n+$'", ",", "'}\\n'", ",", "ret", "[", "-", "1", "]", ")", "return", "ret" ]
Return the entire Conf as nginx config strings.
[ "Return", "the", "entire", "Conf", "as", "nginx", "config", "strings", "." ]
4ecd1cd2e1f11ffb633d188a578a004712eaae16
https://github.com/peakwinter/python-nginx/blob/4ecd1cd2e1f11ffb633d188a578a004712eaae16/nginx.py#L98-L109
1,119
peakwinter/python-nginx
nginx.py
Container.as_list
def as_list(self): """Return all child objects in nested lists of strings.""" return [self.name, self.value, [x.as_list for x in self.children]]
python
def as_list(self): """Return all child objects in nested lists of strings.""" return [self.name, self.value, [x.as_list for x in self.children]]
[ "def", "as_list", "(", "self", ")", ":", "return", "[", "self", ".", "name", ",", "self", ".", "value", ",", "[", "x", ".", "as_list", "for", "x", "in", "self", ".", "children", "]", "]" ]
Return all child objects in nested lists of strings.
[ "Return", "all", "child", "objects", "in", "nested", "lists", "of", "strings", "." ]
4ecd1cd2e1f11ffb633d188a578a004712eaae16
https://github.com/peakwinter/python-nginx/blob/4ecd1cd2e1f11ffb633d188a578a004712eaae16/nginx.py#L190-L192
1,120
peakwinter/python-nginx
nginx.py
Container.as_dict
def as_dict(self): """Return all child objects in nested dict.""" dicts = [x.as_dict for x in self.children] return {'{0} {1}'.format(self.name, self.value): dicts}
python
def as_dict(self): """Return all child objects in nested dict.""" dicts = [x.as_dict for x in self.children] return {'{0} {1}'.format(self.name, self.value): dicts}
[ "def", "as_dict", "(", "self", ")", ":", "dicts", "=", "[", "x", ".", "as_dict", "for", "x", "in", "self", ".", "children", "]", "return", "{", "'{0} {1}'", ".", "format", "(", "self", ".", "name", ",", "self", ".", "value", ")", ":", "dicts", "}" ]
Return all child objects in nested dict.
[ "Return", "all", "child", "objects", "in", "nested", "dict", "." ]
4ecd1cd2e1f11ffb633d188a578a004712eaae16
https://github.com/peakwinter/python-nginx/blob/4ecd1cd2e1f11ffb633d188a578a004712eaae16/nginx.py#L195-L198
1,121
peakwinter/python-nginx
nginx.py
Container.as_strings
def as_strings(self): """Return the entire Container as nginx config strings.""" ret = [] container_title = (INDENT * self._depth) container_title += '{0}{1} {{\n'.format( self.name, (' {0}'.format(self.value) if self.value else '') ) ret.append(container_title) for x in self.children: if isinstance(x, Key): ret.append(INDENT + x.as_strings) elif isinstance(x, Comment): if x.inline and len(ret) >= 1: ret[-1] = ret[-1].rstrip('\n') + ' ' + x.as_strings else: ret.append(INDENT + x.as_strings) elif isinstance(x, Container): y = x.as_strings ret.append('\n' + y[0]) for z in y[1:]: ret.append(INDENT + z) else: y = x.as_strings ret.append(INDENT + y) ret[-1] = re.sub('}\n+$', '}\n', ret[-1]) ret.append('}\n\n') return ret
python
def as_strings(self): """Return the entire Container as nginx config strings.""" ret = [] container_title = (INDENT * self._depth) container_title += '{0}{1} {{\n'.format( self.name, (' {0}'.format(self.value) if self.value else '') ) ret.append(container_title) for x in self.children: if isinstance(x, Key): ret.append(INDENT + x.as_strings) elif isinstance(x, Comment): if x.inline and len(ret) >= 1: ret[-1] = ret[-1].rstrip('\n') + ' ' + x.as_strings else: ret.append(INDENT + x.as_strings) elif isinstance(x, Container): y = x.as_strings ret.append('\n' + y[0]) for z in y[1:]: ret.append(INDENT + z) else: y = x.as_strings ret.append(INDENT + y) ret[-1] = re.sub('}\n+$', '}\n', ret[-1]) ret.append('}\n\n') return ret
[ "def", "as_strings", "(", "self", ")", ":", "ret", "=", "[", "]", "container_title", "=", "(", "INDENT", "*", "self", ".", "_depth", ")", "container_title", "+=", "'{0}{1} {{\\n'", ".", "format", "(", "self", ".", "name", ",", "(", "' {0}'", ".", "format", "(", "self", ".", "value", ")", "if", "self", ".", "value", "else", "''", ")", ")", "ret", ".", "append", "(", "container_title", ")", "for", "x", "in", "self", ".", "children", ":", "if", "isinstance", "(", "x", ",", "Key", ")", ":", "ret", ".", "append", "(", "INDENT", "+", "x", ".", "as_strings", ")", "elif", "isinstance", "(", "x", ",", "Comment", ")", ":", "if", "x", ".", "inline", "and", "len", "(", "ret", ")", ">=", "1", ":", "ret", "[", "-", "1", "]", "=", "ret", "[", "-", "1", "]", ".", "rstrip", "(", "'\\n'", ")", "+", "' '", "+", "x", ".", "as_strings", "else", ":", "ret", ".", "append", "(", "INDENT", "+", "x", ".", "as_strings", ")", "elif", "isinstance", "(", "x", ",", "Container", ")", ":", "y", "=", "x", ".", "as_strings", "ret", ".", "append", "(", "'\\n'", "+", "y", "[", "0", "]", ")", "for", "z", "in", "y", "[", "1", ":", "]", ":", "ret", ".", "append", "(", "INDENT", "+", "z", ")", "else", ":", "y", "=", "x", ".", "as_strings", "ret", ".", "append", "(", "INDENT", "+", "y", ")", "ret", "[", "-", "1", "]", "=", "re", ".", "sub", "(", "'}\\n+$'", ",", "'}\\n'", ",", "ret", "[", "-", "1", "]", ")", "ret", ".", "append", "(", "'}\\n\\n'", ")", "return", "ret" ]
Return the entire Container as nginx config strings.
[ "Return", "the", "entire", "Container", "as", "nginx", "config", "strings", "." ]
4ecd1cd2e1f11ffb633d188a578a004712eaae16
https://github.com/peakwinter/python-nginx/blob/4ecd1cd2e1f11ffb633d188a578a004712eaae16/nginx.py#L201-L227
1,122
peakwinter/python-nginx
nginx.py
Key.as_strings
def as_strings(self): """Return key as nginx config string.""" if self.value == '' or self.value is None: return '{0};\n'.format(self.name) if '"' not in self.value and (';' in self.value or '#' in self.value): return '{0} "{1}";\n'.format(self.name, self.value) return '{0} {1};\n'.format(self.name, self.value)
python
def as_strings(self): """Return key as nginx config string.""" if self.value == '' or self.value is None: return '{0};\n'.format(self.name) if '"' not in self.value and (';' in self.value or '#' in self.value): return '{0} "{1}";\n'.format(self.name, self.value) return '{0} {1};\n'.format(self.name, self.value)
[ "def", "as_strings", "(", "self", ")", ":", "if", "self", ".", "value", "==", "''", "or", "self", ".", "value", "is", "None", ":", "return", "'{0};\\n'", ".", "format", "(", "self", ".", "name", ")", "if", "'\"'", "not", "in", "self", ".", "value", "and", "(", "';'", "in", "self", ".", "value", "or", "'#'", "in", "self", ".", "value", ")", ":", "return", "'{0} \"{1}\";\\n'", ".", "format", "(", "self", ".", "name", ",", "self", ".", "value", ")", "return", "'{0} {1};\\n'", ".", "format", "(", "self", ".", "name", ",", "self", ".", "value", ")" ]
Return key as nginx config string.
[ "Return", "key", "as", "nginx", "config", "string", "." ]
4ecd1cd2e1f11ffb633d188a578a004712eaae16
https://github.com/peakwinter/python-nginx/blob/4ecd1cd2e1f11ffb633d188a578a004712eaae16/nginx.py#L390-L396
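A small sketch of the quoting rule above. It assumes python-nginx's Key(name, value) constructor, and reads as_strings as an attribute, matching how Conf.as_strings above accesses it.

from nginx import Key

# Assumption: Key takes (name, value); as_strings is accessed without parentheses.
print(Key('worker_processes', 'auto').as_strings)        # worker_processes auto;
print(Key('add_header', 'X-Hashtag #nginx').as_strings)
# A value containing ';' or '#' (and no double quote) is emitted wrapped in quotes.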
1,123
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
convert_aws_args
def convert_aws_args(aws_args): """Convert old style options into arguments to boto3.session.Session.""" if not isinstance(aws_args, dict): raise errors.InvalidConfiguration( 'Elastic DocManager config option "aws" must be a dict' ) old_session_kwargs = dict( region="region_name", access_id="aws_access_key_id", secret_key="aws_secret_access_key", ) new_kwargs = {} for arg in aws_args: if arg in old_session_kwargs: new_kwargs[old_session_kwargs[arg]] = aws_args[arg] else: new_kwargs[arg] = aws_args[arg] return new_kwargs
python
def convert_aws_args(aws_args): """Convert old style options into arguments to boto3.session.Session.""" if not isinstance(aws_args, dict): raise errors.InvalidConfiguration( 'Elastic DocManager config option "aws" must be a dict' ) old_session_kwargs = dict( region="region_name", access_id="aws_access_key_id", secret_key="aws_secret_access_key", ) new_kwargs = {} for arg in aws_args: if arg in old_session_kwargs: new_kwargs[old_session_kwargs[arg]] = aws_args[arg] else: new_kwargs[arg] = aws_args[arg] return new_kwargs
[ "def", "convert_aws_args", "(", "aws_args", ")", ":", "if", "not", "isinstance", "(", "aws_args", ",", "dict", ")", ":", "raise", "errors", ".", "InvalidConfiguration", "(", "'Elastic DocManager config option \"aws\" must be a dict'", ")", "old_session_kwargs", "=", "dict", "(", "region", "=", "\"region_name\"", ",", "access_id", "=", "\"aws_access_key_id\"", ",", "secret_key", "=", "\"aws_secret_access_key\"", ",", ")", "new_kwargs", "=", "{", "}", "for", "arg", "in", "aws_args", ":", "if", "arg", "in", "old_session_kwargs", ":", "new_kwargs", "[", "old_session_kwargs", "[", "arg", "]", "]", "=", "aws_args", "[", "arg", "]", "else", ":", "new_kwargs", "[", "arg", "]", "=", "aws_args", "[", "arg", "]", "return", "new_kwargs" ]
Convert old style options into arguments to boto3.session.Session.
[ "Convert", "old", "style", "options", "into", "arguments", "to", "boto3", ".", "session", ".", "Session", "." ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L82-L99
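A quick sketch of the key renaming performed by convert_aws_args above; the credential values are placeholders.

# Old-style "aws" options are mapped onto boto3.session.Session keyword names.
old_style = {"region": "us-east-1", "access_id": "AKIA...", "secret_key": "..."}
print(convert_aws_args(old_style))
# {'region_name': 'us-east-1', 'aws_access_key_id': 'AKIA...', 'aws_secret_access_key': '...'}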
1,124
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
DocManager._index_and_mapping
def _index_and_mapping(self, namespace): """Helper method for getting the index and type from a namespace.""" index, doc_type = namespace.split(".", 1) return index.lower(), doc_type
python
def _index_and_mapping(self, namespace): """Helper method for getting the index and type from a namespace.""" index, doc_type = namespace.split(".", 1) return index.lower(), doc_type
[ "def", "_index_and_mapping", "(", "self", ",", "namespace", ")", ":", "index", ",", "doc_type", "=", "namespace", ".", "split", "(", "\".\"", ",", "1", ")", "return", "index", ".", "lower", "(", ")", ",", "doc_type" ]
Helper method for getting the index and type from a namespace.
[ "Helper", "method", "for", "getting", "the", "index", "and", "type", "from", "a", "namespace", "." ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L226-L229
1,125
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
DocManager._stream_search
def _stream_search(self, *args, **kwargs): """Helper method for iterating over ES search results.""" for hit in scan( self.elastic, query=kwargs.pop("body", None), scroll="10m", **kwargs ): hit["_source"]["_id"] = hit["_id"] yield hit["_source"]
python
def _stream_search(self, *args, **kwargs): """Helper method for iterating over ES search results.""" for hit in scan( self.elastic, query=kwargs.pop("body", None), scroll="10m", **kwargs ): hit["_source"]["_id"] = hit["_id"] yield hit["_source"]
[ "def", "_stream_search", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "for", "hit", "in", "scan", "(", "self", ".", "elastic", ",", "query", "=", "kwargs", ".", "pop", "(", "\"body\"", ",", "None", ")", ",", "scroll", "=", "\"10m\"", ",", "*", "*", "kwargs", ")", ":", "hit", "[", "\"_source\"", "]", "[", "\"_id\"", "]", "=", "hit", "[", "\"_id\"", "]", "yield", "hit", "[", "\"_source\"", "]" ]
Helper method for iterating over ES search results.
[ "Helper", "method", "for", "iterating", "over", "ES", "search", "results", "." ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L458-L464
1,126
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
DocManager.search
def search(self, start_ts, end_ts): """Query Elasticsearch for documents in a time range. This method is used to find documents that may be in conflict during a rollback event in MongoDB. """ return self._stream_search( index=self.meta_index_name, body={"query": {"range": {"_ts": {"gte": start_ts, "lte": end_ts}}}}, )
python
def search(self, start_ts, end_ts): """Query Elasticsearch for documents in a time range. This method is used to find documents that may be in conflict during a rollback event in MongoDB. """ return self._stream_search( index=self.meta_index_name, body={"query": {"range": {"_ts": {"gte": start_ts, "lte": end_ts}}}}, )
[ "def", "search", "(", "self", ",", "start_ts", ",", "end_ts", ")", ":", "return", "self", ".", "_stream_search", "(", "index", "=", "self", ".", "meta_index_name", ",", "body", "=", "{", "\"query\"", ":", "{", "\"range\"", ":", "{", "\"_ts\"", ":", "{", "\"gte\"", ":", "start_ts", ",", "\"lte\"", ":", "end_ts", "}", "}", "}", "}", ",", ")" ]
Query Elasticsearch for documents in a time range. This method is used to find documents that may be in conflict during a rollback event in MongoDB.
[ "Query", "Elasticsearch", "for", "documents", "in", "a", "time", "range", "." ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L466-L475
1,127
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
DocManager.commit
def commit(self): """Send buffered requests and refresh all indexes.""" self.send_buffered_operations() retry_until_ok(self.elastic.indices.refresh, index="")
python
def commit(self): """Send buffered requests and refresh all indexes.""" self.send_buffered_operations() retry_until_ok(self.elastic.indices.refresh, index="")
[ "def", "commit", "(", "self", ")", ":", "self", ".", "send_buffered_operations", "(", ")", "retry_until_ok", "(", "self", ".", "elastic", ".", "indices", ".", "refresh", ",", "index", "=", "\"\"", ")" ]
Send buffered requests and refresh all indexes.
[ "Send", "buffered", "requests", "and", "refresh", "all", "indexes", "." ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L507-L510
1,128
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
BulkBuffer.add_upsert
def add_upsert(self, action, meta_action, doc_source, update_spec): """ Function which stores sources for "insert" actions and decide if for "update" action has to add docs to get source buffer """ # Whenever update_spec is provided to this method # it means that doc source needs to be retrieved # from Elasticsearch. It means also that source # is not stored in local buffer if update_spec: self.bulk_index(action, meta_action) # -1 -> to get latest index number # -1 -> to get action instead of meta_action # Update document based on source retrieved from ES self.add_doc_to_update(action, update_spec, len(self.action_buffer) - 2) else: # Insert and update operations provide source # Store it in local buffer and use for comming updates # inside same buffer # add_to_sources will not be called for delete operation # as it does not provide doc_source if doc_source: self.add_to_sources(action, doc_source) self.bulk_index(action, meta_action)
python
def add_upsert(self, action, meta_action, doc_source, update_spec): """ Function which stores sources for "insert" actions and decide if for "update" action has to add docs to get source buffer """ # Whenever update_spec is provided to this method # it means that doc source needs to be retrieved # from Elasticsearch. It means also that source # is not stored in local buffer if update_spec: self.bulk_index(action, meta_action) # -1 -> to get latest index number # -1 -> to get action instead of meta_action # Update document based on source retrieved from ES self.add_doc_to_update(action, update_spec, len(self.action_buffer) - 2) else: # Insert and update operations provide source # Store it in local buffer and use for comming updates # inside same buffer # add_to_sources will not be called for delete operation # as it does not provide doc_source if doc_source: self.add_to_sources(action, doc_source) self.bulk_index(action, meta_action)
[ "def", "add_upsert", "(", "self", ",", "action", ",", "meta_action", ",", "doc_source", ",", "update_spec", ")", ":", "# Whenever update_spec is provided to this method", "# it means that doc source needs to be retrieved", "# from Elasticsearch. It means also that source", "# is not stored in local buffer", "if", "update_spec", ":", "self", ".", "bulk_index", "(", "action", ",", "meta_action", ")", "# -1 -> to get latest index number", "# -1 -> to get action instead of meta_action", "# Update document based on source retrieved from ES", "self", ".", "add_doc_to_update", "(", "action", ",", "update_spec", ",", "len", "(", "self", ".", "action_buffer", ")", "-", "2", ")", "else", ":", "# Insert and update operations provide source", "# Store it in local buffer and use for comming updates", "# inside same buffer", "# add_to_sources will not be called for delete operation", "# as it does not provide doc_source", "if", "doc_source", ":", "self", ".", "add_to_sources", "(", "action", ",", "doc_source", ")", "self", ".", "bulk_index", "(", "action", ",", "meta_action", ")" ]
Store sources for "insert" actions and decide whether an "update" action needs documents added to the get-source buffer
[ "Store", "sources", "for", "insert", "actions", "and", "decide", "whether", "an", "update", "action", "needs", "documents", "added", "to", "the", "get", "-", "source", "buffer" ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L559-L585
1,129
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
BulkBuffer.add_doc_to_update
def add_doc_to_update(self, action, update_spec, action_buffer_index): """ Prepare document for update based on Elasticsearch response. Set flag if document needs to be retrieved from Elasticsearch """ doc = { "_index": action["_index"], "_type": action["_type"], "_id": action["_id"], } # If get_from_ES == True -> get document's source from Elasticsearch get_from_ES = self.should_get_id(action) self.doc_to_update.append((doc, update_spec, action_buffer_index, get_from_ES))
python
def add_doc_to_update(self, action, update_spec, action_buffer_index): """ Prepare document for update based on Elasticsearch response. Set flag if document needs to be retrieved from Elasticsearch """ doc = { "_index": action["_index"], "_type": action["_type"], "_id": action["_id"], } # If get_from_ES == True -> get document's source from Elasticsearch get_from_ES = self.should_get_id(action) self.doc_to_update.append((doc, update_spec, action_buffer_index, get_from_ES))
[ "def", "add_doc_to_update", "(", "self", ",", "action", ",", "update_spec", ",", "action_buffer_index", ")", ":", "doc", "=", "{", "\"_index\"", ":", "action", "[", "\"_index\"", "]", ",", "\"_type\"", ":", "action", "[", "\"_type\"", "]", ",", "\"_id\"", ":", "action", "[", "\"_id\"", "]", ",", "}", "# If get_from_ES == True -> get document's source from Elasticsearch", "get_from_ES", "=", "self", ".", "should_get_id", "(", "action", ")", "self", ".", "doc_to_update", ".", "append", "(", "(", "doc", ",", "update_spec", ",", "action_buffer_index", ",", "get_from_ES", ")", ")" ]
Prepare document for update based on Elasticsearch response. Set flag if document needs to be retrieved from Elasticsearch
[ "Prepare", "document", "for", "update", "based", "on", "Elasticsearch", "response", ".", "Set", "flag", "if", "document", "needs", "to", "be", "retrieved", "from", "Elasticsearch" ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L587-L601
1,130
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
BulkBuffer.get_docs_sources_from_ES
def get_docs_sources_from_ES(self): """Get document sources using MGET elasticsearch API""" docs = [doc for doc, _, _, get_from_ES in self.doc_to_update if get_from_ES] if docs: documents = self.docman.elastic.mget(body={"docs": docs}, realtime=True) return iter(documents["docs"]) else: return iter([])
python
def get_docs_sources_from_ES(self): """Get document sources using MGET elasticsearch API""" docs = [doc for doc, _, _, get_from_ES in self.doc_to_update if get_from_ES] if docs: documents = self.docman.elastic.mget(body={"docs": docs}, realtime=True) return iter(documents["docs"]) else: return iter([])
[ "def", "get_docs_sources_from_ES", "(", "self", ")", ":", "docs", "=", "[", "doc", "for", "doc", ",", "_", ",", "_", ",", "get_from_ES", "in", "self", ".", "doc_to_update", "if", "get_from_ES", "]", "if", "docs", ":", "documents", "=", "self", ".", "docman", ".", "elastic", ".", "mget", "(", "body", "=", "{", "\"docs\"", ":", "docs", "}", ",", "realtime", "=", "True", ")", "return", "iter", "(", "documents", "[", "\"docs\"", "]", ")", "else", ":", "return", "iter", "(", "[", "]", ")" ]
Get document sources using the Elasticsearch mget API
[ "Get", "document", "sources", "using", "the", "Elasticsearch", "mget", "API" ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L620-L627
1,131
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
BulkBuffer.update_sources
def update_sources(self): """Update local sources based on response from Elasticsearch""" ES_documents = self.get_docs_sources_from_ES() for doc, update_spec, action_buffer_index, get_from_ES in self.doc_to_update: if get_from_ES: # Update source based on response from ES ES_doc = next(ES_documents) if ES_doc["found"]: source = ES_doc["_source"] else: # Document not found in elasticsearch, # Seems like something went wrong during replication LOG.error( "mGET: Document id: %s has not been found " "in Elasticsearch. Due to that " "following update failed: %s", doc["_id"], update_spec, ) self.reset_action(action_buffer_index) continue else: # Get source stored locally before applying update # as it is up-to-date source = self.get_from_sources(doc["_index"], doc["_type"], doc["_id"]) if not source: LOG.error( "mGET: Document id: %s has not been found " "in local sources. Due to that following " "update failed: %s", doc["_id"], update_spec, ) self.reset_action(action_buffer_index) continue updated = self.docman.apply_update(source, update_spec) # Remove _id field from source if "_id" in updated: del updated["_id"] # Everytime update locally stored sources to keep them up-to-date self.add_to_sources(doc, updated) self.action_buffer[action_buffer_index][ "_source" ] = self.docman._formatter.format_document(updated) # Remove empty actions if there were errors self.action_buffer = [ each_action for each_action in self.action_buffer if each_action ]
python
def update_sources(self): """Update local sources based on response from Elasticsearch""" ES_documents = self.get_docs_sources_from_ES() for doc, update_spec, action_buffer_index, get_from_ES in self.doc_to_update: if get_from_ES: # Update source based on response from ES ES_doc = next(ES_documents) if ES_doc["found"]: source = ES_doc["_source"] else: # Document not found in elasticsearch, # Seems like something went wrong during replication LOG.error( "mGET: Document id: %s has not been found " "in Elasticsearch. Due to that " "following update failed: %s", doc["_id"], update_spec, ) self.reset_action(action_buffer_index) continue else: # Get source stored locally before applying update # as it is up-to-date source = self.get_from_sources(doc["_index"], doc["_type"], doc["_id"]) if not source: LOG.error( "mGET: Document id: %s has not been found " "in local sources. Due to that following " "update failed: %s", doc["_id"], update_spec, ) self.reset_action(action_buffer_index) continue updated = self.docman.apply_update(source, update_spec) # Remove _id field from source if "_id" in updated: del updated["_id"] # Everytime update locally stored sources to keep them up-to-date self.add_to_sources(doc, updated) self.action_buffer[action_buffer_index][ "_source" ] = self.docman._formatter.format_document(updated) # Remove empty actions if there were errors self.action_buffer = [ each_action for each_action in self.action_buffer if each_action ]
[ "def", "update_sources", "(", "self", ")", ":", "ES_documents", "=", "self", ".", "get_docs_sources_from_ES", "(", ")", "for", "doc", ",", "update_spec", ",", "action_buffer_index", ",", "get_from_ES", "in", "self", ".", "doc_to_update", ":", "if", "get_from_ES", ":", "# Update source based on response from ES", "ES_doc", "=", "next", "(", "ES_documents", ")", "if", "ES_doc", "[", "\"found\"", "]", ":", "source", "=", "ES_doc", "[", "\"_source\"", "]", "else", ":", "# Document not found in elasticsearch,", "# Seems like something went wrong during replication", "LOG", ".", "error", "(", "\"mGET: Document id: %s has not been found \"", "\"in Elasticsearch. Due to that \"", "\"following update failed: %s\"", ",", "doc", "[", "\"_id\"", "]", ",", "update_spec", ",", ")", "self", ".", "reset_action", "(", "action_buffer_index", ")", "continue", "else", ":", "# Get source stored locally before applying update", "# as it is up-to-date", "source", "=", "self", ".", "get_from_sources", "(", "doc", "[", "\"_index\"", "]", ",", "doc", "[", "\"_type\"", "]", ",", "doc", "[", "\"_id\"", "]", ")", "if", "not", "source", ":", "LOG", ".", "error", "(", "\"mGET: Document id: %s has not been found \"", "\"in local sources. Due to that following \"", "\"update failed: %s\"", ",", "doc", "[", "\"_id\"", "]", ",", "update_spec", ",", ")", "self", ".", "reset_action", "(", "action_buffer_index", ")", "continue", "updated", "=", "self", ".", "docman", ".", "apply_update", "(", "source", ",", "update_spec", ")", "# Remove _id field from source", "if", "\"_id\"", "in", "updated", ":", "del", "updated", "[", "\"_id\"", "]", "# Everytime update locally stored sources to keep them up-to-date", "self", ".", "add_to_sources", "(", "doc", ",", "updated", ")", "self", ".", "action_buffer", "[", "action_buffer_index", "]", "[", "\"_source\"", "]", "=", "self", ".", "docman", ".", "_formatter", ".", "format_document", "(", "updated", ")", "# Remove empty actions if there were errors", "self", ".", "action_buffer", "=", "[", "each_action", "for", "each_action", "in", "self", ".", "action_buffer", "if", "each_action", "]" ]
Update local sources based on response from Elasticsearch
[ "Update", "local", "sources", "based", "on", "response", "from", "Elasticsearch" ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L630-L683
1,132
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
BulkBuffer.add_to_sources
def add_to_sources(self, action, doc_source): """Store sources locally""" mapping = self.sources.setdefault(action["_index"], {}).setdefault( action["_type"], {} ) mapping[action["_id"]] = doc_source
python
def add_to_sources(self, action, doc_source): """Store sources locally""" mapping = self.sources.setdefault(action["_index"], {}).setdefault( action["_type"], {} ) mapping[action["_id"]] = doc_source
[ "def", "add_to_sources", "(", "self", ",", "action", ",", "doc_source", ")", ":", "mapping", "=", "self", ".", "sources", ".", "setdefault", "(", "action", "[", "\"_index\"", "]", ",", "{", "}", ")", ".", "setdefault", "(", "action", "[", "\"_type\"", "]", ",", "{", "}", ")", "mapping", "[", "action", "[", "\"_id\"", "]", "]", "=", "doc_source" ]
Store sources locally
[ "Store", "sources", "locally" ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L690-L695
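A minimal standalone sketch of the nested mapping that add_to_sources builds with its chained setdefault calls (index, then document type, then document id); the action and source values below are made up for illustration:

# Nested layout built by chained setdefault: {index: {doc_type: {doc_id: source}}}.
def add_to_sources(sources, action, doc_source):
    mapping = sources.setdefault(action["_index"], {}).setdefault(action["_type"], {})
    mapping[action["_id"]] = doc_source

sources = {}
add_to_sources(sources, {"_index": "idx", "_type": "_doc", "_id": "1"}, {"title": "a"})
add_to_sources(sources, {"_index": "idx", "_type": "_doc", "_id": "2"}, {"title": "b"})
assert sources == {"idx": {"_doc": {"1": {"title": "a"}, "2": {"title": "b"}}}}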
1,133
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
BulkBuffer.get_from_sources
def get_from_sources(self, index, doc_type, document_id): """Get source stored locally""" return self.sources.get(index, {}).get(doc_type, {}).get(document_id, {})
python
def get_from_sources(self, index, doc_type, document_id): """Get source stored locally""" return self.sources.get(index, {}).get(doc_type, {}).get(document_id, {})
[ "def", "get_from_sources", "(", "self", ",", "index", ",", "doc_type", ",", "document_id", ")", ":", "return", "self", ".", "sources", ".", "get", "(", "index", ",", "{", "}", ")", ".", "get", "(", "doc_type", ",", "{", "}", ")", ".", "get", "(", "document_id", ",", "{", "}", ")" ]
Get source stored locally
[ "Get", "source", "stored", "locally" ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L697-L699
1,134
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
BulkBuffer.clean_up
def clean_up(self): """Do clean-up before returning buffer""" self.action_buffer = [] self.sources = {} self.doc_to_get = {} self.doc_to_update = []
python
def clean_up(self): """Do clean-up before returning buffer""" self.action_buffer = [] self.sources = {} self.doc_to_get = {} self.doc_to_update = []
[ "def", "clean_up", "(", "self", ")", ":", "self", ".", "action_buffer", "=", "[", "]", "self", ".", "sources", "=", "{", "}", "self", ".", "doc_to_get", "=", "{", "}", "self", ".", "doc_to_update", "=", "[", "]" ]
Do clean-up before returning buffer
[ "Do", "clean", "-", "up", "before", "returning", "buffer" ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L705-L710
1,135
yougov/elastic2-doc-manager
mongo_connector/doc_managers/elastic2_doc_manager.py
BulkBuffer.get_buffer
def get_buffer(self): """Get buffer which needs to be bulked to elasticsearch""" # Get sources for documents which are in Elasticsearch # and they are not in local buffer if self.doc_to_update: self.update_sources() ES_buffer = self.action_buffer self.clean_up() return ES_buffer
python
def get_buffer(self): """Get buffer which needs to be bulked to elasticsearch""" # Get sources for documents which are in Elasticsearch # and they are not in local buffer if self.doc_to_update: self.update_sources() ES_buffer = self.action_buffer self.clean_up() return ES_buffer
[ "def", "get_buffer", "(", "self", ")", ":", "# Get sources for documents which are in Elasticsearch", "# and they are not in local buffer", "if", "self", ".", "doc_to_update", ":", "self", ".", "update_sources", "(", ")", "ES_buffer", "=", "self", ".", "action_buffer", "self", ".", "clean_up", "(", ")", "return", "ES_buffer" ]
Get buffer which needs to be bulked to elasticsearch
[ "Get", "buffer", "which", "needs", "to", "be", "bulked", "to", "elasticsearch" ]
ad92138d1fd6656bb2e71cb5cc840f9ba0109c49
https://github.com/yougov/elastic2-doc-manager/blob/ad92138d1fd6656bb2e71cb5cc840f9ba0109c49/mongo_connector/doc_managers/elastic2_doc_manager.py#L712-L722
1,136
citruz/beacontools
beacontools/scanner.py
Monitor.run
def run(self): """Continously scan for BLE advertisements.""" self.socket = self.bluez.hci_open_dev(self.bt_device_id) filtr = self.bluez.hci_filter_new() self.bluez.hci_filter_all_events(filtr) self.bluez.hci_filter_set_ptype(filtr, self.bluez.HCI_EVENT_PKT) self.socket.setsockopt(self.bluez.SOL_HCI, self.bluez.HCI_FILTER, filtr) self.set_scan_parameters() self.toggle_scan(True) while self.keep_going: pkt = self.socket.recv(255) event = to_int(pkt[1]) subevent = to_int(pkt[3]) if event == LE_META_EVENT and subevent == EVT_LE_ADVERTISING_REPORT: # we have an BLE advertisement self.process_packet(pkt) self.socket.close()
python
def run(self): """Continously scan for BLE advertisements.""" self.socket = self.bluez.hci_open_dev(self.bt_device_id) filtr = self.bluez.hci_filter_new() self.bluez.hci_filter_all_events(filtr) self.bluez.hci_filter_set_ptype(filtr, self.bluez.HCI_EVENT_PKT) self.socket.setsockopt(self.bluez.SOL_HCI, self.bluez.HCI_FILTER, filtr) self.set_scan_parameters() self.toggle_scan(True) while self.keep_going: pkt = self.socket.recv(255) event = to_int(pkt[1]) subevent = to_int(pkt[3]) if event == LE_META_EVENT and subevent == EVT_LE_ADVERTISING_REPORT: # we have an BLE advertisement self.process_packet(pkt) self.socket.close()
[ "def", "run", "(", "self", ")", ":", "self", ".", "socket", "=", "self", ".", "bluez", ".", "hci_open_dev", "(", "self", ".", "bt_device_id", ")", "filtr", "=", "self", ".", "bluez", ".", "hci_filter_new", "(", ")", "self", ".", "bluez", ".", "hci_filter_all_events", "(", "filtr", ")", "self", ".", "bluez", ".", "hci_filter_set_ptype", "(", "filtr", ",", "self", ".", "bluez", ".", "HCI_EVENT_PKT", ")", "self", ".", "socket", ".", "setsockopt", "(", "self", ".", "bluez", ".", "SOL_HCI", ",", "self", ".", "bluez", ".", "HCI_FILTER", ",", "filtr", ")", "self", ".", "set_scan_parameters", "(", ")", "self", ".", "toggle_scan", "(", "True", ")", "while", "self", ".", "keep_going", ":", "pkt", "=", "self", ".", "socket", ".", "recv", "(", "255", ")", "event", "=", "to_int", "(", "pkt", "[", "1", "]", ")", "subevent", "=", "to_int", "(", "pkt", "[", "3", "]", ")", "if", "event", "==", "LE_META_EVENT", "and", "subevent", "==", "EVT_LE_ADVERTISING_REPORT", ":", "# we have an BLE advertisement", "self", ".", "process_packet", "(", "pkt", ")", "self", ".", "socket", ".", "close", "(", ")" ]
Continuously scan for BLE advertisements.
[ "Continuously", "scan", "for", "BLE", "advertisements", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/scanner.py#L89-L108
1,137
citruz/beacontools
beacontools/scanner.py
Monitor.set_scan_parameters
def set_scan_parameters(self, scan_type=ScanType.ACTIVE, interval_ms=10, window_ms=10, address_type=BluetoothAddressType.RANDOM, filter_type=ScanFilter.ALL): """"sets the le scan parameters Args: scan_type: ScanType.(PASSIVE|ACTIVE) interval: ms (as float) between scans (valid range 2.5ms - 10240ms) ..note:: when interval and window are equal, the scan runs continuos window: ms (as float) scan duration (valid range 2.5ms - 10240ms) address_type: Bluetooth address type BluetoothAddressType.(PUBLIC|RANDOM) * PUBLIC = use device MAC address * RANDOM = generate a random MAC address and use that filter: ScanFilter.(ALL|WHITELIST_ONLY) only ALL is supported, which will return all fetched bluetooth packets (WHITELIST_ONLY is not supported, because OCF_LE_ADD_DEVICE_TO_WHITE_LIST command is not implemented) Raises: ValueError: A value had an unexpected format or was not in range """ interval_fractions = interval_ms / MS_FRACTION_DIVIDER if interval_fractions < 0x0004 or interval_fractions > 0x4000: raise ValueError( "Invalid interval given {}, must be in range of 2.5ms to 10240ms!".format( interval_fractions)) window_fractions = window_ms / MS_FRACTION_DIVIDER if window_fractions < 0x0004 or window_fractions > 0x4000: raise ValueError( "Invalid window given {}, must be in range of 2.5ms to 10240ms!".format( window_fractions)) interval_fractions, window_fractions = int(interval_fractions), int(window_fractions) scan_parameter_pkg = struct.pack( ">BHHBB", scan_type, interval_fractions, window_fractions, address_type, filter_type) self.bluez.hci_send_cmd(self.socket, OGF_LE_CTL, OCF_LE_SET_SCAN_PARAMETERS, scan_parameter_pkg)
python
def set_scan_parameters(self, scan_type=ScanType.ACTIVE, interval_ms=10, window_ms=10, address_type=BluetoothAddressType.RANDOM, filter_type=ScanFilter.ALL): """"sets the le scan parameters Args: scan_type: ScanType.(PASSIVE|ACTIVE) interval: ms (as float) between scans (valid range 2.5ms - 10240ms) ..note:: when interval and window are equal, the scan runs continuos window: ms (as float) scan duration (valid range 2.5ms - 10240ms) address_type: Bluetooth address type BluetoothAddressType.(PUBLIC|RANDOM) * PUBLIC = use device MAC address * RANDOM = generate a random MAC address and use that filter: ScanFilter.(ALL|WHITELIST_ONLY) only ALL is supported, which will return all fetched bluetooth packets (WHITELIST_ONLY is not supported, because OCF_LE_ADD_DEVICE_TO_WHITE_LIST command is not implemented) Raises: ValueError: A value had an unexpected format or was not in range """ interval_fractions = interval_ms / MS_FRACTION_DIVIDER if interval_fractions < 0x0004 or interval_fractions > 0x4000: raise ValueError( "Invalid interval given {}, must be in range of 2.5ms to 10240ms!".format( interval_fractions)) window_fractions = window_ms / MS_FRACTION_DIVIDER if window_fractions < 0x0004 or window_fractions > 0x4000: raise ValueError( "Invalid window given {}, must be in range of 2.5ms to 10240ms!".format( window_fractions)) interval_fractions, window_fractions = int(interval_fractions), int(window_fractions) scan_parameter_pkg = struct.pack( ">BHHBB", scan_type, interval_fractions, window_fractions, address_type, filter_type) self.bluez.hci_send_cmd(self.socket, OGF_LE_CTL, OCF_LE_SET_SCAN_PARAMETERS, scan_parameter_pkg)
[ "def", "set_scan_parameters", "(", "self", ",", "scan_type", "=", "ScanType", ".", "ACTIVE", ",", "interval_ms", "=", "10", ",", "window_ms", "=", "10", ",", "address_type", "=", "BluetoothAddressType", ".", "RANDOM", ",", "filter_type", "=", "ScanFilter", ".", "ALL", ")", ":", "interval_fractions", "=", "interval_ms", "/", "MS_FRACTION_DIVIDER", "if", "interval_fractions", "<", "0x0004", "or", "interval_fractions", ">", "0x4000", ":", "raise", "ValueError", "(", "\"Invalid interval given {}, must be in range of 2.5ms to 10240ms!\"", ".", "format", "(", "interval_fractions", ")", ")", "window_fractions", "=", "window_ms", "/", "MS_FRACTION_DIVIDER", "if", "window_fractions", "<", "0x0004", "or", "window_fractions", ">", "0x4000", ":", "raise", "ValueError", "(", "\"Invalid window given {}, must be in range of 2.5ms to 10240ms!\"", ".", "format", "(", "window_fractions", ")", ")", "interval_fractions", ",", "window_fractions", "=", "int", "(", "interval_fractions", ")", ",", "int", "(", "window_fractions", ")", "scan_parameter_pkg", "=", "struct", ".", "pack", "(", "\">BHHBB\"", ",", "scan_type", ",", "interval_fractions", ",", "window_fractions", ",", "address_type", ",", "filter_type", ")", "self", ".", "bluez", ".", "hci_send_cmd", "(", "self", ".", "socket", ",", "OGF_LE_CTL", ",", "OCF_LE_SET_SCAN_PARAMETERS", ",", "scan_parameter_pkg", ")" ]
Sets the LE scan parameters Args: scan_type: ScanType.(PASSIVE|ACTIVE) interval_ms: ms (as float) between scans (valid range 2.5ms - 10240ms) .. note:: when interval and window are equal, the scan runs continuously window_ms: ms (as float) scan duration (valid range 2.5ms - 10240ms) address_type: Bluetooth address type BluetoothAddressType.(PUBLIC|RANDOM) * PUBLIC = use device MAC address * RANDOM = generate a random MAC address and use that filter_type: ScanFilter.(ALL|WHITELIST_ONLY) only ALL is supported, which will return all fetched Bluetooth packets (WHITELIST_ONLY is not supported, because the OCF_LE_ADD_DEVICE_TO_WHITE_LIST command is not implemented) Raises: ValueError: A value had an unexpected format or was not in range
[ "Sets", "the", "LE", "scan", "parameters" ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/scanner.py#L110-L151
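The 0x0004-0x4000 bounds above match the BLE specification's scan interval/window unit of 0.625 ms (2.5 ms to 10240 ms), so MS_FRACTION_DIVIDER is presumably 0.625; that constant is an assumption here, as is using 0x01 as a stand-in for ScanType.ACTIVE. A minimal sketch of the conversion and packing:

import struct

MS_FRACTION_DIVIDER = 0.625  # assumed: BLE scan interval/window unit in ms

def ms_to_fractions(ms):
    # Convert milliseconds to 0.625 ms units, mirroring the range check above.
    fractions = ms / MS_FRACTION_DIVIDER
    if fractions < 0x0004 or fractions > 0x4000:
        raise ValueError("must be in range of 2.5ms to 10240ms")
    return int(fractions)

# The defaults interval_ms=10 and window_ms=10 both become 16 units (16 * 0.625 = 10 ms).
pkt = struct.pack(">BHHBB", 0x01, ms_to_fractions(10), ms_to_fractions(10), 0x01, 0x00)
assert pkt == b"\x01\x00\x10\x00\x10\x01\x00"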
1,138
citruz/beacontools
beacontools/scanner.py
Monitor.toggle_scan
def toggle_scan(self, enable, filter_duplicates=False): """Enables or disables BLE scanning Args: enable: boolean value to enable (True) or disable (False) scanner filter_duplicates: boolean value to enable/disable filter, that omits duplicated packets""" command = struct.pack(">BB", enable, filter_duplicates) self.bluez.hci_send_cmd(self.socket, OGF_LE_CTL, OCF_LE_SET_SCAN_ENABLE, command)
python
def toggle_scan(self, enable, filter_duplicates=False): """Enables or disables BLE scanning Args: enable: boolean value to enable (True) or disable (False) scanner filter_duplicates: boolean value to enable/disable filter, that omits duplicated packets""" command = struct.pack(">BB", enable, filter_duplicates) self.bluez.hci_send_cmd(self.socket, OGF_LE_CTL, OCF_LE_SET_SCAN_ENABLE, command)
[ "def", "toggle_scan", "(", "self", ",", "enable", ",", "filter_duplicates", "=", "False", ")", ":", "command", "=", "struct", ".", "pack", "(", "\">BB\"", ",", "enable", ",", "filter_duplicates", ")", "self", ".", "bluez", ".", "hci_send_cmd", "(", "self", ".", "socket", ",", "OGF_LE_CTL", ",", "OCF_LE_SET_SCAN_ENABLE", ",", "command", ")" ]
Enables or disables BLE scanning Args: enable: boolean value to enable (True) or disable (False) the scanner filter_duplicates: boolean value to enable/disable a filter that omits duplicated packets
[ "Enables", "or", "disables", "BLE", "scanning" ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/scanner.py#L153-L161
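The command payload is just two single-byte flags; struct accepts the booleans because they pack as 0/1. A quick check of what hci_send_cmd is handed:

import struct

assert struct.pack(">BB", True, False) == b"\x01\x00"   # enable scan, keep duplicates
assert struct.pack(">BB", False, True) == b"\x00\x01"   # disable scan, filter duplicates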
1,139
citruz/beacontools
beacontools/scanner.py
Monitor.process_packet
def process_packet(self, pkt): """Parse the packet and call callback if one of the filters matches.""" # check if this could be a valid packet before parsing # this reduces the CPU load significantly if not ( \ ((self.mode & ScannerMode.MODE_IBEACON) and (pkt[19:23] == b"\x4c\x00\x02\x15")) or \ ((self.mode & ScannerMode.MODE_EDDYSTONE) and (pkt[19:21] == b"\xaa\xfe")) or \ ((self.mode & ScannerMode.MODE_ESTIMOTE) and (pkt[19:21] == b"\x9a\xfe"))): return bt_addr = bt_addr_to_string(pkt[7:13]) rssi = bin_to_int(pkt[-1]) # strip bluetooth address and parse packet packet = parse_packet(pkt[14:-1]) # return if packet was not an beacon advertisement if not packet: return # we need to remeber which eddystone beacon has which bt address # because the TLM and URL frames do not contain the namespace and instance self.save_bt_addr(packet, bt_addr) # properties holds the identifying information for a beacon # e.g. instance and namespace for eddystone; uuid, major, minor for iBeacon properties = self.get_properties(packet, bt_addr) if self.device_filter is None and self.packet_filter is None: # no filters selected self.callback(bt_addr, rssi, packet, properties) elif self.device_filter is None: # filter by packet type if is_one_of(packet, self.packet_filter): self.callback(bt_addr, rssi, packet, properties) else: # filter by device and packet type if self.packet_filter and not is_one_of(packet, self.packet_filter): # return if packet filter does not match return # iterate over filters and call .matches() on each for filtr in self.device_filter: if isinstance(filtr, BtAddrFilter): if filtr.matches({'bt_addr':bt_addr}): self.callback(bt_addr, rssi, packet, properties) return elif filtr.matches(properties): self.callback(bt_addr, rssi, packet, properties) return
python
def process_packet(self, pkt): """Parse the packet and call callback if one of the filters matches.""" # check if this could be a valid packet before parsing # this reduces the CPU load significantly if not ( \ ((self.mode & ScannerMode.MODE_IBEACON) and (pkt[19:23] == b"\x4c\x00\x02\x15")) or \ ((self.mode & ScannerMode.MODE_EDDYSTONE) and (pkt[19:21] == b"\xaa\xfe")) or \ ((self.mode & ScannerMode.MODE_ESTIMOTE) and (pkt[19:21] == b"\x9a\xfe"))): return bt_addr = bt_addr_to_string(pkt[7:13]) rssi = bin_to_int(pkt[-1]) # strip bluetooth address and parse packet packet = parse_packet(pkt[14:-1]) # return if packet was not an beacon advertisement if not packet: return # we need to remeber which eddystone beacon has which bt address # because the TLM and URL frames do not contain the namespace and instance self.save_bt_addr(packet, bt_addr) # properties holds the identifying information for a beacon # e.g. instance and namespace for eddystone; uuid, major, minor for iBeacon properties = self.get_properties(packet, bt_addr) if self.device_filter is None and self.packet_filter is None: # no filters selected self.callback(bt_addr, rssi, packet, properties) elif self.device_filter is None: # filter by packet type if is_one_of(packet, self.packet_filter): self.callback(bt_addr, rssi, packet, properties) else: # filter by device and packet type if self.packet_filter and not is_one_of(packet, self.packet_filter): # return if packet filter does not match return # iterate over filters and call .matches() on each for filtr in self.device_filter: if isinstance(filtr, BtAddrFilter): if filtr.matches({'bt_addr':bt_addr}): self.callback(bt_addr, rssi, packet, properties) return elif filtr.matches(properties): self.callback(bt_addr, rssi, packet, properties) return
[ "def", "process_packet", "(", "self", ",", "pkt", ")", ":", "# check if this could be a valid packet before parsing", "# this reduces the CPU load significantly", "if", "not", "(", "(", "(", "self", ".", "mode", "&", "ScannerMode", ".", "MODE_IBEACON", ")", "and", "(", "pkt", "[", "19", ":", "23", "]", "==", "b\"\\x4c\\x00\\x02\\x15\"", ")", ")", "or", "(", "(", "self", ".", "mode", "&", "ScannerMode", ".", "MODE_EDDYSTONE", ")", "and", "(", "pkt", "[", "19", ":", "21", "]", "==", "b\"\\xaa\\xfe\"", ")", ")", "or", "(", "(", "self", ".", "mode", "&", "ScannerMode", ".", "MODE_ESTIMOTE", ")", "and", "(", "pkt", "[", "19", ":", "21", "]", "==", "b\"\\x9a\\xfe\"", ")", ")", ")", ":", "return", "bt_addr", "=", "bt_addr_to_string", "(", "pkt", "[", "7", ":", "13", "]", ")", "rssi", "=", "bin_to_int", "(", "pkt", "[", "-", "1", "]", ")", "# strip bluetooth address and parse packet", "packet", "=", "parse_packet", "(", "pkt", "[", "14", ":", "-", "1", "]", ")", "# return if packet was not an beacon advertisement", "if", "not", "packet", ":", "return", "# we need to remeber which eddystone beacon has which bt address", "# because the TLM and URL frames do not contain the namespace and instance", "self", ".", "save_bt_addr", "(", "packet", ",", "bt_addr", ")", "# properties holds the identifying information for a beacon", "# e.g. instance and namespace for eddystone; uuid, major, minor for iBeacon", "properties", "=", "self", ".", "get_properties", "(", "packet", ",", "bt_addr", ")", "if", "self", ".", "device_filter", "is", "None", "and", "self", ".", "packet_filter", "is", "None", ":", "# no filters selected", "self", ".", "callback", "(", "bt_addr", ",", "rssi", ",", "packet", ",", "properties", ")", "elif", "self", ".", "device_filter", "is", "None", ":", "# filter by packet type", "if", "is_one_of", "(", "packet", ",", "self", ".", "packet_filter", ")", ":", "self", ".", "callback", "(", "bt_addr", ",", "rssi", ",", "packet", ",", "properties", ")", "else", ":", "# filter by device and packet type", "if", "self", ".", "packet_filter", "and", "not", "is_one_of", "(", "packet", ",", "self", ".", "packet_filter", ")", ":", "# return if packet filter does not match", "return", "# iterate over filters and call .matches() on each", "for", "filtr", "in", "self", ".", "device_filter", ":", "if", "isinstance", "(", "filtr", ",", "BtAddrFilter", ")", ":", "if", "filtr", ".", "matches", "(", "{", "'bt_addr'", ":", "bt_addr", "}", ")", ":", "self", ".", "callback", "(", "bt_addr", ",", "rssi", ",", "packet", ",", "properties", ")", "return", "elif", "filtr", ".", "matches", "(", "properties", ")", ":", "self", ".", "callback", "(", "bt_addr", ",", "rssi", ",", "packet", ",", "properties", ")", "return" ]
Parse the packet and call callback if one of the filters matches.
[ "Parse", "the", "packet", "and", "call", "callback", "if", "one", "of", "the", "filters", "matches", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/scanner.py#L163-L213
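The raw-byte prefilter above compares fixed offsets of the advertising report against well-known identifiers before any real parsing. To the best of my knowledge these little-endian constants are Apple's company ID plus the iBeacon type/length bytes and the Eddystone and Estimote 16-bit service UUIDs, but treat the labels below as an annotation rather than something stated in this record:

# Illustrative labels for the prefilter constants; offsets copied from process_packet.
IBEACON_PREFIX = b"\x4c\x00\x02\x15"  # company ID 0x004C (Apple, little-endian) + type 0x02 + length 0x15
EDDYSTONE_UUID_LE = b"\xaa\xfe"       # 16-bit service UUID 0xFEAA, little-endian
ESTIMOTE_UUID_LE = b"\x9a\xfe"        # 16-bit service UUID 0xFE9A, little-endian

def looks_like_beacon(pkt):
    return (pkt[19:23] == IBEACON_PREFIX
            or pkt[19:21] == EDDYSTONE_UUID_LE
            or pkt[19:21] == ESTIMOTE_UUID_LE)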
1,140
citruz/beacontools
beacontools/scanner.py
Monitor.save_bt_addr
def save_bt_addr(self, packet, bt_addr): """Add to the list of mappings.""" if isinstance(packet, EddystoneUIDFrame): # remove out old mapping new_mappings = [m for m in self.eddystone_mappings if m[0] != bt_addr] new_mappings.append((bt_addr, packet.properties)) self.eddystone_mappings = new_mappings
python
def save_bt_addr(self, packet, bt_addr): """Add to the list of mappings.""" if isinstance(packet, EddystoneUIDFrame): # remove out old mapping new_mappings = [m for m in self.eddystone_mappings if m[0] != bt_addr] new_mappings.append((bt_addr, packet.properties)) self.eddystone_mappings = new_mappings
[ "def", "save_bt_addr", "(", "self", ",", "packet", ",", "bt_addr", ")", ":", "if", "isinstance", "(", "packet", ",", "EddystoneUIDFrame", ")", ":", "# remove out old mapping", "new_mappings", "=", "[", "m", "for", "m", "in", "self", ".", "eddystone_mappings", "if", "m", "[", "0", "]", "!=", "bt_addr", "]", "new_mappings", ".", "append", "(", "(", "bt_addr", ",", "packet", ".", "properties", ")", ")", "self", ".", "eddystone_mappings", "=", "new_mappings" ]
Add to the list of mappings.
[ "Add", "to", "the", "list", "of", "mappings", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/scanner.py#L215-L221
1,141
citruz/beacontools
beacontools/scanner.py
Monitor.get_properties
def get_properties(self, packet, bt_addr): """Get properties of beacon depending on type.""" if is_one_of(packet, [EddystoneTLMFrame, EddystoneURLFrame, \ EddystoneEncryptedTLMFrame, EddystoneEIDFrame]): # here we retrieve the namespace and instance which corresponds to the # eddystone beacon with this bt address return self.properties_from_mapping(bt_addr) else: return packet.properties
python
def get_properties(self, packet, bt_addr): """Get properties of beacon depending on type.""" if is_one_of(packet, [EddystoneTLMFrame, EddystoneURLFrame, \ EddystoneEncryptedTLMFrame, EddystoneEIDFrame]): # here we retrieve the namespace and instance which corresponds to the # eddystone beacon with this bt address return self.properties_from_mapping(bt_addr) else: return packet.properties
[ "def", "get_properties", "(", "self", ",", "packet", ",", "bt_addr", ")", ":", "if", "is_one_of", "(", "packet", ",", "[", "EddystoneTLMFrame", ",", "EddystoneURLFrame", ",", "EddystoneEncryptedTLMFrame", ",", "EddystoneEIDFrame", "]", ")", ":", "# here we retrieve the namespace and instance which corresponds to the", "# eddystone beacon with this bt address", "return", "self", ".", "properties_from_mapping", "(", "bt_addr", ")", "else", ":", "return", "packet", ".", "properties" ]
Get properties of beacon depending on type.
[ "Get", "properties", "of", "beacon", "depending", "on", "type", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/scanner.py#L223-L231
1,142
citruz/beacontools
beacontools/scanner.py
Monitor.terminate
def terminate(self): """Signal runner to stop and join thread.""" self.toggle_scan(False) self.keep_going = False self.join()
python
def terminate(self): """Signal runner to stop and join thread.""" self.toggle_scan(False) self.keep_going = False self.join()
[ "def", "terminate", "(", "self", ")", ":", "self", ".", "toggle_scan", "(", "False", ")", "self", ".", "keep_going", "=", "False", "self", ".", "join", "(", ")" ]
Signal runner to stop and join thread.
[ "Signal", "runner", "to", "stop", "and", "join", "thread", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/scanner.py#L240-L244
1,143
citruz/beacontools
beacontools/utils.py
data_to_uuid
def data_to_uuid(data): """Convert an array of binary data to the iBeacon uuid format.""" string = data_to_hexstring(data) return string[0:8]+'-'+string[8:12]+'-'+string[12:16]+'-'+string[16:20]+'-'+string[20:32]
python
def data_to_uuid(data): """Convert an array of binary data to the iBeacon uuid format.""" string = data_to_hexstring(data) return string[0:8]+'-'+string[8:12]+'-'+string[12:16]+'-'+string[16:20]+'-'+string[20:32]
[ "def", "data_to_uuid", "(", "data", ")", ":", "string", "=", "data_to_hexstring", "(", "data", ")", "return", "string", "[", "0", ":", "8", "]", "+", "'-'", "+", "string", "[", "8", ":", "12", "]", "+", "'-'", "+", "string", "[", "12", ":", "16", "]", "+", "'-'", "+", "string", "[", "16", ":", "20", "]", "+", "'-'", "+", "string", "[", "20", ":", "32", "]" ]
Convert an array of binary data to the iBeacon uuid format.
[ "Convert", "an", "array", "of", "binary", "data", "to", "the", "iBeacon", "uuid", "format", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/utils.py#L24-L27
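A worked example of the slice positions, assuming data_to_hexstring (not shown in this record) is equivalent to hexlify(...).decode():

from binascii import hexlify

data = bytes(range(16))
string = hexlify(data).decode("ascii")
uuid = string[0:8] + "-" + string[8:12] + "-" + string[12:16] + "-" + string[16:20] + "-" + string[20:32]
assert uuid == "00010203-0405-0607-0809-0a0b0c0d0e0f"   # 8-4-4-4-12 grouping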
1,144
citruz/beacontools
beacontools/utils.py
bt_addr_to_string
def bt_addr_to_string(addr): """Convert a binary string to the hex representation.""" addr_str = array.array('B', addr) addr_str.reverse() hex_str = hexlify(addr_str.tostring()).decode('ascii') # insert ":" seperator between the bytes return ':'.join(a+b for a, b in zip(hex_str[::2], hex_str[1::2]))
python
def bt_addr_to_string(addr): """Convert a binary string to the hex representation.""" addr_str = array.array('B', addr) addr_str.reverse() hex_str = hexlify(addr_str.tostring()).decode('ascii') # insert ":" seperator between the bytes return ':'.join(a+b for a, b in zip(hex_str[::2], hex_str[1::2]))
[ "def", "bt_addr_to_string", "(", "addr", ")", ":", "addr_str", "=", "array", ".", "array", "(", "'B'", ",", "addr", ")", "addr_str", ".", "reverse", "(", ")", "hex_str", "=", "hexlify", "(", "addr_str", ".", "tostring", "(", ")", ")", ".", "decode", "(", "'ascii'", ")", "# insert \":\" seperator between the bytes", "return", "':'", ".", "join", "(", "a", "+", "b", "for", "a", ",", "b", "in", "zip", "(", "hex_str", "[", ":", ":", "2", "]", ",", "hex_str", "[", "1", ":", ":", "2", "]", ")", ")" ]
Convert a binary string to the hex representation.
[ "Convert", "a", "binary", "string", "to", "the", "hex", "representation", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/utils.py#L35-L41
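BLE advertising reports carry the address little-endian, hence the reversal before hex-encoding. A sketch of the same behaviour without array.tostring() (which newer Python versions no longer provide); it should give the same output as the helper above for the same input:

from binascii import hexlify

def bt_addr_to_string(addr):
    # Reverse the little-endian bytes, hex-encode, then join byte pairs with ":".
    hex_str = hexlify(bytes(reversed(addr))).decode("ascii")
    return ":".join(hex_str[i:i + 2] for i in range(0, len(hex_str), 2))

assert bt_addr_to_string(b"\x12\x34\x56\x78\x9a\xbc") == "bc:9a:78:56:34:12"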
1,145
citruz/beacontools
beacontools/utils.py
is_one_of
def is_one_of(obj, types): """Return true iff obj is an instance of one of the types.""" for type_ in types: if isinstance(obj, type_): return True return False
python
def is_one_of(obj, types): """Return true iff obj is an instance of one of the types.""" for type_ in types: if isinstance(obj, type_): return True return False
[ "def", "is_one_of", "(", "obj", ",", "types", ")", ":", "for", "type_", "in", "types", ":", "if", "isinstance", "(", "obj", ",", "type_", ")", ":", "return", "True", "return", "False" ]
Return true iff obj is an instance of one of the types.
[ "Return", "true", "iff", "obj", "is", "an", "instance", "of", "one", "of", "the", "types", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/utils.py#L44-L49
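Usage note: isinstance already accepts a tuple of types, so the loop is equivalent to a one-liner; the class names below are local stand-ins, not types from the package:

class EddystoneLikeA: pass
class EddystoneLikeB: pass

def is_one_of(obj, types):
    # Same result as the loop version: isinstance accepts a tuple of types.
    return isinstance(obj, tuple(types))

assert is_one_of(EddystoneLikeA(), [EddystoneLikeA, EddystoneLikeB]) is True
assert is_one_of(42, [EddystoneLikeA, EddystoneLikeB]) is False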
1,146
citruz/beacontools
beacontools/utils.py
is_packet_type
def is_packet_type(cls): """Check if class is one the packet types.""" from .packet_types import EddystoneUIDFrame, EddystoneURLFrame, \ EddystoneEncryptedTLMFrame, EddystoneTLMFrame, \ EddystoneEIDFrame, IBeaconAdvertisement, \ EstimoteTelemetryFrameA, EstimoteTelemetryFrameB return (cls in [EddystoneURLFrame, EddystoneUIDFrame, EddystoneEncryptedTLMFrame, \ EddystoneTLMFrame, EddystoneEIDFrame, IBeaconAdvertisement, \ EstimoteTelemetryFrameA, EstimoteTelemetryFrameB])
python
def is_packet_type(cls): """Check if class is one the packet types.""" from .packet_types import EddystoneUIDFrame, EddystoneURLFrame, \ EddystoneEncryptedTLMFrame, EddystoneTLMFrame, \ EddystoneEIDFrame, IBeaconAdvertisement, \ EstimoteTelemetryFrameA, EstimoteTelemetryFrameB return (cls in [EddystoneURLFrame, EddystoneUIDFrame, EddystoneEncryptedTLMFrame, \ EddystoneTLMFrame, EddystoneEIDFrame, IBeaconAdvertisement, \ EstimoteTelemetryFrameA, EstimoteTelemetryFrameB])
[ "def", "is_packet_type", "(", "cls", ")", ":", "from", ".", "packet_types", "import", "EddystoneUIDFrame", ",", "EddystoneURLFrame", ",", "EddystoneEncryptedTLMFrame", ",", "EddystoneTLMFrame", ",", "EddystoneEIDFrame", ",", "IBeaconAdvertisement", ",", "EstimoteTelemetryFrameA", ",", "EstimoteTelemetryFrameB", "return", "(", "cls", "in", "[", "EddystoneURLFrame", ",", "EddystoneUIDFrame", ",", "EddystoneEncryptedTLMFrame", ",", "EddystoneTLMFrame", ",", "EddystoneEIDFrame", ",", "IBeaconAdvertisement", ",", "EstimoteTelemetryFrameA", ",", "EstimoteTelemetryFrameB", "]", ")" ]
Check if class is one of the packet types.
[ "Check", "if", "class", "is", "one", "of", "the", "packet", "types", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/utils.py#L52-L60
1,147
citruz/beacontools
beacontools/utils.py
bin_to_int
def bin_to_int(string): """Convert a one element byte string to signed int for python 2 support.""" if isinstance(string, str): return struct.unpack("b", string)[0] else: return struct.unpack("b", bytes([string]))[0]
python
def bin_to_int(string): """Convert a one element byte string to signed int for python 2 support.""" if isinstance(string, str): return struct.unpack("b", string)[0] else: return struct.unpack("b", bytes([string]))[0]
[ "def", "bin_to_int", "(", "string", ")", ":", "if", "isinstance", "(", "string", ",", "str", ")", ":", "return", "struct", ".", "unpack", "(", "\"b\"", ",", "string", ")", "[", "0", "]", "else", ":", "return", "struct", ".", "unpack", "(", "\"b\"", ",", "bytes", "(", "[", "string", "]", ")", ")", "[", "0", "]" ]
Convert a one element byte string to signed int for python 2 support.
[ "Convert", "a", "one", "element", "byte", "string", "to", "signed", "int", "for", "python", "2", "support", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/utils.py#L71-L76
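Worked example: under Python 3 an index into a bytes packet is already an int, so the else branch applies; a byte like 0xCE (a typical RSSI reading) comes back as -50:

import struct

def bin_to_int(value):
    # Python 3 path of the helper: reinterpret an unsigned byte 0-255 as signed.
    return struct.unpack("b", bytes([value]))[0]

assert bin_to_int(0xCE) == -50   # e.g. an RSSI byte from an advertising report
assert bin_to_int(0x7F) == 127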
1,148
citruz/beacontools
beacontools/utils.py
get_mode
def get_mode(device_filter): """Determine which beacons the scanner should look for.""" from .device_filters import IBeaconFilter, EddystoneFilter, BtAddrFilter, EstimoteFilter if device_filter is None or len(device_filter) == 0: return ScannerMode.MODE_ALL mode = ScannerMode.MODE_NONE for filtr in device_filter: if isinstance(filtr, IBeaconFilter): mode |= ScannerMode.MODE_IBEACON elif isinstance(filtr, EddystoneFilter): mode |= ScannerMode.MODE_EDDYSTONE elif isinstance(filtr, EstimoteFilter): mode |= ScannerMode.MODE_ESTIMOTE elif isinstance(filtr, BtAddrFilter): mode |= ScannerMode.MODE_ALL break return mode
python
def get_mode(device_filter): """Determine which beacons the scanner should look for.""" from .device_filters import IBeaconFilter, EddystoneFilter, BtAddrFilter, EstimoteFilter if device_filter is None or len(device_filter) == 0: return ScannerMode.MODE_ALL mode = ScannerMode.MODE_NONE for filtr in device_filter: if isinstance(filtr, IBeaconFilter): mode |= ScannerMode.MODE_IBEACON elif isinstance(filtr, EddystoneFilter): mode |= ScannerMode.MODE_EDDYSTONE elif isinstance(filtr, EstimoteFilter): mode |= ScannerMode.MODE_ESTIMOTE elif isinstance(filtr, BtAddrFilter): mode |= ScannerMode.MODE_ALL break return mode
[ "def", "get_mode", "(", "device_filter", ")", ":", "from", ".", "device_filters", "import", "IBeaconFilter", ",", "EddystoneFilter", ",", "BtAddrFilter", ",", "EstimoteFilter", "if", "device_filter", "is", "None", "or", "len", "(", "device_filter", ")", "==", "0", ":", "return", "ScannerMode", ".", "MODE_ALL", "mode", "=", "ScannerMode", ".", "MODE_NONE", "for", "filtr", "in", "device_filter", ":", "if", "isinstance", "(", "filtr", ",", "IBeaconFilter", ")", ":", "mode", "|=", "ScannerMode", ".", "MODE_IBEACON", "elif", "isinstance", "(", "filtr", ",", "EddystoneFilter", ")", ":", "mode", "|=", "ScannerMode", ".", "MODE_EDDYSTONE", "elif", "isinstance", "(", "filtr", ",", "EstimoteFilter", ")", ":", "mode", "|=", "ScannerMode", ".", "MODE_ESTIMOTE", "elif", "isinstance", "(", "filtr", ",", "BtAddrFilter", ")", ":", "mode", "|=", "ScannerMode", ".", "MODE_ALL", "break", "return", "mode" ]
Determine which beacons the scanner should look for.
[ "Determine", "which", "beacons", "the", "scanner", "should", "look", "for", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/utils.py#L79-L97
1,149
citruz/beacontools
beacontools/device_filters.py
DeviceFilter.matches
def matches(self, filter_props): """Check if the filter matches the supplied properties.""" if filter_props is None: return False found_one = False for key, value in filter_props.items(): if key in self.properties and value != self.properties[key]: return False elif key in self.properties and value == self.properties[key]: found_one = True return found_one
python
def matches(self, filter_props): """Check if the filter matches the supplied properties.""" if filter_props is None: return False found_one = False for key, value in filter_props.items(): if key in self.properties and value != self.properties[key]: return False elif key in self.properties and value == self.properties[key]: found_one = True return found_one
[ "def", "matches", "(", "self", ",", "filter_props", ")", ":", "if", "filter_props", "is", "None", ":", "return", "False", "found_one", "=", "False", "for", "key", ",", "value", "in", "filter_props", ".", "items", "(", ")", ":", "if", "key", "in", "self", ".", "properties", "and", "value", "!=", "self", ".", "properties", "[", "key", "]", ":", "return", "False", "elif", "key", "in", "self", ".", "properties", "and", "value", "==", "self", ".", "properties", "[", "key", "]", ":", "found_one", "=", "True", "return", "found_one" ]
Check if the filter matches the supplied properties.
[ "Check", "if", "the", "filter", "matches", "the", "supplied", "properties", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/device_filters.py#L13-L25
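A worked example of the matching rule: every supplied key that the filter also defines must agree, and at least one such key must be present. The class below is a minimal stand-in with the same matches() body, and the property names are illustrative only:

class FilterSketch:
    def __init__(self, **properties):
        self.properties = properties

    def matches(self, filter_props):
        # Same logic as DeviceFilter.matches above.
        if filter_props is None:
            return False
        found_one = False
        for key, value in filter_props.items():
            if key in self.properties and value != self.properties[key]:
                return False
            elif key in self.properties and value == self.properties[key]:
                found_one = True
        return found_one

beacon = FilterSketch(namespace="aaaaaaaaaaaaaaaaaaaa", instance="000000000001")
assert beacon.matches({"namespace": "aaaaaaaaaaaaaaaaaaaa"}) is True
assert beacon.matches({"namespace": "ffffffffffffffffffff"}) is False
assert beacon.matches({"unrelated": "x"}) is False   # no overlapping key, so no match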
1,150
citruz/beacontools
beacontools/parser.py
parse_packet
def parse_packet(packet): """Parse a beacon advertisement packet.""" frame = parse_ltv_packet(packet) if frame is None: frame = parse_ibeacon_packet(packet) return frame
python
def parse_packet(packet): """Parse a beacon advertisement packet.""" frame = parse_ltv_packet(packet) if frame is None: frame = parse_ibeacon_packet(packet) return frame
[ "def", "parse_packet", "(", "packet", ")", ":", "frame", "=", "parse_ltv_packet", "(", "packet", ")", "if", "frame", "is", "None", ":", "frame", "=", "parse_ibeacon_packet", "(", "packet", ")", "return", "frame" ]
Parse a beacon advertisement packet.
[ "Parse", "a", "beacon", "advertisement", "packet", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/parser.py#L14-L19
1,151
citruz/beacontools
beacontools/parser.py
parse_ltv_packet
def parse_ltv_packet(packet): """Parse a tag-length-value style beacon packet.""" try: frame = LTVFrame.parse(packet) for ltv in frame: if ltv['type'] == SERVICE_DATA_TYPE: data = ltv['value'] if data["service_identifier"] == EDDYSTONE_UUID: return parse_eddystone_service_data(data) elif data["service_identifier"] == ESTIMOTE_UUID: return parse_estimote_service_data(data) except ConstructError: return None return None
python
def parse_ltv_packet(packet): """Parse a tag-length-value style beacon packet.""" try: frame = LTVFrame.parse(packet) for ltv in frame: if ltv['type'] == SERVICE_DATA_TYPE: data = ltv['value'] if data["service_identifier"] == EDDYSTONE_UUID: return parse_eddystone_service_data(data) elif data["service_identifier"] == ESTIMOTE_UUID: return parse_estimote_service_data(data) except ConstructError: return None return None
[ "def", "parse_ltv_packet", "(", "packet", ")", ":", "try", ":", "frame", "=", "LTVFrame", ".", "parse", "(", "packet", ")", "for", "ltv", "in", "frame", ":", "if", "ltv", "[", "'type'", "]", "==", "SERVICE_DATA_TYPE", ":", "data", "=", "ltv", "[", "'value'", "]", "if", "data", "[", "\"service_identifier\"", "]", "==", "EDDYSTONE_UUID", ":", "return", "parse_eddystone_service_data", "(", "data", ")", "elif", "data", "[", "\"service_identifier\"", "]", "==", "ESTIMOTE_UUID", ":", "return", "parse_estimote_service_data", "(", "data", ")", "except", "ConstructError", ":", "return", "None", "return", "None" ]
Parse a tag-length-value style beacon packet.
[ "Parse", "a", "tag", "-", "length", "-", "value", "style", "beacon", "packet", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/parser.py#L21-L38
1,152
citruz/beacontools
beacontools/parser.py
parse_eddystone_service_data
def parse_eddystone_service_data(data): """Parse Eddystone service data.""" if data['frame_type'] == EDDYSTONE_UID_FRAME: return EddystoneUIDFrame(data['frame']) elif data['frame_type'] == EDDYSTONE_TLM_FRAME: if data['frame']['tlm_version'] == EDDYSTONE_TLM_ENCRYPTED: return EddystoneEncryptedTLMFrame(data['frame']['data']) elif data['frame']['tlm_version'] == EDDYSTONE_TLM_UNENCRYPTED: return EddystoneTLMFrame(data['frame']['data']) elif data['frame_type'] == EDDYSTONE_URL_FRAME: return EddystoneURLFrame(data['frame']) elif data['frame_type'] == EDDYSTONE_EID_FRAME: return EddystoneEIDFrame(data['frame']) else: return None
python
def parse_eddystone_service_data(data): """Parse Eddystone service data.""" if data['frame_type'] == EDDYSTONE_UID_FRAME: return EddystoneUIDFrame(data['frame']) elif data['frame_type'] == EDDYSTONE_TLM_FRAME: if data['frame']['tlm_version'] == EDDYSTONE_TLM_ENCRYPTED: return EddystoneEncryptedTLMFrame(data['frame']['data']) elif data['frame']['tlm_version'] == EDDYSTONE_TLM_UNENCRYPTED: return EddystoneTLMFrame(data['frame']['data']) elif data['frame_type'] == EDDYSTONE_URL_FRAME: return EddystoneURLFrame(data['frame']) elif data['frame_type'] == EDDYSTONE_EID_FRAME: return EddystoneEIDFrame(data['frame']) else: return None
[ "def", "parse_eddystone_service_data", "(", "data", ")", ":", "if", "data", "[", "'frame_type'", "]", "==", "EDDYSTONE_UID_FRAME", ":", "return", "EddystoneUIDFrame", "(", "data", "[", "'frame'", "]", ")", "elif", "data", "[", "'frame_type'", "]", "==", "EDDYSTONE_TLM_FRAME", ":", "if", "data", "[", "'frame'", "]", "[", "'tlm_version'", "]", "==", "EDDYSTONE_TLM_ENCRYPTED", ":", "return", "EddystoneEncryptedTLMFrame", "(", "data", "[", "'frame'", "]", "[", "'data'", "]", ")", "elif", "data", "[", "'frame'", "]", "[", "'tlm_version'", "]", "==", "EDDYSTONE_TLM_UNENCRYPTED", ":", "return", "EddystoneTLMFrame", "(", "data", "[", "'frame'", "]", "[", "'data'", "]", ")", "elif", "data", "[", "'frame_type'", "]", "==", "EDDYSTONE_URL_FRAME", ":", "return", "EddystoneURLFrame", "(", "data", "[", "'frame'", "]", ")", "elif", "data", "[", "'frame_type'", "]", "==", "EDDYSTONE_EID_FRAME", ":", "return", "EddystoneEIDFrame", "(", "data", "[", "'frame'", "]", ")", "else", ":", "return", "None" ]
Parse Eddystone service data.
[ "Parse", "Eddystone", "service", "data", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/parser.py#L40-L57
1,153
citruz/beacontools
beacontools/parser.py
parse_estimote_service_data
def parse_estimote_service_data(data): """Parse Estimote service data.""" if data['frame_type'] & 0xF == ESTIMOTE_TELEMETRY_FRAME: protocol_version = (data['frame_type'] & 0xF0) >> 4 if data['frame']['subframe_type'] == ESTIMOTE_TELEMETRY_SUBFRAME_A: return EstimoteTelemetryFrameA(data['frame'], protocol_version) elif data['frame']['subframe_type'] == ESTIMOTE_TELEMETRY_SUBFRAME_B: return EstimoteTelemetryFrameB(data['frame'], protocol_version) return None
python
def parse_estimote_service_data(data): """Parse Estimote service data.""" if data['frame_type'] & 0xF == ESTIMOTE_TELEMETRY_FRAME: protocol_version = (data['frame_type'] & 0xF0) >> 4 if data['frame']['subframe_type'] == ESTIMOTE_TELEMETRY_SUBFRAME_A: return EstimoteTelemetryFrameA(data['frame'], protocol_version) elif data['frame']['subframe_type'] == ESTIMOTE_TELEMETRY_SUBFRAME_B: return EstimoteTelemetryFrameB(data['frame'], protocol_version) return None
[ "def", "parse_estimote_service_data", "(", "data", ")", ":", "if", "data", "[", "'frame_type'", "]", "&", "0xF", "==", "ESTIMOTE_TELEMETRY_FRAME", ":", "protocol_version", "=", "(", "data", "[", "'frame_type'", "]", "&", "0xF0", ")", ">>", "4", "if", "data", "[", "'frame'", "]", "[", "'subframe_type'", "]", "==", "ESTIMOTE_TELEMETRY_SUBFRAME_A", ":", "return", "EstimoteTelemetryFrameA", "(", "data", "[", "'frame'", "]", ",", "protocol_version", ")", "elif", "data", "[", "'frame'", "]", "[", "'subframe_type'", "]", "==", "ESTIMOTE_TELEMETRY_SUBFRAME_B", ":", "return", "EstimoteTelemetryFrameB", "(", "data", "[", "'frame'", "]", ",", "protocol_version", ")", "return", "None" ]
Parse Estimote service data.
[ "Parse", "Estimote", "service", "data", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/parser.py#L59-L67
1,154
citruz/beacontools
beacontools/packet_types/estimote.py
EstimoteTelemetryFrameA.parse_motion_state
def parse_motion_state(val): """Convert motion state byte to seconds.""" number = val & 0b00111111 unit = (val & 0b11000000) >> 6 if unit == 1: number *= 60 # minutes elif unit == 2: number *= 60 * 60 # hours elif unit == 3 and number < 32: number *= 60 * 60 * 24 # days elif unit == 3: number -= 32 number *= 60 * 60 * 24 * 7 # weeks return number
python
def parse_motion_state(val): """Convert motion state byte to seconds.""" number = val & 0b00111111 unit = (val & 0b11000000) >> 6 if unit == 1: number *= 60 # minutes elif unit == 2: number *= 60 * 60 # hours elif unit == 3 and number < 32: number *= 60 * 60 * 24 # days elif unit == 3: number -= 32 number *= 60 * 60 * 24 * 7 # weeks return number
[ "def", "parse_motion_state", "(", "val", ")", ":", "number", "=", "val", "&", "0b00111111", "unit", "=", "(", "val", "&", "0b11000000", ")", ">>", "6", "if", "unit", "==", "1", ":", "number", "*=", "60", "# minutes", "elif", "unit", "==", "2", ":", "number", "*=", "60", "*", "60", "# hours", "elif", "unit", "==", "3", "and", "number", "<", "32", ":", "number", "*=", "60", "*", "60", "*", "24", "# days", "elif", "unit", "==", "3", ":", "number", "-=", "32", "number", "*=", "60", "*", "60", "*", "24", "*", "7", "# weeks", "return", "number" ]
Convert motion state byte to seconds.
[ "Convert", "motion", "state", "byte", "to", "seconds", "." ]
15a83e9750d0a4393f8a36868e07f6d9458253fe
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/packet_types/estimote.py#L46-L59
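Worked example of the encoding the parser above decodes: the low six bits carry a count and the top two bits pick the unit (0 = seconds, 1 = minutes, 2 = hours, 3 = days below 32 and weeks at 32 and above):

def parse_motion_state(val):
    # Same decoding as above: bits 0-5 = count, bits 6-7 = unit.
    number = val & 0b00111111
    unit = (val & 0b11000000) >> 6
    if unit == 1:
        number *= 60                       # minutes
    elif unit == 2:
        number *= 60 * 60                  # hours
    elif unit == 3 and number < 32:
        number *= 60 * 60 * 24             # days
    elif unit == 3:
        number = (number - 32) * 60 * 60 * 24 * 7   # weeks
    return number

assert parse_motion_state(0b00001010) == 10          # 10 seconds
assert parse_motion_state(0b01000101) == 5 * 60      # 5 minutes
assert parse_motion_state(0b11100011) == 3 * 604800  # count 35 -> 35 - 32 = 3 weeks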
1,155
Polyconseil/zbarlight
docs/conf.py
_Zbarlight.monkey_patch
def monkey_patch(cls): """Monkey path zbarlight C extension on Read The Docs""" on_read_the_docs = os.environ.get('READTHEDOCS', False) if on_read_the_docs: sys.modules['zbarlight._zbarlight'] = cls
python
def monkey_patch(cls): """Monkey path zbarlight C extension on Read The Docs""" on_read_the_docs = os.environ.get('READTHEDOCS', False) if on_read_the_docs: sys.modules['zbarlight._zbarlight'] = cls
[ "def", "monkey_patch", "(", "cls", ")", ":", "on_read_the_docs", "=", "os", ".", "environ", ".", "get", "(", "'READTHEDOCS'", ",", "False", ")", "if", "on_read_the_docs", ":", "sys", ".", "modules", "[", "'zbarlight._zbarlight'", "]", "=", "cls" ]
Monkey patch the zbarlight C extension on Read the Docs
[ "Monkey", "patch", "the", "zbarlight", "C", "extension", "on", "Read", "the", "Docs" ]
97f46696516683af863d87935074e772e89b4292
https://github.com/Polyconseil/zbarlight/blob/97f46696516683af863d87935074e772e89b4292/docs/conf.py#L23-L27
1,156
adafruit/Adafruit_CircuitPython_framebuf
adafruit_framebuf.py
MVLSBFormat.set_pixel
def set_pixel(framebuf, x, y, color): """Set a given pixel to a color.""" index = (y >> 3) * framebuf.stride + x offset = y & 0x07 framebuf.buf[index] = (framebuf.buf[index] & ~(0x01 << offset)) | ((color != 0) << offset)
python
def set_pixel(framebuf, x, y, color): """Set a given pixel to a color.""" index = (y >> 3) * framebuf.stride + x offset = y & 0x07 framebuf.buf[index] = (framebuf.buf[index] & ~(0x01 << offset)) | ((color != 0) << offset)
[ "def", "set_pixel", "(", "framebuf", ",", "x", ",", "y", ",", "color", ")", ":", "index", "=", "(", "y", ">>", "3", ")", "*", "framebuf", ".", "stride", "+", "x", "offset", "=", "y", "&", "0x07", "framebuf", ".", "buf", "[", "index", "]", "=", "(", "framebuf", ".", "buf", "[", "index", "]", "&", "~", "(", "0x01", "<<", "offset", ")", ")", "|", "(", "(", "color", "!=", "0", ")", "<<", "offset", ")" ]
Set a given pixel to a color.
[ "Set", "a", "given", "pixel", "to", "a", "color", "." ]
b9f62c4b71efa963150f9c5a0284b61c7add9d02
https://github.com/adafruit/Adafruit_CircuitPython_framebuf/blob/b9f62c4b71efa963150f9c5a0284b61c7add9d02/adafruit_framebuf.py#L96-L100
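The MVLSB layout packs eight vertically adjacent pixels into one byte, so the byte index walks across rows of byte-columns and the bit offset is y modulo 8. A worked example of that arithmetic on a hypothetical 16x32 buffer (stride assumed equal to width):

width, height = 16, 32
stride = width                      # assumed: no padding between rows
buf = bytearray(width * height // 8)

def set_pixel(buf, stride, x, y, color):
    # Same addressing as MVLSBFormat.set_pixel above.
    index = (y >> 3) * stride + x
    offset = y & 0x07
    buf[index] = (buf[index] & ~(0x01 << offset)) | ((color != 0) << offset)

set_pixel(buf, stride, 3, 13, 1)
# y = 13 -> byte row 13 // 8 = 1, bit 13 % 8 = 5, so byte 1 * 16 + 3 = 19 gets mask 0b00100000.
assert buf[19] == 0b00100000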
1,157
adafruit/Adafruit_CircuitPython_framebuf
adafruit_framebuf.py
MVLSBFormat.get_pixel
def get_pixel(framebuf, x, y): """Get the color of a given pixel""" index = (y >> 3) * framebuf.stride + x offset = y & 0x07 return (framebuf.buf[index] >> offset) & 0x01
python
def get_pixel(framebuf, x, y): """Get the color of a given pixel""" index = (y >> 3) * framebuf.stride + x offset = y & 0x07 return (framebuf.buf[index] >> offset) & 0x01
[ "def", "get_pixel", "(", "framebuf", ",", "x", ",", "y", ")", ":", "index", "=", "(", "y", ">>", "3", ")", "*", "framebuf", ".", "stride", "+", "x", "offset", "=", "y", "&", "0x07", "return", "(", "framebuf", ".", "buf", "[", "index", "]", ">>", "offset", ")", "&", "0x01" ]
Get the color of a given pixel
[ "Get", "the", "color", "of", "a", "given", "pixel" ]
b9f62c4b71efa963150f9c5a0284b61c7add9d02
https://github.com/adafruit/Adafruit_CircuitPython_framebuf/blob/b9f62c4b71efa963150f9c5a0284b61c7add9d02/adafruit_framebuf.py#L103-L107
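The two MVLSB records above pack pixels vertically: each byte covers an 8-pixel-tall slice of a column, so the byte index is (y >> 3) * stride + x and the bit offset is y & 0x07. A self-contained sketch of the same bit math on a plain bytearray; the 16x16 size and test coordinates are illustrative:

WIDTH, HEIGHT = 16, 16
stride = WIDTH                          # bytes per 8-pixel-high row band
buf = bytearray((HEIGHT // 8) * stride)

def set_pixel(buf, x, y, color):
    index = (y >> 3) * stride + x       # which byte
    offset = y & 0x07                   # which bit inside that byte
    buf[index] = (buf[index] & ~(0x01 << offset)) | ((color != 0) << offset)

def get_pixel(buf, x, y):
    index = (y >> 3) * stride + x
    return (buf[index] >> (y & 0x07)) & 0x01

set_pixel(buf, 5, 11, 1)
assert get_pixel(buf, 5, 11) == 1 and get_pixel(buf, 5, 10) == 0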
1,158
adafruit/Adafruit_CircuitPython_framebuf
adafruit_framebuf.py
FrameBuffer.pixel
def pixel(self, x, y, color=None): """If ``color`` is not given, get the color value of the specified pixel. If ``color`` is given, set the specified pixel to the given color.""" if self.rotation == 1: x, y = y, x x = self.width - x - 1 if self.rotation == 2: x = self.width - x - 1 y = self.height - y - 1 if self.rotation == 3: x, y = y, x y = self.height - y - 1 if x < 0 or x >= self.width or y < 0 or y >= self.height: return None if color is None: return self.format.get_pixel(self, x, y) self.format.set_pixel(self, x, y, color) return None
python
def pixel(self, x, y, color=None): """If ``color`` is not given, get the color value of the specified pixel. If ``color`` is given, set the specified pixel to the given color.""" if self.rotation == 1: x, y = y, x x = self.width - x - 1 if self.rotation == 2: x = self.width - x - 1 y = self.height - y - 1 if self.rotation == 3: x, y = y, x y = self.height - y - 1 if x < 0 or x >= self.width or y < 0 or y >= self.height: return None if color is None: return self.format.get_pixel(self, x, y) self.format.set_pixel(self, x, y, color) return None
[ "def", "pixel", "(", "self", ",", "x", ",", "y", ",", "color", "=", "None", ")", ":", "if", "self", ".", "rotation", "==", "1", ":", "x", ",", "y", "=", "y", ",", "x", "x", "=", "self", ".", "width", "-", "x", "-", "1", "if", "self", ".", "rotation", "==", "2", ":", "x", "=", "self", ".", "width", "-", "x", "-", "1", "y", "=", "self", ".", "height", "-", "y", "-", "1", "if", "self", ".", "rotation", "==", "3", ":", "x", ",", "y", "=", "y", ",", "x", "y", "=", "self", ".", "height", "-", "y", "-", "1", "if", "x", "<", "0", "or", "x", ">=", "self", ".", "width", "or", "y", "<", "0", "or", "y", ">=", "self", ".", "height", ":", "return", "None", "if", "color", "is", "None", ":", "return", "self", ".", "format", ".", "get_pixel", "(", "self", ",", "x", ",", "y", ")", "self", ".", "format", ".", "set_pixel", "(", "self", ",", "x", ",", "y", ",", "color", ")", "return", "None" ]
If ``color`` is not given, get the color value of the specified pixel. If ``color`` is given, set the specified pixel to the given color.
[ "If", "color", "is", "not", "given", "get", "the", "color", "value", "of", "the", "specified", "pixel", ".", "If", "color", "is", "given", "set", "the", "specified", "pixel", "to", "the", "given", "color", "." ]
b9f62c4b71efa963150f9c5a0284b61c7add9d02
https://github.com/adafruit/Adafruit_CircuitPython_framebuf/blob/b9f62c4b71efa963150f9c5a0284b61c7add9d02/adafruit_framebuf.py#L189-L207
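pixel() above remaps logical (x, y) according to the framebuffer rotation before reading or writing. The remapping in isolation, as a small function that can be sanity-checked; the 4x3 dimensions and expected corners are illustrative:

def rotate_coords(x, y, width, height, rotation):
    """Map logical (x, y) to physical coordinates for rotations 0-3 (90-degree steps)."""
    if rotation == 1:
        x, y = y, x
        x = width - x - 1
    elif rotation == 2:
        x = width - x - 1
        y = height - y - 1
    elif rotation == 3:
        x, y = y, x
        y = height - y - 1
    return x, y

# On a 4x3 (width x height) buffer, the logical origin lands in a different corner per rotation.
assert rotate_coords(0, 0, 4, 3, 0) == (0, 0)
assert rotate_coords(0, 0, 4, 3, 1) == (3, 0)
assert rotate_coords(0, 0, 4, 3, 2) == (3, 2)
assert rotate_coords(0, 0, 4, 3, 3) == (0, 2)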
1,159
adafruit/Adafruit_CircuitPython_framebuf
adafruit_framebuf.py
FrameBuffer.hline
def hline(self, x, y, width, color): """Draw a horizontal line up to a given length.""" self.rect(x, y, width, 1, color, fill=True)
python
def hline(self, x, y, width, color): """Draw a horizontal line up to a given length.""" self.rect(x, y, width, 1, color, fill=True)
[ "def", "hline", "(", "self", ",", "x", ",", "y", ",", "width", ",", "color", ")", ":", "self", ".", "rect", "(", "x", ",", "y", ",", "width", ",", "1", ",", "color", ",", "fill", "=", "True", ")" ]
Draw a horizontal line up to a given length.
[ "Draw", "a", "horizontal", "line", "up", "to", "a", "given", "length", "." ]
b9f62c4b71efa963150f9c5a0284b61c7add9d02
https://github.com/adafruit/Adafruit_CircuitPython_framebuf/blob/b9f62c4b71efa963150f9c5a0284b61c7add9d02/adafruit_framebuf.py#L209-L211
1,160
adafruit/Adafruit_CircuitPython_framebuf
adafruit_framebuf.py
FrameBuffer.vline
def vline(self, x, y, height, color): """Draw a vertical line up to a given length.""" self.rect(x, y, 1, height, color, fill=True)
python
def vline(self, x, y, height, color): """Draw a vertical line up to a given length.""" self.rect(x, y, 1, height, color, fill=True)
[ "def", "vline", "(", "self", ",", "x", ",", "y", ",", "height", ",", "color", ")", ":", "self", ".", "rect", "(", "x", ",", "y", ",", "1", ",", "height", ",", "color", ",", "fill", "=", "True", ")" ]
Draw a vertical line up to a given length.
[ "Draw", "a", "vertical", "line", "up", "to", "a", "given", "length", "." ]
b9f62c4b71efa963150f9c5a0284b61c7add9d02
https://github.com/adafruit/Adafruit_CircuitPython_framebuf/blob/b9f62c4b71efa963150f9c5a0284b61c7add9d02/adafruit_framebuf.py#L213-L215
1,161
adafruit/Adafruit_CircuitPython_framebuf
adafruit_framebuf.py
FrameBuffer.rect
def rect(self, x, y, width, height, color, *, fill=False): """Draw a rectangle at the given location, size and color. The ```rect``` method draws only a 1 pixel outline.""" # pylint: disable=too-many-arguments if self.rotation == 1: x, y = y, x width, height = height, width x = self.width - x - width if self.rotation == 2: x = self.width - x - width y = self.height - y - height if self.rotation == 3: x, y = y, x width, height = height, width y = self.height - y - height # pylint: disable=too-many-boolean-expressions if width < 1 or height < 1 or (x + width) <= 0 or (y + height) <= 0 or \ y >= self.height or x >= self.width: return x_end = min(self.width-1, x + width-1) y_end = min(self.height-1, y + height-1) x = max(x, 0) y = max(y, 0) if fill: self.format.fill_rect(self, x, y, x_end-x+1, y_end-y+1, color) else: self.format.fill_rect(self, x, y, x_end-x+1, 1, color) self.format.fill_rect(self, x, y, 1, y_end-y+1, color) self.format.fill_rect(self, x, y_end, x_end-x+1, 1, color) self.format.fill_rect(self, x_end, y, 1, y_end-y+1, color)
python
def rect(self, x, y, width, height, color, *, fill=False): """Draw a rectangle at the given location, size and color. The ```rect``` method draws only a 1 pixel outline.""" # pylint: disable=too-many-arguments if self.rotation == 1: x, y = y, x width, height = height, width x = self.width - x - width if self.rotation == 2: x = self.width - x - width y = self.height - y - height if self.rotation == 3: x, y = y, x width, height = height, width y = self.height - y - height # pylint: disable=too-many-boolean-expressions if width < 1 or height < 1 or (x + width) <= 0 or (y + height) <= 0 or \ y >= self.height or x >= self.width: return x_end = min(self.width-1, x + width-1) y_end = min(self.height-1, y + height-1) x = max(x, 0) y = max(y, 0) if fill: self.format.fill_rect(self, x, y, x_end-x+1, y_end-y+1, color) else: self.format.fill_rect(self, x, y, x_end-x+1, 1, color) self.format.fill_rect(self, x, y, 1, y_end-y+1, color) self.format.fill_rect(self, x, y_end, x_end-x+1, 1, color) self.format.fill_rect(self, x_end, y, 1, y_end-y+1, color)
[ "def", "rect", "(", "self", ",", "x", ",", "y", ",", "width", ",", "height", ",", "color", ",", "*", ",", "fill", "=", "False", ")", ":", "# pylint: disable=too-many-arguments", "if", "self", ".", "rotation", "==", "1", ":", "x", ",", "y", "=", "y", ",", "x", "width", ",", "height", "=", "height", ",", "width", "x", "=", "self", ".", "width", "-", "x", "-", "width", "if", "self", ".", "rotation", "==", "2", ":", "x", "=", "self", ".", "width", "-", "x", "-", "width", "y", "=", "self", ".", "height", "-", "y", "-", "height", "if", "self", ".", "rotation", "==", "3", ":", "x", ",", "y", "=", "y", ",", "x", "width", ",", "height", "=", "height", ",", "width", "y", "=", "self", ".", "height", "-", "y", "-", "height", "# pylint: disable=too-many-boolean-expressions", "if", "width", "<", "1", "or", "height", "<", "1", "or", "(", "x", "+", "width", ")", "<=", "0", "or", "(", "y", "+", "height", ")", "<=", "0", "or", "y", ">=", "self", ".", "height", "or", "x", ">=", "self", ".", "width", ":", "return", "x_end", "=", "min", "(", "self", ".", "width", "-", "1", ",", "x", "+", "width", "-", "1", ")", "y_end", "=", "min", "(", "self", ".", "height", "-", "1", ",", "y", "+", "height", "-", "1", ")", "x", "=", "max", "(", "x", ",", "0", ")", "y", "=", "max", "(", "y", ",", "0", ")", "if", "fill", ":", "self", ".", "format", ".", "fill_rect", "(", "self", ",", "x", ",", "y", ",", "x_end", "-", "x", "+", "1", ",", "y_end", "-", "y", "+", "1", ",", "color", ")", "else", ":", "self", ".", "format", ".", "fill_rect", "(", "self", ",", "x", ",", "y", ",", "x_end", "-", "x", "+", "1", ",", "1", ",", "color", ")", "self", ".", "format", ".", "fill_rect", "(", "self", ",", "x", ",", "y", ",", "1", ",", "y_end", "-", "y", "+", "1", ",", "color", ")", "self", ".", "format", ".", "fill_rect", "(", "self", ",", "x", ",", "y_end", ",", "x_end", "-", "x", "+", "1", ",", "1", ",", "color", ")", "self", ".", "format", ".", "fill_rect", "(", "self", ",", "x_end", ",", "y", ",", "1", ",", "y_end", "-", "y", "+", "1", ",", "color", ")" ]
Draw a rectangle at the given location, size and color. The ```rect``` method draws only a 1 pixel outline.
[ "Draw", "a", "rectangle", "at", "the", "given", "location", "size", "and", "color", ".", "The", "rect", "method", "draws", "only", "a", "1", "pixel", "outline", "." ]
b9f62c4b71efa963150f9c5a0284b61c7add9d02
https://github.com/adafruit/Adafruit_CircuitPython_framebuf/blob/b9f62c4b71efa963150f9c5a0284b61c7add9d02/adafruit_framebuf.py#L217-L247
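rect() above first rejects rectangles that lie entirely off the buffer and then clamps the rest to the visible area before filling. The same clamping as a standalone helper that returns the visible (x, y, w, h); the buffer size and inputs below are illustrative:

def clip_rect(x, y, width, height, buf_w, buf_h):
    """Return the visible (x, y, w, h) of a rectangle on a buf_w x buf_h buffer, or None."""
    if width < 1 or height < 1 or x + width <= 0 or y + height <= 0 \
            or x >= buf_w or y >= buf_h:
        return None                            # nothing visible
    x_end = min(buf_w - 1, x + width - 1)      # inclusive far corner, clamped
    y_end = min(buf_h - 1, y + height - 1)
    x, y = max(x, 0), max(y, 0)
    return x, y, x_end - x + 1, y_end - y + 1

# A 10x10 rect hanging off the top-left of a 16x8 buffer keeps only its on-screen part.
assert clip_rect(-4, -4, 10, 10, 16, 8) == (0, 0, 6, 6)
assert clip_rect(20, 0, 5, 5, 16, 8) is None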
1,162
adafruit/Adafruit_CircuitPython_framebuf
adafruit_framebuf.py
FrameBuffer.line
def line(self, x_0, y_0, x_1, y_1, color): # pylint: disable=too-many-arguments """Bresenham's line algorithm""" d_x = abs(x_1 - x_0) d_y = abs(y_1 - y_0) x, y = x_0, y_0 s_x = -1 if x_0 > x_1 else 1 s_y = -1 if y_0 > y_1 else 1 if d_x > d_y: err = d_x / 2.0 while x != x_1: self.pixel(x, y, color) err -= d_y if err < 0: y += s_y err += d_x x += s_x else: err = d_y / 2.0 while y != y_1: self.pixel(x, y, color) err -= d_x if err < 0: x += s_x err += d_y y += s_y self.pixel(x, y, color)
python
def line(self, x_0, y_0, x_1, y_1, color): # pylint: disable=too-many-arguments """Bresenham's line algorithm""" d_x = abs(x_1 - x_0) d_y = abs(y_1 - y_0) x, y = x_0, y_0 s_x = -1 if x_0 > x_1 else 1 s_y = -1 if y_0 > y_1 else 1 if d_x > d_y: err = d_x / 2.0 while x != x_1: self.pixel(x, y, color) err -= d_y if err < 0: y += s_y err += d_x x += s_x else: err = d_y / 2.0 while y != y_1: self.pixel(x, y, color) err -= d_x if err < 0: x += s_x err += d_y y += s_y self.pixel(x, y, color)
[ "def", "line", "(", "self", ",", "x_0", ",", "y_0", ",", "x_1", ",", "y_1", ",", "color", ")", ":", "# pylint: disable=too-many-arguments", "d_x", "=", "abs", "(", "x_1", "-", "x_0", ")", "d_y", "=", "abs", "(", "y_1", "-", "y_0", ")", "x", ",", "y", "=", "x_0", ",", "y_0", "s_x", "=", "-", "1", "if", "x_0", ">", "x_1", "else", "1", "s_y", "=", "-", "1", "if", "y_0", ">", "y_1", "else", "1", "if", "d_x", ">", "d_y", ":", "err", "=", "d_x", "/", "2.0", "while", "x", "!=", "x_1", ":", "self", ".", "pixel", "(", "x", ",", "y", ",", "color", ")", "err", "-=", "d_y", "if", "err", "<", "0", ":", "y", "+=", "s_y", "err", "+=", "d_x", "x", "+=", "s_x", "else", ":", "err", "=", "d_y", "/", "2.0", "while", "y", "!=", "y_1", ":", "self", ".", "pixel", "(", "x", ",", "y", ",", "color", ")", "err", "-=", "d_x", "if", "err", "<", "0", ":", "x", "+=", "s_x", "err", "+=", "d_y", "y", "+=", "s_y", "self", ".", "pixel", "(", "x", ",", "y", ",", "color", ")" ]
Bresenham's line algorithm
[ "Bresenham", "s", "line", "algorithm" ]
b9f62c4b71efa963150f9c5a0284b61c7add9d02
https://github.com/adafruit/Adafruit_CircuitPython_framebuf/blob/b9f62c4b71efa963150f9c5a0284b61c7add9d02/adafruit_framebuf.py#L249-L275
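line() above is plain Bresenham, plotting one pixel per step and letting the error term decide when to advance the minor axis. The same loop collecting points into a list so it can run without a framebuffer; the endpoints are arbitrary:

def bresenham(x0, y0, x1, y1):
    """Return the integer points of the line from (x0, y0) to (x1, y1)."""
    points = []
    dx, dy = abs(x1 - x0), abs(y1 - y0)
    x, y = x0, y0
    sx = -1 if x0 > x1 else 1
    sy = -1 if y0 > y1 else 1
    if dx > dy:
        err = dx / 2.0
        while x != x1:
            points.append((x, y))
            err -= dy
            if err < 0:
                y += sy
                err += dx
            x += sx
    else:
        err = dy / 2.0
        while y != y1:
            points.append((x, y))
            err -= dx
            if err < 0:
                x += sx
                err += dy
            y += sy
    points.append((x, y))                # final endpoint
    return points

pts = bresenham(0, 0, 5, 2)
assert pts[0] == (0, 0) and pts[-1] == (5, 2) and len(pts) == 6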
1,163
adafruit/Adafruit_CircuitPython_framebuf
adafruit_framebuf.py
FrameBuffer.scroll
def scroll(self, delta_x, delta_y): """shifts framebuf in x and y direction""" if delta_x < 0: shift_x = 0 xend = self.width + delta_x dt_x = 1 else: shift_x = self.width - 1 xend = delta_x - 1 dt_x = -1 if delta_y < 0: y = 0 yend = self.height + delta_y dt_y = 1 else: y = self.height - 1 yend = delta_y - 1 dt_y = -1 while y != yend: x = shift_x while x != xend: self.format.set_pixel( self, x, y, self.format.get_pixel(self, x - delta_x, y - delta_y)) x += dt_x y += dt_y
python
def scroll(self, delta_x, delta_y): """shifts framebuf in x and y direction""" if delta_x < 0: shift_x = 0 xend = self.width + delta_x dt_x = 1 else: shift_x = self.width - 1 xend = delta_x - 1 dt_x = -1 if delta_y < 0: y = 0 yend = self.height + delta_y dt_y = 1 else: y = self.height - 1 yend = delta_y - 1 dt_y = -1 while y != yend: x = shift_x while x != xend: self.format.set_pixel( self, x, y, self.format.get_pixel(self, x - delta_x, y - delta_y)) x += dt_x y += dt_y
[ "def", "scroll", "(", "self", ",", "delta_x", ",", "delta_y", ")", ":", "if", "delta_x", "<", "0", ":", "shift_x", "=", "0", "xend", "=", "self", ".", "width", "+", "delta_x", "dt_x", "=", "1", "else", ":", "shift_x", "=", "self", ".", "width", "-", "1", "xend", "=", "delta_x", "-", "1", "dt_x", "=", "-", "1", "if", "delta_y", "<", "0", ":", "y", "=", "0", "yend", "=", "self", ".", "height", "+", "delta_y", "dt_y", "=", "1", "else", ":", "y", "=", "self", ".", "height", "-", "1", "yend", "=", "delta_y", "-", "1", "dt_y", "=", "-", "1", "while", "y", "!=", "yend", ":", "x", "=", "shift_x", "while", "x", "!=", "xend", ":", "self", ".", "format", ".", "set_pixel", "(", "self", ",", "x", ",", "y", ",", "self", ".", "format", ".", "get_pixel", "(", "self", ",", "x", "-", "delta_x", ",", "y", "-", "delta_y", ")", ")", "x", "+=", "dt_x", "y", "+=", "dt_y" ]
shifts framebuf in x and y direction
[ "shifts", "framebuf", "in", "x", "and", "y", "direction" ]
b9f62c4b71efa963150f9c5a0284b61c7add9d02
https://github.com/adafruit/Adafruit_CircuitPython_framebuf/blob/b9f62c4b71efa963150f9c5a0284b61c7add9d02/adafruit_framebuf.py#L281-L305
1,164
adafruit/Adafruit_CircuitPython_framebuf
adafruit_framebuf.py
FrameBuffer.text
def text(self, string, x, y, color, *, font_name="font5x8.bin"): """text is not yet implemented""" if not self._font or self._font.font_name != font_name: # load the font! self._font = BitmapFont() w = self._font.font_width for i, char in enumerate(string): self._font.draw_char(char, x + (i * (w + 1)), y, self, color)
python
def text(self, string, x, y, color, *, font_name="font5x8.bin"): """text is not yet implemented""" if not self._font or self._font.font_name != font_name: # load the font! self._font = BitmapFont() w = self._font.font_width for i, char in enumerate(string): self._font.draw_char(char, x + (i * (w + 1)), y, self, color)
[ "def", "text", "(", "self", ",", "string", ",", "x", ",", "y", ",", "color", ",", "*", ",", "font_name", "=", "\"font5x8.bin\"", ")", ":", "if", "not", "self", ".", "_font", "or", "self", ".", "_font", ".", "font_name", "!=", "font_name", ":", "# load the font!", "self", ".", "_font", "=", "BitmapFont", "(", ")", "w", "=", "self", ".", "_font", ".", "font_width", "for", "i", ",", "char", "in", "enumerate", "(", "string", ")", ":", "self", ".", "_font", ".", "draw_char", "(", "char", ",", "x", "+", "(", "i", "*", "(", "w", "+", "1", ")", ")", ",", "y", ",", "self", ",", "color", ")" ]
text is not yet implemented
[ "text", "is", "not", "yet", "implemented" ]
b9f62c4b71efa963150f9c5a0284b61c7add9d02
https://github.com/adafruit/Adafruit_CircuitPython_framebuf/blob/b9f62c4b71efa963150f9c5a0284b61c7add9d02/adafruit_framebuf.py#L307-L317
1,165
loanzen/falcon-auth
falcon_auth/backends.py
AuthBackend.parse_auth_token_from_request
def parse_auth_token_from_request(self, auth_header): """ Parses and returns Auth token from the request header. Raises `falcon.HTTPUnauthoried exception` with proper error message """ if not auth_header: raise falcon.HTTPUnauthorized( description='Missing Authorization Header') parts = auth_header.split() if parts[0].lower() != self.auth_header_prefix.lower(): raise falcon.HTTPUnauthorized( description='Invalid Authorization Header: ' 'Must start with {0}'.format(self.auth_header_prefix)) elif len(parts) == 1: raise falcon.HTTPUnauthorized( description='Invalid Authorization Header: Token Missing') elif len(parts) > 2: raise falcon.HTTPUnauthorized( description='Invalid Authorization Header: Contains extra content') return parts[1]
python
def parse_auth_token_from_request(self, auth_header): """ Parses and returns Auth token from the request header. Raises `falcon.HTTPUnauthoried exception` with proper error message """ if not auth_header: raise falcon.HTTPUnauthorized( description='Missing Authorization Header') parts = auth_header.split() if parts[0].lower() != self.auth_header_prefix.lower(): raise falcon.HTTPUnauthorized( description='Invalid Authorization Header: ' 'Must start with {0}'.format(self.auth_header_prefix)) elif len(parts) == 1: raise falcon.HTTPUnauthorized( description='Invalid Authorization Header: Token Missing') elif len(parts) > 2: raise falcon.HTTPUnauthorized( description='Invalid Authorization Header: Contains extra content') return parts[1]
[ "def", "parse_auth_token_from_request", "(", "self", ",", "auth_header", ")", ":", "if", "not", "auth_header", ":", "raise", "falcon", ".", "HTTPUnauthorized", "(", "description", "=", "'Missing Authorization Header'", ")", "parts", "=", "auth_header", ".", "split", "(", ")", "if", "parts", "[", "0", "]", ".", "lower", "(", ")", "!=", "self", ".", "auth_header_prefix", ".", "lower", "(", ")", ":", "raise", "falcon", ".", "HTTPUnauthorized", "(", "description", "=", "'Invalid Authorization Header: '", "'Must start with {0}'", ".", "format", "(", "self", ".", "auth_header_prefix", ")", ")", "elif", "len", "(", "parts", ")", "==", "1", ":", "raise", "falcon", ".", "HTTPUnauthorized", "(", "description", "=", "'Invalid Authorization Header: Token Missing'", ")", "elif", "len", "(", "parts", ")", ">", "2", ":", "raise", "falcon", ".", "HTTPUnauthorized", "(", "description", "=", "'Invalid Authorization Header: Contains extra content'", ")", "return", "parts", "[", "1", "]" ]
Parses and returns Auth token from the request header. Raises `falcon.HTTPUnauthoried exception` with proper error message
[ "Parses", "and", "returns", "Auth", "token", "from", "the", "request", "header", ".", "Raises", "falcon", ".", "HTTPUnauthoried", "exception", "with", "proper", "error", "message" ]
b9063163fff8044a8579a6047a85f28f3b214fdf
https://github.com/loanzen/falcon-auth/blob/b9063163fff8044a8579a6047a85f28f3b214fdf/falcon_auth/backends.py#L52-L75
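parse_auth_token_from_request() above accepts only headers of the form '<prefix> <token>' and raises falcon.HTTPUnauthorized for a missing header, wrong prefix, missing token, or trailing content. A framework-free sketch of the same checks that raises ValueError instead; the Bearer prefix and sample headers are illustrative:

def parse_auth_token(auth_header, prefix='Bearer'):
    """Return the token part of a '<prefix> <token>' Authorization header."""
    if not auth_header:
        raise ValueError('Missing Authorization Header')
    parts = auth_header.split()
    if parts[0].lower() != prefix.lower():
        raise ValueError('Must start with {0}'.format(prefix))
    if len(parts) == 1:
        raise ValueError('Token Missing')
    if len(parts) > 2:
        raise ValueError('Contains extra content')
    return parts[1]

assert parse_auth_token('Bearer abc.def.ghi') == 'abc.def.ghi'
try:
    parse_auth_token('Basic abc')
except ValueError as exc:
    print(exc)   # Must start with Bearer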
1,166
loanzen/falcon-auth
falcon_auth/backends.py
JWTAuthBackend.authenticate
def authenticate(self, req, resp, resource): """ Extract auth token from request `authorization` header, decode jwt token, verify configured claims and return either a ``user`` object if successful else raise an `falcon.HTTPUnauthoried exception` """ payload = self._decode_jwt_token(req) user = self.user_loader(payload) if not user: raise falcon.HTTPUnauthorized( description='Invalid JWT Credentials') return user
python
def authenticate(self, req, resp, resource): """ Extract auth token from request `authorization` header, decode jwt token, verify configured claims and return either a ``user`` object if successful else raise an `falcon.HTTPUnauthoried exception` """ payload = self._decode_jwt_token(req) user = self.user_loader(payload) if not user: raise falcon.HTTPUnauthorized( description='Invalid JWT Credentials') return user
[ "def", "authenticate", "(", "self", ",", "req", ",", "resp", ",", "resource", ")", ":", "payload", "=", "self", ".", "_decode_jwt_token", "(", "req", ")", "user", "=", "self", ".", "user_loader", "(", "payload", ")", "if", "not", "user", ":", "raise", "falcon", ".", "HTTPUnauthorized", "(", "description", "=", "'Invalid JWT Credentials'", ")", "return", "user" ]
Extract auth token from request `authorization` header, decode jwt token, verify configured claims and return either a ``user`` object if successful else raise an `falcon.HTTPUnauthoried exception`
[ "Extract", "auth", "token", "from", "request", "authorization", "header", "decode", "jwt", "token", "verify", "configured", "claims", "and", "return", "either", "a", "user", "object", "if", "successful", "else", "raise", "an", "falcon", ".", "HTTPUnauthoried", "exception" ]
b9063163fff8044a8579a6047a85f28f3b214fdf
https://github.com/loanzen/falcon-auth/blob/b9063163fff8044a8579a6047a85f28f3b214fdf/falcon_auth/backends.py#L213-L225
1,167
loanzen/falcon-auth
falcon_auth/backends.py
JWTAuthBackend.get_auth_token
def get_auth_token(self, user_payload): """ Create a JWT authentication token from ``user_payload`` Args: user_payload(dict, required): A `dict` containing required information to create authentication token """ now = datetime.utcnow() payload = { 'user': user_payload } if 'iat' in self.verify_claims: payload['iat'] = now if 'nbf' in self.verify_claims: payload['nbf'] = now + self.leeway if 'exp' in self.verify_claims: payload['exp'] = now + self.expiration_delta if self.audience is not None: payload['aud'] = self.audience if self.issuer is not None: payload['iss'] = self.issuer return jwt.encode( payload, self.secret_key, algorithm=self.algorithm, json_encoder=ExtendedJSONEncoder).decode('utf-8')
python
def get_auth_token(self, user_payload): """ Create a JWT authentication token from ``user_payload`` Args: user_payload(dict, required): A `dict` containing required information to create authentication token """ now = datetime.utcnow() payload = { 'user': user_payload } if 'iat' in self.verify_claims: payload['iat'] = now if 'nbf' in self.verify_claims: payload['nbf'] = now + self.leeway if 'exp' in self.verify_claims: payload['exp'] = now + self.expiration_delta if self.audience is not None: payload['aud'] = self.audience if self.issuer is not None: payload['iss'] = self.issuer return jwt.encode( payload, self.secret_key, algorithm=self.algorithm, json_encoder=ExtendedJSONEncoder).decode('utf-8')
[ "def", "get_auth_token", "(", "self", ",", "user_payload", ")", ":", "now", "=", "datetime", ".", "utcnow", "(", ")", "payload", "=", "{", "'user'", ":", "user_payload", "}", "if", "'iat'", "in", "self", ".", "verify_claims", ":", "payload", "[", "'iat'", "]", "=", "now", "if", "'nbf'", "in", "self", ".", "verify_claims", ":", "payload", "[", "'nbf'", "]", "=", "now", "+", "self", ".", "leeway", "if", "'exp'", "in", "self", ".", "verify_claims", ":", "payload", "[", "'exp'", "]", "=", "now", "+", "self", ".", "expiration_delta", "if", "self", ".", "audience", "is", "not", "None", ":", "payload", "[", "'aud'", "]", "=", "self", ".", "audience", "if", "self", ".", "issuer", "is", "not", "None", ":", "payload", "[", "'iss'", "]", "=", "self", ".", "issuer", "return", "jwt", ".", "encode", "(", "payload", ",", "self", ".", "secret_key", ",", "algorithm", "=", "self", ".", "algorithm", ",", "json_encoder", "=", "ExtendedJSONEncoder", ")", ".", "decode", "(", "'utf-8'", ")" ]
Create a JWT authentication token from ``user_payload`` Args: user_payload(dict, required): A `dict` containing required information to create authentication token
[ "Create", "a", "JWT", "authentication", "token", "from", "user_payload" ]
b9063163fff8044a8579a6047a85f28f3b214fdf
https://github.com/loanzen/falcon-auth/blob/b9063163fff8044a8579a6047a85f28f3b214fdf/falcon_auth/backends.py#L227-L258
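get_auth_token() above wraps the user payload in the registered JWT claims (iat, nbf, exp, aud, iss) and signs it with PyJWT. A condensed sketch of the payload construction; it only calls jwt.encode() when PyJWT is importable, and the secret, claims and expiry below are made up. PyJWT 2.x returns a str, so the .decode('utf-8') seen in the record is only needed on 1.x:

from datetime import datetime, timedelta

def build_jwt_payload(user_payload, expiration_delta=timedelta(hours=1),
                      audience=None, issuer=None):
    now = datetime.utcnow()
    payload = {'user': user_payload, 'iat': now, 'nbf': now,
               'exp': now + expiration_delta}
    if audience is not None:
        payload['aud'] = audience
    if issuer is not None:
        payload['iss'] = issuer
    return payload

payload = build_jwt_payload({'username': 'joe'}, issuer='example-app')
try:
    import jwt                           # PyJWT, if installed
    token = jwt.encode(payload, 'not-a-real-secret', algorithm='HS256')
    print(token if isinstance(token, str) else token.decode('utf-8'))
except ImportError:
    print(payload)                       # fall back to showing the raw claims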
1,168
loanzen/falcon-auth
falcon_auth/backends.py
TokenAuthBackend.get_auth_token
def get_auth_token(self, user_payload): """ Extracts token from the `user_payload` """ token = user_payload.get('token') or None if not token: raise ValueError('`user_payload` must provide api token') return '{auth_header_prefix} {token}'.format( auth_header_prefix=self.auth_header_prefix, token=token)
python
def get_auth_token(self, user_payload): """ Extracts token from the `user_payload` """ token = user_payload.get('token') or None if not token: raise ValueError('`user_payload` must provide api token') return '{auth_header_prefix} {token}'.format( auth_header_prefix=self.auth_header_prefix, token=token)
[ "def", "get_auth_token", "(", "self", ",", "user_payload", ")", ":", "token", "=", "user_payload", ".", "get", "(", "'token'", ")", "or", "None", "if", "not", "token", ":", "raise", "ValueError", "(", "'`user_payload` must provide api token'", ")", "return", "'{auth_header_prefix} {token}'", ".", "format", "(", "auth_header_prefix", "=", "self", ".", "auth_header_prefix", ",", "token", "=", "token", ")" ]
Extracts token from the `user_payload`
[ "Extracts", "token", "from", "the", "user_payload" ]
b9063163fff8044a8579a6047a85f28f3b214fdf
https://github.com/loanzen/falcon-auth/blob/b9063163fff8044a8579a6047a85f28f3b214fdf/falcon_auth/backends.py#L379-L388
1,169
loanzen/falcon-auth
falcon_auth/backends.py
HawkAuthBackend.parse_auth_token_from_request
def parse_auth_token_from_request(self, auth_header): """ Parses and returns the Hawk Authorization header if it is present and well-formed. Raises `falcon.HTTPUnauthoried exception` with proper error message """ if not auth_header: raise falcon.HTTPUnauthorized( description='Missing Authorization Header') try: auth_header_prefix, _ = auth_header.split(' ', 1) except ValueError: raise falcon.HTTPUnauthorized( description='Invalid Authorization Header: Missing Scheme or Parameters') if auth_header_prefix.lower() != self.auth_header_prefix.lower(): raise falcon.HTTPUnauthorized( description='Invalid Authorization Header: ' 'Must start with {0}'.format(self.auth_header_prefix)) return auth_header
python
def parse_auth_token_from_request(self, auth_header): """ Parses and returns the Hawk Authorization header if it is present and well-formed. Raises `falcon.HTTPUnauthoried exception` with proper error message """ if not auth_header: raise falcon.HTTPUnauthorized( description='Missing Authorization Header') try: auth_header_prefix, _ = auth_header.split(' ', 1) except ValueError: raise falcon.HTTPUnauthorized( description='Invalid Authorization Header: Missing Scheme or Parameters') if auth_header_prefix.lower() != self.auth_header_prefix.lower(): raise falcon.HTTPUnauthorized( description='Invalid Authorization Header: ' 'Must start with {0}'.format(self.auth_header_prefix)) return auth_header
[ "def", "parse_auth_token_from_request", "(", "self", ",", "auth_header", ")", ":", "if", "not", "auth_header", ":", "raise", "falcon", ".", "HTTPUnauthorized", "(", "description", "=", "'Missing Authorization Header'", ")", "try", ":", "auth_header_prefix", ",", "_", "=", "auth_header", ".", "split", "(", "' '", ",", "1", ")", "except", "ValueError", ":", "raise", "falcon", ".", "HTTPUnauthorized", "(", "description", "=", "'Invalid Authorization Header: Missing Scheme or Parameters'", ")", "if", "auth_header_prefix", ".", "lower", "(", ")", "!=", "self", ".", "auth_header_prefix", ".", "lower", "(", ")", ":", "raise", "falcon", ".", "HTTPUnauthorized", "(", "description", "=", "'Invalid Authorization Header: '", "'Must start with {0}'", ".", "format", "(", "self", ".", "auth_header_prefix", ")", ")", "return", "auth_header" ]
Parses and returns the Hawk Authorization header if it is present and well-formed. Raises `falcon.HTTPUnauthoried exception` with proper error message
[ "Parses", "and", "returns", "the", "Hawk", "Authorization", "header", "if", "it", "is", "present", "and", "well", "-", "formed", ".", "Raises", "falcon", ".", "HTTPUnauthoried", "exception", "with", "proper", "error", "message" ]
b9063163fff8044a8579a6047a85f28f3b214fdf
https://github.com/loanzen/falcon-auth/blob/b9063163fff8044a8579a6047a85f28f3b214fdf/falcon_auth/backends.py#L442-L462
1,170
gmarull/qtmodern
qtmodern/styles.py
_apply_base_theme
def _apply_base_theme(app): """ Apply base theme to the application. Args: app (QApplication): QApplication instance. """ if QT_VERSION < (5,): app.setStyle('plastique') else: app.setStyle('Fusion') with open(_STYLESHEET) as stylesheet: app.setStyleSheet(stylesheet.read())
python
def _apply_base_theme(app): """ Apply base theme to the application. Args: app (QApplication): QApplication instance. """ if QT_VERSION < (5,): app.setStyle('plastique') else: app.setStyle('Fusion') with open(_STYLESHEET) as stylesheet: app.setStyleSheet(stylesheet.read())
[ "def", "_apply_base_theme", "(", "app", ")", ":", "if", "QT_VERSION", "<", "(", "5", ",", ")", ":", "app", ".", "setStyle", "(", "'plastique'", ")", "else", ":", "app", ".", "setStyle", "(", "'Fusion'", ")", "with", "open", "(", "_STYLESHEET", ")", "as", "stylesheet", ":", "app", ".", "setStyleSheet", "(", "stylesheet", ".", "read", "(", ")", ")" ]
Apply base theme to the application. Args: app (QApplication): QApplication instance.
[ "Apply", "base", "theme", "to", "the", "application", "." ]
b58b24c5bcfa0b81c7b1af5a7dfdc0fae660ce0f
https://github.com/gmarull/qtmodern/blob/b58b24c5bcfa0b81c7b1af5a7dfdc0fae660ce0f/qtmodern/styles.py#L11-L24
1,171
gmarull/qtmodern
qtmodern/styles.py
dark
def dark(app): """ Apply Dark Theme to the Qt application instance. Args: app (QApplication): QApplication instance. """ _apply_base_theme(app) darkPalette = QPalette() # base darkPalette.setColor(QPalette.WindowText, QColor(180, 180, 180)) darkPalette.setColor(QPalette.Button, QColor(53, 53, 53)) darkPalette.setColor(QPalette.Light, QColor(180, 180, 180)) darkPalette.setColor(QPalette.Midlight, QColor(90, 90, 90)) darkPalette.setColor(QPalette.Dark, QColor(35, 35, 35)) darkPalette.setColor(QPalette.Text, QColor(180, 180, 180)) darkPalette.setColor(QPalette.BrightText, QColor(180, 180, 180)) darkPalette.setColor(QPalette.ButtonText, QColor(180, 180, 180)) darkPalette.setColor(QPalette.Base, QColor(42, 42, 42)) darkPalette.setColor(QPalette.Window, QColor(53, 53, 53)) darkPalette.setColor(QPalette.Shadow, QColor(20, 20, 20)) darkPalette.setColor(QPalette.Highlight, QColor(42, 130, 218)) darkPalette.setColor(QPalette.HighlightedText, QColor(180, 180, 180)) darkPalette.setColor(QPalette.Link, QColor(56, 252, 196)) darkPalette.setColor(QPalette.AlternateBase, QColor(66, 66, 66)) darkPalette.setColor(QPalette.ToolTipBase, QColor(53, 53, 53)) darkPalette.setColor(QPalette.ToolTipText, QColor(180, 180, 180)) # disabled darkPalette.setColor(QPalette.Disabled, QPalette.WindowText, QColor(127, 127, 127)) darkPalette.setColor(QPalette.Disabled, QPalette.Text, QColor(127, 127, 127)) darkPalette.setColor(QPalette.Disabled, QPalette.ButtonText, QColor(127, 127, 127)) darkPalette.setColor(QPalette.Disabled, QPalette.Highlight, QColor(80, 80, 80)) darkPalette.setColor(QPalette.Disabled, QPalette.HighlightedText, QColor(127, 127, 127)) app.setPalette(darkPalette)
python
def dark(app): """ Apply Dark Theme to the Qt application instance. Args: app (QApplication): QApplication instance. """ _apply_base_theme(app) darkPalette = QPalette() # base darkPalette.setColor(QPalette.WindowText, QColor(180, 180, 180)) darkPalette.setColor(QPalette.Button, QColor(53, 53, 53)) darkPalette.setColor(QPalette.Light, QColor(180, 180, 180)) darkPalette.setColor(QPalette.Midlight, QColor(90, 90, 90)) darkPalette.setColor(QPalette.Dark, QColor(35, 35, 35)) darkPalette.setColor(QPalette.Text, QColor(180, 180, 180)) darkPalette.setColor(QPalette.BrightText, QColor(180, 180, 180)) darkPalette.setColor(QPalette.ButtonText, QColor(180, 180, 180)) darkPalette.setColor(QPalette.Base, QColor(42, 42, 42)) darkPalette.setColor(QPalette.Window, QColor(53, 53, 53)) darkPalette.setColor(QPalette.Shadow, QColor(20, 20, 20)) darkPalette.setColor(QPalette.Highlight, QColor(42, 130, 218)) darkPalette.setColor(QPalette.HighlightedText, QColor(180, 180, 180)) darkPalette.setColor(QPalette.Link, QColor(56, 252, 196)) darkPalette.setColor(QPalette.AlternateBase, QColor(66, 66, 66)) darkPalette.setColor(QPalette.ToolTipBase, QColor(53, 53, 53)) darkPalette.setColor(QPalette.ToolTipText, QColor(180, 180, 180)) # disabled darkPalette.setColor(QPalette.Disabled, QPalette.WindowText, QColor(127, 127, 127)) darkPalette.setColor(QPalette.Disabled, QPalette.Text, QColor(127, 127, 127)) darkPalette.setColor(QPalette.Disabled, QPalette.ButtonText, QColor(127, 127, 127)) darkPalette.setColor(QPalette.Disabled, QPalette.Highlight, QColor(80, 80, 80)) darkPalette.setColor(QPalette.Disabled, QPalette.HighlightedText, QColor(127, 127, 127)) app.setPalette(darkPalette)
[ "def", "dark", "(", "app", ")", ":", "_apply_base_theme", "(", "app", ")", "darkPalette", "=", "QPalette", "(", ")", "# base", "darkPalette", ".", "setColor", "(", "QPalette", ".", "WindowText", ",", "QColor", "(", "180", ",", "180", ",", "180", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Button", ",", "QColor", "(", "53", ",", "53", ",", "53", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Light", ",", "QColor", "(", "180", ",", "180", ",", "180", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Midlight", ",", "QColor", "(", "90", ",", "90", ",", "90", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Dark", ",", "QColor", "(", "35", ",", "35", ",", "35", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Text", ",", "QColor", "(", "180", ",", "180", ",", "180", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "BrightText", ",", "QColor", "(", "180", ",", "180", ",", "180", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "ButtonText", ",", "QColor", "(", "180", ",", "180", ",", "180", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Base", ",", "QColor", "(", "42", ",", "42", ",", "42", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Window", ",", "QColor", "(", "53", ",", "53", ",", "53", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Shadow", ",", "QColor", "(", "20", ",", "20", ",", "20", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Highlight", ",", "QColor", "(", "42", ",", "130", ",", "218", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "HighlightedText", ",", "QColor", "(", "180", ",", "180", ",", "180", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Link", ",", "QColor", "(", "56", ",", "252", ",", "196", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "AlternateBase", ",", "QColor", "(", "66", ",", "66", ",", "66", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "ToolTipBase", ",", "QColor", "(", "53", ",", "53", ",", "53", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "ToolTipText", ",", "QColor", "(", "180", ",", "180", ",", "180", ")", ")", "# disabled", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Disabled", ",", "QPalette", ".", "WindowText", ",", "QColor", "(", "127", ",", "127", ",", "127", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Disabled", ",", "QPalette", ".", "Text", ",", "QColor", "(", "127", ",", "127", ",", "127", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Disabled", ",", "QPalette", ".", "ButtonText", ",", "QColor", "(", "127", ",", "127", ",", "127", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Disabled", ",", "QPalette", ".", "Highlight", ",", "QColor", "(", "80", ",", "80", ",", "80", ")", ")", "darkPalette", ".", "setColor", "(", "QPalette", ".", "Disabled", ",", "QPalette", ".", "HighlightedText", ",", "QColor", "(", "127", ",", "127", ",", "127", ")", ")", "app", ".", "setPalette", "(", "darkPalette", ")" ]
Apply Dark Theme to the Qt application instance. Args: app (QApplication): QApplication instance.
[ "Apply", "Dark", "Theme", "to", "the", "Qt", "application", "instance", "." ]
b58b24c5bcfa0b81c7b1af5a7dfdc0fae660ce0f
https://github.com/gmarull/qtmodern/blob/b58b24c5bcfa0b81c7b1af5a7dfdc0fae660ce0f/qtmodern/styles.py#L27-L69
1,172
matthias-k/cyipopt
doc/source/sphinxext/inheritance_diagram.py
inheritance_diagram_directive
def inheritance_diagram_directive(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine): """ Run when the inheritance_diagram directive is first encountered. """ node = inheritance_diagram() class_names = arguments # Create a graph starting with the list of classes graph = InheritanceGraph(class_names) # Create xref nodes for each target of the graph's image map and # add them to the doc tree so that Sphinx can resolve the # references to real URLs later. These nodes will eventually be # removed from the doctree after we're done with them. for name in graph.get_all_class_names(): refnodes, x = xfileref_role( 'class', ':class:`%s`' % name, name, 0, state) node.extend(refnodes) # Store the graph object so we can use it to generate the # dot file later node['graph'] = graph # Store the original content for use as a hash node['parts'] = options.get('parts', 0) node['content'] = " ".join(class_names) return [node]
python
def inheritance_diagram_directive(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine): """ Run when the inheritance_diagram directive is first encountered. """ node = inheritance_diagram() class_names = arguments # Create a graph starting with the list of classes graph = InheritanceGraph(class_names) # Create xref nodes for each target of the graph's image map and # add them to the doc tree so that Sphinx can resolve the # references to real URLs later. These nodes will eventually be # removed from the doctree after we're done with them. for name in graph.get_all_class_names(): refnodes, x = xfileref_role( 'class', ':class:`%s`' % name, name, 0, state) node.extend(refnodes) # Store the graph object so we can use it to generate the # dot file later node['graph'] = graph # Store the original content for use as a hash node['parts'] = options.get('parts', 0) node['content'] = " ".join(class_names) return [node]
[ "def", "inheritance_diagram_directive", "(", "name", ",", "arguments", ",", "options", ",", "content", ",", "lineno", ",", "content_offset", ",", "block_text", ",", "state", ",", "state_machine", ")", ":", "node", "=", "inheritance_diagram", "(", ")", "class_names", "=", "arguments", "# Create a graph starting with the list of classes", "graph", "=", "InheritanceGraph", "(", "class_names", ")", "# Create xref nodes for each target of the graph's image map and", "# add them to the doc tree so that Sphinx can resolve the", "# references to real URLs later. These nodes will eventually be", "# removed from the doctree after we're done with them.", "for", "name", "in", "graph", ".", "get_all_class_names", "(", ")", ":", "refnodes", ",", "x", "=", "xfileref_role", "(", "'class'", ",", "':class:`%s`'", "%", "name", ",", "name", ",", "0", ",", "state", ")", "node", ".", "extend", "(", "refnodes", ")", "# Store the graph object so we can use it to generate the", "# dot file later", "node", "[", "'graph'", "]", "=", "graph", "# Store the original content for use as a hash", "node", "[", "'parts'", "]", "=", "options", ".", "get", "(", "'parts'", ",", "0", ")", "node", "[", "'content'", "]", "=", "\" \"", ".", "join", "(", "class_names", ")", "return", "[", "node", "]" ]
Run when the inheritance_diagram directive is first encountered.
[ "Run", "when", "the", "inheritance_diagram", "directive", "is", "first", "encountered", "." ]
ed03f54de2e0b8c8ba4c0aa18ab9ab6c8846bc19
https://github.com/matthias-k/cyipopt/blob/ed03f54de2e0b8c8ba4c0aa18ab9ab6c8846bc19/doc/source/sphinxext/inheritance_diagram.py#L293-L320
1,173
matthias-k/cyipopt
doc/source/sphinxext/inheritance_diagram.py
InheritanceGraph.run_dot
def run_dot(self, args, name, parts=0, urls={}, graph_options={}, node_options={}, edge_options={}): """ Run graphviz 'dot' over this graph, returning whatever 'dot' writes to stdout. *args* will be passed along as commandline arguments. *name* is the name of the graph *urls* is a dictionary mapping class names to http urls Raises DotException for any of the many os and installation-related errors that may occur. """ try: dot = subprocess.Popen(['dot'] + list(args), stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True) except OSError: raise DotException("Could not execute 'dot'. Are you sure you have 'graphviz' installed?") except ValueError: raise DotException("'dot' called with invalid arguments") except: raise DotException("Unexpected error calling 'dot'") self.generate_dot(dot.stdin, name, parts, urls, graph_options, node_options, edge_options) dot.stdin.close() result = dot.stdout.read() returncode = dot.wait() if returncode != 0: raise DotException("'dot' returned the errorcode %d" % returncode) return result
python
def run_dot(self, args, name, parts=0, urls={}, graph_options={}, node_options={}, edge_options={}): """ Run graphviz 'dot' over this graph, returning whatever 'dot' writes to stdout. *args* will be passed along as commandline arguments. *name* is the name of the graph *urls* is a dictionary mapping class names to http urls Raises DotException for any of the many os and installation-related errors that may occur. """ try: dot = subprocess.Popen(['dot'] + list(args), stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True) except OSError: raise DotException("Could not execute 'dot'. Are you sure you have 'graphviz' installed?") except ValueError: raise DotException("'dot' called with invalid arguments") except: raise DotException("Unexpected error calling 'dot'") self.generate_dot(dot.stdin, name, parts, urls, graph_options, node_options, edge_options) dot.stdin.close() result = dot.stdout.read() returncode = dot.wait() if returncode != 0: raise DotException("'dot' returned the errorcode %d" % returncode) return result
[ "def", "run_dot", "(", "self", ",", "args", ",", "name", ",", "parts", "=", "0", ",", "urls", "=", "{", "}", ",", "graph_options", "=", "{", "}", ",", "node_options", "=", "{", "}", ",", "edge_options", "=", "{", "}", ")", ":", "try", ":", "dot", "=", "subprocess", ".", "Popen", "(", "[", "'dot'", "]", "+", "list", "(", "args", ")", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "close_fds", "=", "True", ")", "except", "OSError", ":", "raise", "DotException", "(", "\"Could not execute 'dot'. Are you sure you have 'graphviz' installed?\"", ")", "except", "ValueError", ":", "raise", "DotException", "(", "\"'dot' called with invalid arguments\"", ")", "except", ":", "raise", "DotException", "(", "\"Unexpected error calling 'dot'\"", ")", "self", ".", "generate_dot", "(", "dot", ".", "stdin", ",", "name", ",", "parts", ",", "urls", ",", "graph_options", ",", "node_options", ",", "edge_options", ")", "dot", ".", "stdin", ".", "close", "(", ")", "result", "=", "dot", ".", "stdout", ".", "read", "(", ")", "returncode", "=", "dot", ".", "wait", "(", ")", "if", "returncode", "!=", "0", ":", "raise", "DotException", "(", "\"'dot' returned the errorcode %d\"", "%", "returncode", ")", "return", "result" ]
Run graphviz 'dot' over this graph, returning whatever 'dot' writes to stdout. *args* will be passed along as commandline arguments. *name* is the name of the graph *urls* is a dictionary mapping class names to http urls Raises DotException for any of the many os and installation-related errors that may occur.
[ "Run", "graphviz", "dot", "over", "this", "graph", "returning", "whatever", "dot", "writes", "to", "stdout", "." ]
ed03f54de2e0b8c8ba4c0aa18ab9ab6c8846bc19
https://github.com/matthias-k/cyipopt/blob/ed03f54de2e0b8c8ba4c0aa18ab9ab6c8846bc19/doc/source/sphinxext/inheritance_diagram.py#L251-L284
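run_dot() above pipes DOT source into Graphviz's dot binary and reads the rendered output back, translating the common failure modes into DotException. A minimal sketch of the same pipe pattern with subprocess.run; it assumes dot may be absent, and the sample graph is trivial:

import subprocess

DOT_SOURCE = b'digraph G { A -> B; B -> C; }'

def run_dot(dot_source, fmt='svg'):
    """Feed DOT text to graphviz 'dot' and return the rendered bytes."""
    try:
        proc = subprocess.run(['dot', '-T' + fmt], input=dot_source,
                              stdout=subprocess.PIPE, check=True)
    except FileNotFoundError:
        raise RuntimeError("Could not execute 'dot'; is graphviz installed?")
    except subprocess.CalledProcessError as exc:
        raise RuntimeError("'dot' exited with code %d" % exc.returncode)
    return proc.stdout

try:
    svg = run_dot(DOT_SOURCE)
    print(svg[:60])
except RuntimeError as exc:
    print(exc)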
1,174
schapman1974/tinymongo
setup.py
parse_md_to_rst
def parse_md_to_rst(file): """Read Markdown file and convert to ReStructured Text.""" try: from m2r import parse_from_file return parse_from_file(file).replace( "artwork/", "http://198.27.119.65/" ) except ImportError: # m2r may not be installed in user environment return read(file)
python
def parse_md_to_rst(file): """Read Markdown file and convert to ReStructured Text.""" try: from m2r import parse_from_file return parse_from_file(file).replace( "artwork/", "http://198.27.119.65/" ) except ImportError: # m2r may not be installed in user environment return read(file)
[ "def", "parse_md_to_rst", "(", "file", ")", ":", "try", ":", "from", "m2r", "import", "parse_from_file", "return", "parse_from_file", "(", "file", ")", ".", "replace", "(", "\"artwork/\"", ",", "\"http://198.27.119.65/\"", ")", "except", "ImportError", ":", "# m2r may not be installed in user environment", "return", "read", "(", "file", ")" ]
Read Markdown file and convert to ReStructured Text.
[ "Read", "Markdown", "file", "and", "convert", "to", "ReStructured", "Text", "." ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/setup.py#L14-L23
1,175
schapman1974/tinymongo
tinymongo/results.py
DeleteResult.deleted_count
def deleted_count(self): """The number of documents deleted.""" if isinstance(self.raw_result, list): return len(self.raw_result) else: return self.raw_result
python
def deleted_count(self): """The number of documents deleted.""" if isinstance(self.raw_result, list): return len(self.raw_result) else: return self.raw_result
[ "def", "deleted_count", "(", "self", ")", ":", "if", "isinstance", "(", "self", ".", "raw_result", ",", "list", ")", ":", "return", "len", "(", "self", ".", "raw_result", ")", "else", ":", "return", "self", ".", "raw_result" ]
The number of documents deleted.
[ "The", "number", "of", "documents", "deleted", "." ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/results.py#L107-L112
1,176
schapman1974/tinymongo
tinymongo/tinymongo.py
generate_id
def generate_id(): """Generate new UUID""" # TODO: Use six.string_type to Py3 compat try: return unicode(uuid1()).replace(u"-", u"") except NameError: return str(uuid1()).replace(u"-", u"")
python
def generate_id(): """Generate new UUID""" # TODO: Use six.string_type to Py3 compat try: return unicode(uuid1()).replace(u"-", u"") except NameError: return str(uuid1()).replace(u"-", u"")
[ "def", "generate_id", "(", ")", ":", "# TODO: Use six.string_type to Py3 compat", "try", ":", "return", "unicode", "(", "uuid1", "(", ")", ")", ".", "replace", "(", "u\"-\"", ",", "u\"\"", ")", "except", "NameError", ":", "return", "str", "(", "uuid1", "(", ")", ")", ".", "replace", "(", "u\"-\"", ",", "u\"\"", ")" ]
Generate new UUID
[ "Generate", "new", "UUID" ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/tinymongo.py#L806-L812
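generate_id() above strips the dashes out of a uuid1() string, with the unicode branch only needed on Python 2. On Python 3 the .hex attribute gives the same 32-character result directly; uuid4() is noted as an alternative for callers who do not want the MAC/timestamp that uuid1() embeds (a choice the record itself does not make):

from uuid import uuid1, uuid4

def generate_id():
    """32-char hex id, equivalent to str(uuid1()).replace('-', '') on Python 3."""
    return uuid1().hex

_id = generate_id()
assert len(_id) == 32 and '-' not in _id
print(_id, uuid4().hex)   # uuid4 variant: random, no MAC/timestamp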
1,177
schapman1974/tinymongo
tinymongo/tinymongo.py
TinyMongoCollection.insert
def insert(self, docs, *args, **kwargs): """Backwards compatibility with insert""" if isinstance(docs, list): return self.insert_many(docs, *args, **kwargs) else: return self.insert_one(docs, *args, **kwargs)
python
def insert(self, docs, *args, **kwargs): """Backwards compatibility with insert""" if isinstance(docs, list): return self.insert_many(docs, *args, **kwargs) else: return self.insert_one(docs, *args, **kwargs)
[ "def", "insert", "(", "self", ",", "docs", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "docs", ",", "list", ")", ":", "return", "self", ".", "insert_many", "(", "docs", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "self", ".", "insert_one", "(", "docs", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Backwards compatibility with insert
[ "Backwards", "compatibility", "with", "insert" ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/tinymongo.py#L170-L175
1,178
schapman1974/tinymongo
tinymongo/tinymongo.py
TinyMongoCollection.update
def update(self, query, doc, *args, **kwargs): """Backwards compatibility with update""" if isinstance(doc, list): return [ self.update_one(query, item, *args, **kwargs) for item in doc ] else: return self.update_one(query, doc, *args, **kwargs)
python
def update(self, query, doc, *args, **kwargs): """Backwards compatibility with update""" if isinstance(doc, list): return [ self.update_one(query, item, *args, **kwargs) for item in doc ] else: return self.update_one(query, doc, *args, **kwargs)
[ "def", "update", "(", "self", ",", "query", ",", "doc", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "doc", ",", "list", ")", ":", "return", "[", "self", ".", "update_one", "(", "query", ",", "item", ",", "*", "args", ",", "*", "*", "kwargs", ")", "for", "item", "in", "doc", "]", "else", ":", "return", "self", ".", "update_one", "(", "query", ",", "doc", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Backwards compatibility with update
[ "Backwards", "compatibility", "with", "update" ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/tinymongo.py#L406-L414
1,179
schapman1974/tinymongo
tinymongo/tinymongo.py
TinyMongoCollection.update_one
def update_one(self, query, doc): """ Updates one element of the collection :param query: dictionary representing the mongo query :param doc: dictionary representing the item to be updated :return: UpdateResult """ if self.table is None: self.build_table() if u"$set" in doc: doc = doc[u"$set"] allcond = self.parse_query(query) try: result = self.table.update(doc, allcond) except: # TODO: check table.update result # check what pymongo does in that case result = None return UpdateResult(raw_result=result)
python
def update_one(self, query, doc): """ Updates one element of the collection :param query: dictionary representing the mongo query :param doc: dictionary representing the item to be updated :return: UpdateResult """ if self.table is None: self.build_table() if u"$set" in doc: doc = doc[u"$set"] allcond = self.parse_query(query) try: result = self.table.update(doc, allcond) except: # TODO: check table.update result # check what pymongo does in that case result = None return UpdateResult(raw_result=result)
[ "def", "update_one", "(", "self", ",", "query", ",", "doc", ")", ":", "if", "self", ".", "table", "is", "None", ":", "self", ".", "build_table", "(", ")", "if", "u\"$set\"", "in", "doc", ":", "doc", "=", "doc", "[", "u\"$set\"", "]", "allcond", "=", "self", ".", "parse_query", "(", "query", ")", "try", ":", "result", "=", "self", ".", "table", ".", "update", "(", "doc", ",", "allcond", ")", "except", ":", "# TODO: check table.update result", "# check what pymongo does in that case", "result", "=", "None", "return", "UpdateResult", "(", "raw_result", "=", "result", ")" ]
Updates one element of the collection :param query: dictionary representing the mongo query :param doc: dictionary representing the item to be updated :return: UpdateResult
[ "Updates", "one", "element", "of", "the", "collection" ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/tinymongo.py#L416-L439
1,180
schapman1974/tinymongo
tinymongo/tinymongo.py
TinyMongoCollection.find
def find(self, filter=None, sort=None, skip=None, limit=None, *args, **kwargs): """ Finds all matching results :param query: dictionary representing the mongo query :return: cursor containing the search results """ if self.table is None: self.build_table() if filter is None: result = self.table.all() else: allcond = self.parse_query(filter) try: result = self.table.search(allcond) except (AttributeError, TypeError): result = [] result = TinyMongoCursor( result, sort=sort, skip=skip, limit=limit ) return result
python
def find(self, filter=None, sort=None, skip=None, limit=None, *args, **kwargs): """ Finds all matching results :param query: dictionary representing the mongo query :return: cursor containing the search results """ if self.table is None: self.build_table() if filter is None: result = self.table.all() else: allcond = self.parse_query(filter) try: result = self.table.search(allcond) except (AttributeError, TypeError): result = [] result = TinyMongoCursor( result, sort=sort, skip=skip, limit=limit ) return result
[ "def", "find", "(", "self", ",", "filter", "=", "None", ",", "sort", "=", "None", ",", "skip", "=", "None", ",", "limit", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "table", "is", "None", ":", "self", ".", "build_table", "(", ")", "if", "filter", "is", "None", ":", "result", "=", "self", ".", "table", ".", "all", "(", ")", "else", ":", "allcond", "=", "self", ".", "parse_query", "(", "filter", ")", "try", ":", "result", "=", "self", ".", "table", ".", "search", "(", "allcond", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "result", "=", "[", "]", "result", "=", "TinyMongoCursor", "(", "result", ",", "sort", "=", "sort", ",", "skip", "=", "skip", ",", "limit", "=", "limit", ")", "return", "result" ]
Finds all matching results :param query: dictionary representing the mongo query :return: cursor containing the search results
[ "Finds", "all", "matching", "results" ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/tinymongo.py#L441-L469
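find() above turns a Mongo-style filter into a TinyDB query and wraps the hits in a cursor that applies sort, skip and limit. A short usage sketch, guarded so it only runs when tinymongo is installed; the folder, collection name and documents are made up:

try:
    import tempfile
    from tinymongo import TinyMongoClient

    conn = TinyMongoClient(tempfile.mkdtemp())        # TinyDB JSON files live in this folder
    users = conn.demo_db.users                        # database and collection are created lazily
    users.insert_many([{'name': 'ada', 'age': 36},
                       {'name': 'bob', 'age': 17}])

    print(users.find({'age': {'$gte': 18}}).count())  # -> 1
    print(users.find_one({'name': 'ada'})['age'])     # -> 36
except ImportError:
    print('tinymongo not installed; the record above shows how find() builds the query')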
1,181
schapman1974/tinymongo
tinymongo/tinymongo.py
TinyMongoCollection.find_one
def find_one(self, filter=None): """ Finds one matching query element :param query: dictionary representing the mongo query :return: the resulting document (if found) """ if self.table is None: self.build_table() allcond = self.parse_query(filter) return self.table.get(allcond)
python
def find_one(self, filter=None): """ Finds one matching query element :param query: dictionary representing the mongo query :return: the resulting document (if found) """ if self.table is None: self.build_table() allcond = self.parse_query(filter) return self.table.get(allcond)
[ "def", "find_one", "(", "self", ",", "filter", "=", "None", ")", ":", "if", "self", ".", "table", "is", "None", ":", "self", ".", "build_table", "(", ")", "allcond", "=", "self", ".", "parse_query", "(", "filter", ")", "return", "self", ".", "table", ".", "get", "(", "allcond", ")" ]
Finds one matching query element :param query: dictionary representing the mongo query :return: the resulting document (if found)
[ "Finds", "one", "matching", "query", "element" ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/tinymongo.py#L471-L484
1,182
schapman1974/tinymongo
tinymongo/tinymongo.py
TinyMongoCollection.remove
def remove(self, spec_or_id, multi=True, *args, **kwargs): """Backwards compatibility with remove""" if multi: return self.delete_many(spec_or_id) return self.delete_one(spec_or_id)
python
def remove(self, spec_or_id, multi=True, *args, **kwargs): """Backwards compatibility with remove""" if multi: return self.delete_many(spec_or_id) return self.delete_one(spec_or_id)
[ "def", "remove", "(", "self", ",", "spec_or_id", ",", "multi", "=", "True", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "multi", ":", "return", "self", ".", "delete_many", "(", "spec_or_id", ")", "return", "self", ".", "delete_one", "(", "spec_or_id", ")" ]
Backwards compatibility with remove
[ "Backwards", "compatibility", "with", "remove" ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/tinymongo.py#L486-L490
1,183
schapman1974/tinymongo
tinymongo/tinymongo.py
TinyMongoCollection.delete_one
def delete_one(self, query): """ Deletes one document from the collection :param query: dictionary representing the mongo query :return: DeleteResult """ item = self.find_one(query) result = self.table.remove(where(u'_id') == item[u'_id']) return DeleteResult(raw_result=result)
python
def delete_one(self, query): """ Deletes one document from the collection :param query: dictionary representing the mongo query :return: DeleteResult """ item = self.find_one(query) result = self.table.remove(where(u'_id') == item[u'_id']) return DeleteResult(raw_result=result)
[ "def", "delete_one", "(", "self", ",", "query", ")", ":", "item", "=", "self", ".", "find_one", "(", "query", ")", "result", "=", "self", ".", "table", ".", "remove", "(", "where", "(", "u'_id'", ")", "==", "item", "[", "u'_id'", "]", ")", "return", "DeleteResult", "(", "raw_result", "=", "result", ")" ]
Deletes one document from the collection :param query: dictionary representing the mongo query :return: DeleteResult
[ "Deletes", "one", "document", "from", "the", "collection" ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/tinymongo.py#L492-L502
1,184
schapman1974/tinymongo
tinymongo/tinymongo.py
TinyMongoCollection.delete_many
def delete_many(self, query): """ Removes all items matching the mongo query :param query: dictionary representing the mongo query :return: DeleteResult """ items = self.find(query) result = [ self.table.remove(where(u'_id') == item[u'_id']) for item in items ] if query == {}: # need to reset TinyDB's index for docs order consistency self.table._last_id = 0 return DeleteResult(raw_result=result)
python
def delete_many(self, query): """ Removes all items matching the mongo query :param query: dictionary representing the mongo query :return: DeleteResult """ items = self.find(query) result = [ self.table.remove(where(u'_id') == item[u'_id']) for item in items ] if query == {}: # need to reset TinyDB's index for docs order consistency self.table._last_id = 0 return DeleteResult(raw_result=result)
[ "def", "delete_many", "(", "self", ",", "query", ")", ":", "items", "=", "self", ".", "find", "(", "query", ")", "result", "=", "[", "self", ".", "table", ".", "remove", "(", "where", "(", "u'_id'", ")", "==", "item", "[", "u'_id'", "]", ")", "for", "item", "in", "items", "]", "if", "query", "==", "{", "}", ":", "# need to reset TinyDB's index for docs order consistency", "self", ".", "table", ".", "_last_id", "=", "0", "return", "DeleteResult", "(", "raw_result", "=", "result", ")" ]
Removes all items matching the mongo query :param query: dictionary representing the mongo query :return: DeleteResult
[ "Removes", "all", "items", "matching", "the", "mongo", "query" ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/tinymongo.py#L504-L521
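A short sketch contrasting the two delete helpers above, again assuming the pymongo-style TinyMongoClient/insert_one entry points and that the returned DeleteResult exposes raw_result as pymongo's does.

from tinymongo import TinyMongoClient

tasks = TinyMongoClient('./tinydb_storage').demo_db.tasks   # hypothetical storage path
tasks.insert_one({'status': 'stale', 'n': 1})
tasks.insert_one({'status': 'stale', 'n': 2})

tasks.delete_one({'status': 'stale'})                       # removes only the first matching document
result = tasks.delete_many({'status': 'stale'})             # removes every remaining match
print(result.raw_result)                                    # list of TinyDB remove() results, one per deleted doc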
1,185
schapman1974/tinymongo
tinymongo/tinymongo.py
TinyMongoCursor.paginate
def paginate(self, skip, limit): """Paginate list of records""" if not self.count() or not limit: return skip = skip or 0 pages = int(ceil(self.count() / float(limit))) limits = {} last = 0 for i in range(pages): current = limit * i limits[last] = current last = current # example with count == 62 # {0: 20, 20: 40, 40: 60, 60: 62} if limit and limit < self.count(): limit = limits.get(skip, self.count()) self.cursordat = self.cursordat[skip: limit]
python
def paginate(self, skip, limit): """Paginate list of records""" if not self.count() or not limit: return skip = skip or 0 pages = int(ceil(self.count() / float(limit))) limits = {} last = 0 for i in range(pages): current = limit * i limits[last] = current last = current # example with count == 62 # {0: 20, 20: 40, 40: 60, 60: 62} if limit and limit < self.count(): limit = limits.get(skip, self.count()) self.cursordat = self.cursordat[skip: limit]
[ "def", "paginate", "(", "self", ",", "skip", ",", "limit", ")", ":", "if", "not", "self", ".", "count", "(", ")", "or", "not", "limit", ":", "return", "skip", "=", "skip", "or", "0", "pages", "=", "int", "(", "ceil", "(", "self", ".", "count", "(", ")", "/", "float", "(", "limit", ")", ")", ")", "limits", "=", "{", "}", "last", "=", "0", "for", "i", "in", "range", "(", "pages", ")", ":", "current", "=", "limit", "*", "i", "limits", "[", "last", "]", "=", "current", "last", "=", "current", "# example with count == 62", "# {0: 20, 20: 40, 40: 60, 60: 62}", "if", "limit", "and", "limit", "<", "self", ".", "count", "(", ")", ":", "limit", "=", "limits", ".", "get", "(", "skip", ",", "self", ".", "count", "(", ")", ")", "self", ".", "cursordat", "=", "self", ".", "cursordat", "[", "skip", ":", "limit", "]" ]
Paginate list of records
[ "Paginate", "list", "of", "records" ]
993048059dc0aa789d879b69feb79a0f237a60b3
https://github.com/schapman1974/tinymongo/blob/993048059dc0aa789d879b69feb79a0f237a60b3/tinymongo/tinymongo.py#L548-L564
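The page-boundary arithmetic above is easy to misread, so here is a standalone sketch of just that computation, detached from the cursor class, reproducing the count == 62 example from the inline comment.

from math import ceil

def page_bounds(count, skip, limit):
    # Mirrors paginate(): build a {page_start: page_end} map, then resolve the
    # slice end for the requested skip, falling back to count for the last page.
    if not count or not limit:
        return 0, count
    skip = skip or 0
    pages = int(ceil(count / float(limit)))
    limits = {}
    last = 0
    for i in range(pages):
        current = limit * i
        limits[last] = current
        last = current
    if limit < count:
        limit = limits.get(skip, count)
    return skip, limit

print(page_bounds(62, 0, 20))    # (0, 20)  -> records[0:20]
print(page_bounds(62, 40, 20))   # (40, 60) -> records[40:60]
print(page_bounds(62, 60, 20))   # (60, 62) -> the short last page comes from the .get() fallback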
1,186
TracyWebTech/django-revproxy
revproxy/utils.py
should_stream
def should_stream(proxy_response): """Function to verify if the proxy_response must be converted into a stream.This will be done by checking the proxy_response content-length and verify if its length is bigger than one stipulated by MIN_STREAMING_LENGTH. :param proxy_response: An Instance of urllib3.response.HTTPResponse :returns: A boolean stating if the proxy_response should be treated as a stream """ content_type = proxy_response.headers.get('Content-Type') if is_html_content_type(content_type): return False try: content_length = int(proxy_response.headers.get('Content-Length', 0)) except ValueError: content_length = 0 if not content_length or content_length > MIN_STREAMING_LENGTH: return True return False
python
def should_stream(proxy_response): """Function to verify if the proxy_response must be converted into a stream.This will be done by checking the proxy_response content-length and verify if its length is bigger than one stipulated by MIN_STREAMING_LENGTH. :param proxy_response: An Instance of urllib3.response.HTTPResponse :returns: A boolean stating if the proxy_response should be treated as a stream """ content_type = proxy_response.headers.get('Content-Type') if is_html_content_type(content_type): return False try: content_length = int(proxy_response.headers.get('Content-Length', 0)) except ValueError: content_length = 0 if not content_length or content_length > MIN_STREAMING_LENGTH: return True return False
[ "def", "should_stream", "(", "proxy_response", ")", ":", "content_type", "=", "proxy_response", ".", "headers", ".", "get", "(", "'Content-Type'", ")", "if", "is_html_content_type", "(", "content_type", ")", ":", "return", "False", "try", ":", "content_length", "=", "int", "(", "proxy_response", ".", "headers", ".", "get", "(", "'Content-Length'", ",", "0", ")", ")", "except", "ValueError", ":", "content_length", "=", "0", "if", "not", "content_length", "or", "content_length", ">", "MIN_STREAMING_LENGTH", ":", "return", "True", "return", "False" ]
Function to verify whether the proxy_response must be converted into a stream. This is done by checking the proxy_response Content-Length and verifying whether it is greater than the threshold set by MIN_STREAMING_LENGTH. :param proxy_response: An instance of urllib3.response.HTTPResponse :returns: A boolean stating whether the proxy_response should be treated as a stream
[ "Function", "to", "verify", "if", "the", "proxy_response", "must", "be", "converted", "into", "a", "stream", ".", "This", "will", "be", "done", "by", "checking", "the", "proxy_response", "content", "-", "length", "and", "verify", "if", "its", "length", "is", "bigger", "than", "one", "stipulated", "by", "MIN_STREAMING_LENGTH", "." ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/utils.py#L60-L83
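To make the decision rule concrete, a standalone sketch: MIN_STREAMING_LENGTH's real value lives in revproxy/utils.py and is not shown in this record, so the constant below (and the simplified HTML check standing in for is_html_content_type) are placeholders.

MIN_STREAMING_LENGTH = 4 * 1024   # placeholder threshold, not necessarily revproxy's value

def should_stream_headers(headers):
    # Same rule as above, applied to a plain header dict instead of an HTTPResponse.
    content_type = headers.get('Content-Type', '')
    if content_type.startswith('text/html'):          # simplified is_html_content_type()
        return False
    try:
        content_length = int(headers.get('Content-Length', 0))
    except ValueError:
        content_length = 0
    return not content_length or content_length > MIN_STREAMING_LENGTH

print(should_stream_headers({'Content-Type': 'text/html', 'Content-Length': '999999'}))      # False: HTML is never streamed
print(should_stream_headers({'Content-Type': 'application/zip'}))                            # True: unknown length
print(should_stream_headers({'Content-Type': 'application/json', 'Content-Length': '120'}))  # False: small body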
1,187
TracyWebTech/django-revproxy
revproxy/utils.py
get_charset
def get_charset(content_type): """Function used to retrieve the charset from a content-type.If there is no charset in the content type then the charset defined on DEFAULT_CHARSET will be returned :param content_type: A string containing a Content-Type header :returns: A string containing the charset """ if not content_type: return DEFAULT_CHARSET matched = _get_charset_re.search(content_type) if matched: # Extract the charset and strip its double quotes return matched.group('charset').replace('"', '') return DEFAULT_CHARSET
python
def get_charset(content_type): """Function used to retrieve the charset from a content-type.If there is no charset in the content type then the charset defined on DEFAULT_CHARSET will be returned :param content_type: A string containing a Content-Type header :returns: A string containing the charset """ if not content_type: return DEFAULT_CHARSET matched = _get_charset_re.search(content_type) if matched: # Extract the charset and strip its double quotes return matched.group('charset').replace('"', '') return DEFAULT_CHARSET
[ "def", "get_charset", "(", "content_type", ")", ":", "if", "not", "content_type", ":", "return", "DEFAULT_CHARSET", "matched", "=", "_get_charset_re", ".", "search", "(", "content_type", ")", "if", "matched", ":", "# Extract the charset and strip its double quotes", "return", "matched", ".", "group", "(", "'charset'", ")", ".", "replace", "(", "'\"'", ",", "''", ")", "return", "DEFAULT_CHARSET" ]
Function used to retrieve the charset from a content-type. If there is no charset in the content type, the charset defined by DEFAULT_CHARSET will be returned :param content_type: A string containing a Content-Type header :returns: A string containing the charset
[ "Function", "used", "to", "retrieve", "the", "charset", "from", "a", "content", "-", "type", ".", "If", "there", "is", "no", "charset", "in", "the", "content", "type", "then", "the", "charset", "defined", "on", "DEFAULT_CHARSET", "will", "be", "returned" ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/utils.py#L86-L101
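_get_charset_re is defined elsewhere in utils.py, so the self-contained version below substitutes an illustrative regex with a named 'charset' group; the quote-stripping behaviour matches the record.

import re

_get_charset_re = re.compile(r';\s*charset=(?P<charset>[^\s;]+)', re.IGNORECASE)  # illustrative pattern
DEFAULT_CHARSET = 'utf-8'                                                         # assumed default

def get_charset(content_type):
    if not content_type:
        return DEFAULT_CHARSET
    matched = _get_charset_re.search(content_type)
    if matched:
        return matched.group('charset').replace('"', '')   # strip surrounding double quotes
    return DEFAULT_CHARSET

print(get_charset('text/html; charset="ISO-8859-1"'))   # ISO-8859-1
print(get_charset('application/json'))                  # utf-8 (falls back to the default)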
1,188
TracyWebTech/django-revproxy
revproxy/utils.py
required_header
def required_header(header): """Function that verify if the header parameter is a essential header :param header: A string represented a header :returns: A boolean value that represent if the header is required """ if header in IGNORE_HEADERS: return False if header.startswith('HTTP_') or header == 'CONTENT_TYPE': return True return False
python
def required_header(header): """Function that verify if the header parameter is a essential header :param header: A string represented a header :returns: A boolean value that represent if the header is required """ if header in IGNORE_HEADERS: return False if header.startswith('HTTP_') or header == 'CONTENT_TYPE': return True return False
[ "def", "required_header", "(", "header", ")", ":", "if", "header", "in", "IGNORE_HEADERS", ":", "return", "False", "if", "header", ".", "startswith", "(", "'HTTP_'", ")", "or", "header", "==", "'CONTENT_TYPE'", ":", "return", "True", "return", "False" ]
Function that verifies whether the header parameter is an essential header :param header: A string representing a header :returns: A boolean value indicating whether the header is required
[ "Function", "that", "verify", "if", "the", "header", "parameter", "is", "a", "essential", "header" ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/utils.py#L104-L116
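IGNORE_HEADERS is defined elsewhere in utils.py; with an illustrative tuple in its place, the filter behaves roughly like this:

IGNORE_HEADERS = ('HTTP_ACCEPT_ENCODING', 'HTTP_HOST')   # illustrative guess, not revproxy's actual tuple

def required_header(header):
    if header in IGNORE_HEADERS:
        return False
    return header.startswith('HTTP_') or header == 'CONTENT_TYPE'

for key in ('HTTP_ACCEPT', 'CONTENT_TYPE', 'CONTENT_LENGTH', 'HTTP_HOST', 'wsgi.input'):
    print(key, required_header(key))
# HTTP_ACCEPT True, CONTENT_TYPE True, CONTENT_LENGTH False, HTTP_HOST False, wsgi.input False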
1,189
TracyWebTech/django-revproxy
revproxy/utils.py
normalize_request_headers
def normalize_request_headers(request): r"""Function used to transform header, replacing 'HTTP\_' to '' and replace '_' to '-' :param request: A HttpRequest that will be transformed :returns: A dictionary with the normalized headers """ norm_headers = {} for header, value in request.META.items(): if required_header(header): norm_header = header.replace('HTTP_', '').title().replace('_', '-') norm_headers[norm_header] = value return norm_headers
python
def normalize_request_headers(request): r"""Function used to transform header, replacing 'HTTP\_' to '' and replace '_' to '-' :param request: A HttpRequest that will be transformed :returns: A dictionary with the normalized headers """ norm_headers = {} for header, value in request.META.items(): if required_header(header): norm_header = header.replace('HTTP_', '').title().replace('_', '-') norm_headers[norm_header] = value return norm_headers
[ "def", "normalize_request_headers", "(", "request", ")", ":", "norm_headers", "=", "{", "}", "for", "header", ",", "value", "in", "request", ".", "META", ".", "items", "(", ")", ":", "if", "required_header", "(", "header", ")", ":", "norm_header", "=", "header", ".", "replace", "(", "'HTTP_'", ",", "''", ")", ".", "title", "(", ")", ".", "replace", "(", "'_'", ",", "'-'", ")", "norm_headers", "[", "norm_header", "]", "=", "value", "return", "norm_headers" ]
r"""Function used to transform header, replacing 'HTTP\_' to '' and replace '_' to '-' :param request: A HttpRequest that will be transformed :returns: A dictionary with the normalized headers
[ "r", "Function", "used", "to", "transform", "header", "replacing", "HTTP", "\\", "_", "to", "and", "replace", "_", "to", "-" ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/utils.py#L130-L143
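A standalone sketch of the header-name normalization, using a plain dict in place of request.META and the same illustrative IGNORE_HEADERS assumption as above; the interesting part is the HTTP_X_FORWARDED_FOR -> X-Forwarded-For renaming.

def normalize(meta, ignore=('HTTP_ACCEPT_ENCODING', 'HTTP_HOST')):
    headers = {}
    for key, value in meta.items():
        # inline version of required_header() with the assumed ignore list
        if key in ignore or not (key.startswith('HTTP_') or key == 'CONTENT_TYPE'):
            continue
        headers[key.replace('HTTP_', '').title().replace('_', '-')] = value
    return headers

meta = {
    'HTTP_ACCEPT_ENCODING': 'gzip',            # dropped: assumed to be ignored
    'HTTP_X_FORWARDED_FOR': '10.0.0.1',
    'CONTENT_TYPE': 'application/json',
    'wsgi.input': None,                        # dropped: not an HTTP_* key
}
print(normalize(meta))
# {'X-Forwarded-For': '10.0.0.1', 'Content-Type': 'application/json'}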
1,190
TracyWebTech/django-revproxy
revproxy/utils.py
encode_items
def encode_items(items): """Function that encode all elements in the list of items passed as a parameter :param items: A list of tuple :returns: A list of tuple with all items encoded in 'utf-8' """ encoded = [] for key, values in items: for value in values: encoded.append((key.encode('utf-8'), value.encode('utf-8'))) return encoded
python
def encode_items(items): """Function that encode all elements in the list of items passed as a parameter :param items: A list of tuple :returns: A list of tuple with all items encoded in 'utf-8' """ encoded = [] for key, values in items: for value in values: encoded.append((key.encode('utf-8'), value.encode('utf-8'))) return encoded
[ "def", "encode_items", "(", "items", ")", ":", "encoded", "=", "[", "]", "for", "key", ",", "values", "in", "items", ":", "for", "value", "in", "values", ":", "encoded", ".", "append", "(", "(", "key", ".", "encode", "(", "'utf-8'", ")", ",", "value", ".", "encode", "(", "'utf-8'", ")", ")", ")", "return", "encoded" ]
Function that encodes all elements in the list of items passed as a parameter :param items: A list of tuples :returns: A list of tuples with all items encoded in 'utf-8'
[ "Function", "that", "encode", "all", "elements", "in", "the", "list", "of", "items", "passed", "as", "a", "parameter" ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/utils.py#L146-L157
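Worth noting from the code above: items is expected to be an iterable of (key, values) pairs where values is itself a list (the shape urllib.parse.parse_qs produces), so multi-valued parameters are flattened into repeated byte tuples:

def encode_items(items):
    encoded = []
    for key, values in items:
        for value in values:
            encoded.append((key.encode('utf-8'), value.encode('utf-8')))
    return encoded

# e.g. the parsed querystring ?q=café&tag=a&tag=b
print(encode_items([('q', ['café']), ('tag', ['a', 'b'])]))
# [(b'q', b'caf\xc3\xa9'), (b'tag', b'a'), (b'tag', b'b')]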
1,191
TracyWebTech/django-revproxy
revproxy/utils.py
cookie_from_string
def cookie_from_string(cookie_string, strict_cookies=False): """Parser for HTTP header set-cookie The return from this function will be used as parameters for django's response.set_cookie method. Because set_cookie doesn't have parameter comment, this cookie attribute will be ignored. :param cookie_string: A string representing a valid cookie :param strict_cookies: Whether to only accept RFC-compliant cookies :returns: A dictionary containing the cookie_string attributes """ if strict_cookies: cookies = SimpleCookie(COOKIE_PREFIX + cookie_string) if not cookies.keys(): return None cookie_name, = cookies.keys() cookie_dict = {k: v for k, v in cookies[cookie_name].items() if v and k != 'comment'} cookie_dict['key'] = cookie_name cookie_dict['value'] = cookies[cookie_name].value return cookie_dict else: valid_attrs = ('path', 'domain', 'comment', 'expires', 'max_age', 'httponly', 'secure') cookie_dict = {} cookie_parts = cookie_string.split(';') try: key, value = cookie_parts[0].split('=', 1) cookie_dict['key'], cookie_dict['value'] = key, unquote(value) except ValueError: logger.warning('Invalid cookie: `%s`', cookie_string) return None if cookie_dict['value'].startswith('='): logger.warning('Invalid cookie: `%s`', cookie_string) return None for part in cookie_parts[1:]: if '=' in part: attr, value = part.split('=', 1) value = value.strip() else: attr = part value = '' attr = attr.strip().lower() if not attr: continue if attr in valid_attrs: if attr in ('httponly', 'secure'): cookie_dict[attr] = True elif attr in 'comment': # ignoring comment attr as explained in the # function docstring continue else: cookie_dict[attr] = unquote(value) else: logger.warning('Unknown cookie attribute %s', attr) return cookie_dict
python
def cookie_from_string(cookie_string, strict_cookies=False): """Parser for HTTP header set-cookie The return from this function will be used as parameters for django's response.set_cookie method. Because set_cookie doesn't have parameter comment, this cookie attribute will be ignored. :param cookie_string: A string representing a valid cookie :param strict_cookies: Whether to only accept RFC-compliant cookies :returns: A dictionary containing the cookie_string attributes """ if strict_cookies: cookies = SimpleCookie(COOKIE_PREFIX + cookie_string) if not cookies.keys(): return None cookie_name, = cookies.keys() cookie_dict = {k: v for k, v in cookies[cookie_name].items() if v and k != 'comment'} cookie_dict['key'] = cookie_name cookie_dict['value'] = cookies[cookie_name].value return cookie_dict else: valid_attrs = ('path', 'domain', 'comment', 'expires', 'max_age', 'httponly', 'secure') cookie_dict = {} cookie_parts = cookie_string.split(';') try: key, value = cookie_parts[0].split('=', 1) cookie_dict['key'], cookie_dict['value'] = key, unquote(value) except ValueError: logger.warning('Invalid cookie: `%s`', cookie_string) return None if cookie_dict['value'].startswith('='): logger.warning('Invalid cookie: `%s`', cookie_string) return None for part in cookie_parts[1:]: if '=' in part: attr, value = part.split('=', 1) value = value.strip() else: attr = part value = '' attr = attr.strip().lower() if not attr: continue if attr in valid_attrs: if attr in ('httponly', 'secure'): cookie_dict[attr] = True elif attr in 'comment': # ignoring comment attr as explained in the # function docstring continue else: cookie_dict[attr] = unquote(value) else: logger.warning('Unknown cookie attribute %s', attr) return cookie_dict
[ "def", "cookie_from_string", "(", "cookie_string", ",", "strict_cookies", "=", "False", ")", ":", "if", "strict_cookies", ":", "cookies", "=", "SimpleCookie", "(", "COOKIE_PREFIX", "+", "cookie_string", ")", "if", "not", "cookies", ".", "keys", "(", ")", ":", "return", "None", "cookie_name", ",", "=", "cookies", ".", "keys", "(", ")", "cookie_dict", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "cookies", "[", "cookie_name", "]", ".", "items", "(", ")", "if", "v", "and", "k", "!=", "'comment'", "}", "cookie_dict", "[", "'key'", "]", "=", "cookie_name", "cookie_dict", "[", "'value'", "]", "=", "cookies", "[", "cookie_name", "]", ".", "value", "return", "cookie_dict", "else", ":", "valid_attrs", "=", "(", "'path'", ",", "'domain'", ",", "'comment'", ",", "'expires'", ",", "'max_age'", ",", "'httponly'", ",", "'secure'", ")", "cookie_dict", "=", "{", "}", "cookie_parts", "=", "cookie_string", ".", "split", "(", "';'", ")", "try", ":", "key", ",", "value", "=", "cookie_parts", "[", "0", "]", ".", "split", "(", "'='", ",", "1", ")", "cookie_dict", "[", "'key'", "]", ",", "cookie_dict", "[", "'value'", "]", "=", "key", ",", "unquote", "(", "value", ")", "except", "ValueError", ":", "logger", ".", "warning", "(", "'Invalid cookie: `%s`'", ",", "cookie_string", ")", "return", "None", "if", "cookie_dict", "[", "'value'", "]", ".", "startswith", "(", "'='", ")", ":", "logger", ".", "warning", "(", "'Invalid cookie: `%s`'", ",", "cookie_string", ")", "return", "None", "for", "part", "in", "cookie_parts", "[", "1", ":", "]", ":", "if", "'='", "in", "part", ":", "attr", ",", "value", "=", "part", ".", "split", "(", "'='", ",", "1", ")", "value", "=", "value", ".", "strip", "(", ")", "else", ":", "attr", "=", "part", "value", "=", "''", "attr", "=", "attr", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "not", "attr", ":", "continue", "if", "attr", "in", "valid_attrs", ":", "if", "attr", "in", "(", "'httponly'", ",", "'secure'", ")", ":", "cookie_dict", "[", "attr", "]", "=", "True", "elif", "attr", "in", "'comment'", ":", "# ignoring comment attr as explained in the", "# function docstring", "continue", "else", ":", "cookie_dict", "[", "attr", "]", "=", "unquote", "(", "value", ")", "else", ":", "logger", ".", "warning", "(", "'Unknown cookie attribute %s'", ",", "attr", ")", "return", "cookie_dict" ]
Parser for the HTTP Set-Cookie header. The return value of this function will be used as parameters for django's response.set_cookie method. Because set_cookie doesn't have a comment parameter, this cookie attribute will be ignored. :param cookie_string: A string representing a valid cookie :param strict_cookies: Whether to only accept RFC-compliant cookies :returns: A dictionary containing the cookie_string attributes
[ "Parser", "for", "HTTP", "header", "set", "-", "cookie", "The", "return", "from", "this", "function", "will", "be", "used", "as", "parameters", "for", "django", "s", "response", ".", "set_cookie", "method", ".", "Because", "set_cookie", "doesn", "t", "have", "parameter", "comment", "this", "cookie", "attribute", "will", "be", "ignored", "." ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/utils.py#L163-L228
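To make the non-strict parsing path concrete, a small usage sketch (assuming django-revproxy is installed and Django is configured so revproxy.utils imports cleanly); the returned keys map directly onto Django's response.set_cookie keyword arguments.

from revproxy.utils import cookie_from_string

cookie = cookie_from_string('csrftoken=abc123; Path=/; Secure; HttpOnly')
print(cookie)
# {'key': 'csrftoken', 'value': 'abc123', 'path': '/', 'secure': True, 'httponly': True}

# Malformed input is rejected rather than raising:
print(cookie_from_string('no-equals-sign-here'))   # None (a warning is logged instead)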
1,192
TracyWebTech/django-revproxy
revproxy/utils.py
unquote
def unquote(value): """Remove wrapping quotes from a string. :param value: A string that might be wrapped in double quotes, such as a HTTP cookie value. :returns: Beginning and ending quotes removed and escaped quotes (``\"``) unescaped """ if len(value) > 1 and value[0] == '"' and value[-1] == '"': value = value[1:-1].replace(r'\"', '"') return value
python
def unquote(value): """Remove wrapping quotes from a string. :param value: A string that might be wrapped in double quotes, such as a HTTP cookie value. :returns: Beginning and ending quotes removed and escaped quotes (``\"``) unescaped """ if len(value) > 1 and value[0] == '"' and value[-1] == '"': value = value[1:-1].replace(r'\"', '"') return value
[ "def", "unquote", "(", "value", ")", ":", "if", "len", "(", "value", ")", ">", "1", "and", "value", "[", "0", "]", "==", "'\"'", "and", "value", "[", "-", "1", "]", "==", "'\"'", ":", "value", "=", "value", "[", "1", ":", "-", "1", "]", ".", "replace", "(", "r'\\\"'", ",", "'\"'", ")", "return", "value" ]
Remove wrapping quotes from a string. :param value: A string that might be wrapped in double quotes, such as an HTTP cookie value. :returns: Beginning and ending quotes removed and escaped quotes (``\"``) unescaped
[ "Remove", "wrapping", "quotes", "from", "a", "string", "." ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/utils.py#L231-L241
1,193
TracyWebTech/django-revproxy
revproxy/transformer.py
asbool
def asbool(value): """Function used to convert certain string values into an appropriated boolean value.If value is not a string the built-in python bool function will be used to convert the passed parameter :param value: an object to be converted to a boolean value :returns: A boolean value """ is_string = isinstance(value, string_types) if is_string: value = value.strip().lower() if value in ('true', 'yes', 'on', 'y', 't', '1',): return True elif value in ('false', 'no', 'off', 'n', 'f', '0'): return False else: raise ValueError("String is not true/false: %r" % value) else: return bool(value)
python
def asbool(value): """Function used to convert certain string values into an appropriated boolean value.If value is not a string the built-in python bool function will be used to convert the passed parameter :param value: an object to be converted to a boolean value :returns: A boolean value """ is_string = isinstance(value, string_types) if is_string: value = value.strip().lower() if value in ('true', 'yes', 'on', 'y', 't', '1',): return True elif value in ('false', 'no', 'off', 'n', 'f', '0'): return False else: raise ValueError("String is not true/false: %r" % value) else: return bool(value)
[ "def", "asbool", "(", "value", ")", ":", "is_string", "=", "isinstance", "(", "value", ",", "string_types", ")", "if", "is_string", ":", "value", "=", "value", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "value", "in", "(", "'true'", ",", "'yes'", ",", "'on'", ",", "'y'", ",", "'t'", ",", "'1'", ",", ")", ":", "return", "True", "elif", "value", "in", "(", "'false'", ",", "'no'", ",", "'off'", ",", "'n'", ",", "'f'", ",", "'0'", ")", ":", "return", "False", "else", ":", "raise", "ValueError", "(", "\"String is not true/false: %r\"", "%", "value", ")", "else", ":", "return", "bool", "(", "value", ")" ]
Function used to convert certain string values into an appropriate boolean value. If value is not a string, the built-in Python bool function will be used to convert the passed parameter :param value: an object to be converted to a boolean value :returns: A boolean value
[ "Function", "used", "to", "convert", "certain", "string", "values", "into", "an", "appropriated", "boolean", "value", ".", "If", "value", "is", "not", "a", "string", "the", "built", "-", "in", "python", "bool", "function", "will", "be", "used", "to", "convert", "the", "passed", "parameter" ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/transformer.py#L30-L50
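A few representative inputs for asbool; the copy below swaps string_types for plain str so it runs standalone on Python 3, but the truth table is the one from the record.

def asbool(value):
    if isinstance(value, str):                    # string_types replaced with str for Python 3
        value = value.strip().lower()
        if value in ('true', 'yes', 'on', 'y', 't', '1'):
            return True
        if value in ('false', 'no', 'off', 'n', 'f', '0'):
            return False
        raise ValueError("String is not true/false: %r" % value)
    return bool(value)

print(asbool(' Yes '))   # True  (whitespace and case are ignored)
print(asbool('0'))       # False
print(asbool(None))      # False (non-strings fall through to bool())
print(asbool(2))         # True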
1,194
TracyWebTech/django-revproxy
revproxy/transformer.py
DiazoTransformer.should_transform
def should_transform(self): """Determine if we should transform the response :returns: A boolean value """ if not HAS_DIAZO: self.log.info("HAS_DIAZO: false") return False if asbool(self.request.META.get(DIAZO_OFF_REQUEST_HEADER)): self.log.info("DIAZO_OFF_REQUEST_HEADER in request.META: off") return False if asbool(self.response.get(DIAZO_OFF_RESPONSE_HEADER)): self.log.info("DIAZO_OFF_RESPONSE_HEADER in response.get: off") return False if self.request.is_ajax(): self.log.info("Request is AJAX") return False if self.response.streaming: self.log.info("Response has streaming") return False content_type = self.response.get('Content-Type') if not is_html_content_type(content_type): self.log.info("Content-type: false") return False content_encoding = self.response.get('Content-Encoding') if content_encoding in ('zip', 'compress'): self.log.info("Content encode is %s", content_encoding) return False status_code = str(self.response.status_code) if status_code.startswith('3') or \ status_code == '204' or \ status_code == '401': self.log.info("Status code: %s", status_code) return False if len(self.response.content) == 0: self.log.info("Response Content is EMPTY") return False self.log.info("Transform") return True
python
def should_transform(self): """Determine if we should transform the response :returns: A boolean value """ if not HAS_DIAZO: self.log.info("HAS_DIAZO: false") return False if asbool(self.request.META.get(DIAZO_OFF_REQUEST_HEADER)): self.log.info("DIAZO_OFF_REQUEST_HEADER in request.META: off") return False if asbool(self.response.get(DIAZO_OFF_RESPONSE_HEADER)): self.log.info("DIAZO_OFF_RESPONSE_HEADER in response.get: off") return False if self.request.is_ajax(): self.log.info("Request is AJAX") return False if self.response.streaming: self.log.info("Response has streaming") return False content_type = self.response.get('Content-Type') if not is_html_content_type(content_type): self.log.info("Content-type: false") return False content_encoding = self.response.get('Content-Encoding') if content_encoding in ('zip', 'compress'): self.log.info("Content encode is %s", content_encoding) return False status_code = str(self.response.status_code) if status_code.startswith('3') or \ status_code == '204' or \ status_code == '401': self.log.info("Status code: %s", status_code) return False if len(self.response.content) == 0: self.log.info("Response Content is EMPTY") return False self.log.info("Transform") return True
[ "def", "should_transform", "(", "self", ")", ":", "if", "not", "HAS_DIAZO", ":", "self", ".", "log", ".", "info", "(", "\"HAS_DIAZO: false\"", ")", "return", "False", "if", "asbool", "(", "self", ".", "request", ".", "META", ".", "get", "(", "DIAZO_OFF_REQUEST_HEADER", ")", ")", ":", "self", ".", "log", ".", "info", "(", "\"DIAZO_OFF_REQUEST_HEADER in request.META: off\"", ")", "return", "False", "if", "asbool", "(", "self", ".", "response", ".", "get", "(", "DIAZO_OFF_RESPONSE_HEADER", ")", ")", ":", "self", ".", "log", ".", "info", "(", "\"DIAZO_OFF_RESPONSE_HEADER in response.get: off\"", ")", "return", "False", "if", "self", ".", "request", ".", "is_ajax", "(", ")", ":", "self", ".", "log", ".", "info", "(", "\"Request is AJAX\"", ")", "return", "False", "if", "self", ".", "response", ".", "streaming", ":", "self", ".", "log", ".", "info", "(", "\"Response has streaming\"", ")", "return", "False", "content_type", "=", "self", ".", "response", ".", "get", "(", "'Content-Type'", ")", "if", "not", "is_html_content_type", "(", "content_type", ")", ":", "self", ".", "log", ".", "info", "(", "\"Content-type: false\"", ")", "return", "False", "content_encoding", "=", "self", ".", "response", ".", "get", "(", "'Content-Encoding'", ")", "if", "content_encoding", "in", "(", "'zip'", ",", "'compress'", ")", ":", "self", ".", "log", ".", "info", "(", "\"Content encode is %s\"", ",", "content_encoding", ")", "return", "False", "status_code", "=", "str", "(", "self", ".", "response", ".", "status_code", ")", "if", "status_code", ".", "startswith", "(", "'3'", ")", "or", "status_code", "==", "'204'", "or", "status_code", "==", "'401'", ":", "self", ".", "log", ".", "info", "(", "\"Status code: %s\"", ",", "status_code", ")", "return", "False", "if", "len", "(", "self", ".", "response", ".", "content", ")", "==", "0", ":", "self", ".", "log", ".", "info", "(", "\"Response Content is EMPTY\"", ")", "return", "False", "self", ".", "log", ".", "info", "(", "\"Transform\"", ")", "return", "True" ]
Determine if we should transform the response :returns: A boolean value
[ "Determine", "if", "we", "should", "transform", "the", "response" ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/transformer.py#L62-L110
1,195
TracyWebTech/django-revproxy
revproxy/transformer.py
DiazoTransformer.transform
def transform(self, rules, theme_template, is_html5, context_data=None): """Method used to make a transformation on the content of the http response based on the rules and theme_templates passed as paremters :param rules: A file with a set of diazo rules to make a transformation over the original response content :param theme_template: A file containing the template used to format the the original response content :param is_html5: A boolean parameter to identify a html5 doctype :returns: A response with a content transformed based on the rules and theme_template """ if not self.should_transform(): self.log.info("Don't need to be transformed") return self.response theme = loader.render_to_string(theme_template, context=context_data, request=self.request) output_xslt = compile_theme( rules=rules, theme=StringIO(theme), ) transform = etree.XSLT(output_xslt) self.log.debug("Transform: %s", transform) charset = get_charset(self.response.get('Content-Type')) try: decoded_response = self.response.content.decode(charset) except UnicodeDecodeError: decoded_response = self.response.content.decode(charset, 'ignore') self.log.warning("Charset is {} and type of encode used in file is\ different. Some unknown characteres might be\ ignored.".format(charset)) content_doc = etree.fromstring(decoded_response, parser=etree.HTMLParser()) self.response.content = transform(content_doc) if is_html5: self.set_html5_doctype() self.reset_headers() self.log.debug("Response transformer: %s", self.response) return self.response
python
def transform(self, rules, theme_template, is_html5, context_data=None): """Method used to make a transformation on the content of the http response based on the rules and theme_templates passed as paremters :param rules: A file with a set of diazo rules to make a transformation over the original response content :param theme_template: A file containing the template used to format the the original response content :param is_html5: A boolean parameter to identify a html5 doctype :returns: A response with a content transformed based on the rules and theme_template """ if not self.should_transform(): self.log.info("Don't need to be transformed") return self.response theme = loader.render_to_string(theme_template, context=context_data, request=self.request) output_xslt = compile_theme( rules=rules, theme=StringIO(theme), ) transform = etree.XSLT(output_xslt) self.log.debug("Transform: %s", transform) charset = get_charset(self.response.get('Content-Type')) try: decoded_response = self.response.content.decode(charset) except UnicodeDecodeError: decoded_response = self.response.content.decode(charset, 'ignore') self.log.warning("Charset is {} and type of encode used in file is\ different. Some unknown characteres might be\ ignored.".format(charset)) content_doc = etree.fromstring(decoded_response, parser=etree.HTMLParser()) self.response.content = transform(content_doc) if is_html5: self.set_html5_doctype() self.reset_headers() self.log.debug("Response transformer: %s", self.response) return self.response
[ "def", "transform", "(", "self", ",", "rules", ",", "theme_template", ",", "is_html5", ",", "context_data", "=", "None", ")", ":", "if", "not", "self", ".", "should_transform", "(", ")", ":", "self", ".", "log", ".", "info", "(", "\"Don't need to be transformed\"", ")", "return", "self", ".", "response", "theme", "=", "loader", ".", "render_to_string", "(", "theme_template", ",", "context", "=", "context_data", ",", "request", "=", "self", ".", "request", ")", "output_xslt", "=", "compile_theme", "(", "rules", "=", "rules", ",", "theme", "=", "StringIO", "(", "theme", ")", ",", ")", "transform", "=", "etree", ".", "XSLT", "(", "output_xslt", ")", "self", ".", "log", ".", "debug", "(", "\"Transform: %s\"", ",", "transform", ")", "charset", "=", "get_charset", "(", "self", ".", "response", ".", "get", "(", "'Content-Type'", ")", ")", "try", ":", "decoded_response", "=", "self", ".", "response", ".", "content", ".", "decode", "(", "charset", ")", "except", "UnicodeDecodeError", ":", "decoded_response", "=", "self", ".", "response", ".", "content", ".", "decode", "(", "charset", ",", "'ignore'", ")", "self", ".", "log", ".", "warning", "(", "\"Charset is {} and type of encode used in file is\\\n different. Some unknown characteres might be\\\n ignored.\"", ".", "format", "(", "charset", ")", ")", "content_doc", "=", "etree", ".", "fromstring", "(", "decoded_response", ",", "parser", "=", "etree", ".", "HTMLParser", "(", ")", ")", "self", ".", "response", ".", "content", "=", "transform", "(", "content_doc", ")", "if", "is_html5", ":", "self", ".", "set_html5_doctype", "(", ")", "self", ".", "reset_headers", "(", ")", "self", ".", "log", ".", "debug", "(", "\"Response transformer: %s\"", ",", "self", ".", "response", ")", "return", "self", ".", "response" ]
Method used to transform the content of the HTTP response based on the rules and theme_template passed as parameters :param rules: A file with a set of diazo rules used to transform the original response content :param theme_template: A file containing the template used to format the original response content :param is_html5: A boolean parameter identifying an HTML5 doctype :returns: A response whose content is transformed based on the rules and theme_template
[ "Method", "used", "to", "make", "a", "transformation", "on", "the", "content", "of", "the", "http", "response", "based", "on", "the", "rules", "and", "theme_templates", "passed", "as", "paremters" ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/transformer.py#L112-L161
1,196
TracyWebTech/django-revproxy
revproxy/transformer.py
DiazoTransformer.set_html5_doctype
def set_html5_doctype(self): """Method used to transform a doctype in to a properly html5 doctype """ doctype = b'<!DOCTYPE html>\n' content = doctype_re.subn(doctype, self.response.content, 1)[0] self.response.content = content
python
def set_html5_doctype(self): """Method used to transform a doctype in to a properly html5 doctype """ doctype = b'<!DOCTYPE html>\n' content = doctype_re.subn(doctype, self.response.content, 1)[0] self.response.content = content
[ "def", "set_html5_doctype", "(", "self", ")", ":", "doctype", "=", "b'<!DOCTYPE html>\\n'", "content", "=", "doctype_re", ".", "subn", "(", "doctype", ",", "self", ".", "response", ".", "content", ",", "1", ")", "[", "0", "]", "self", ".", "response", ".", "content", "=", "content" ]
Method used to transform a doctype into a proper HTML5 doctype
[ "Method", "used", "to", "transform", "a", "doctype", "in", "to", "a", "properly", "html5", "doctype" ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/transformer.py#L170-L175
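doctype_re is defined elsewhere in the module, so the pattern below is only an illustrative stand-in; the point is that subn(..., 1) swaps at most one legacy doctype for the HTML5 one.

import re

doctype_re = re.compile(br'<!DOCTYPE[^>]*?>\n?', re.IGNORECASE)   # illustrative pattern only

content = b'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">\n<html><body>hi</body></html>'
print(doctype_re.subn(b'<!DOCTYPE html>\n', content, 1)[0])
# b'<!DOCTYPE html>\n<html><body>hi</body></html>'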
1,197
TracyWebTech/django-revproxy
revproxy/connection.py
_output
def _output(self, s): """Host header should always be first""" if s.lower().startswith(b'host: '): self._buffer.insert(1, s) else: self._buffer.append(s)
python
def _output(self, s): """Host header should always be first""" if s.lower().startswith(b'host: '): self._buffer.insert(1, s) else: self._buffer.append(s)
[ "def", "_output", "(", "self", ",", "s", ")", ":", "if", "s", ".", "lower", "(", ")", ".", "startswith", "(", "b'host: '", ")", ":", "self", ".", "_buffer", ".", "insert", "(", "1", ",", "s", ")", "else", ":", "self", ".", "_buffer", ".", "append", "(", "s", ")" ]
Host header should always be first
[ "Host", "header", "should", "always", "be", "first" ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/connection.py#L6-L12
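A minimal illustration of why the override matters: the request line sits at index 0 of the buffer, so inserting any Host header at index 1 keeps it first among the headers regardless of the order in which they are emitted (a plain list stands in for the connection's _buffer).

_buffer = [b'GET /page HTTP/1.1']

def _output(s):
    if s.lower().startswith(b'host: '):
        _buffer.insert(1, s)     # pin Host right after the request line
    else:
        _buffer.append(s)

_output(b'Accept: */*')
_output(b'Host: upstream.example.com')
print(_buffer)
# [b'GET /page HTTP/1.1', b'Host: upstream.example.com', b'Accept: */*']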
1,198
TracyWebTech/django-revproxy
revproxy/response.py
get_django_response
def get_django_response(proxy_response, strict_cookies=False): """This method is used to create an appropriate response based on the Content-Length of the proxy_response. If the content is bigger than MIN_STREAMING_LENGTH, which is found on utils.py, than django.http.StreamingHttpResponse will be created, else a django.http.HTTPResponse will be created instead :param proxy_response: An Instance of urllib3.response.HTTPResponse that will create an appropriate response :param strict_cookies: Whether to only accept RFC-compliant cookies :returns: Returns an appropriate response based on the proxy_response content-length """ status = proxy_response.status headers = proxy_response.headers logger.debug('Proxy response headers: %s', headers) content_type = headers.get('Content-Type') logger.debug('Content-Type: %s', content_type) if should_stream(proxy_response): logger.info('Content-Length is bigger than %s', DEFAULT_AMT) response = StreamingHttpResponse(proxy_response.stream(DEFAULT_AMT), status=status, content_type=content_type) else: content = proxy_response.data or b'' response = HttpResponse(content, status=status, content_type=content_type) logger.info('Normalizing response headers') set_response_headers(response, headers) logger.debug('Response headers: %s', getattr(response, '_headers')) cookies = proxy_response.headers.getlist('set-cookie') logger.info('Checking for invalid cookies') for cookie_string in cookies: cookie_dict = cookie_from_string(cookie_string, strict_cookies=strict_cookies) # if cookie is invalid cookie_dict will be None if cookie_dict: response.set_cookie(**cookie_dict) logger.debug('Response cookies: %s', response.cookies) return response
python
def get_django_response(proxy_response, strict_cookies=False): """This method is used to create an appropriate response based on the Content-Length of the proxy_response. If the content is bigger than MIN_STREAMING_LENGTH, which is found on utils.py, than django.http.StreamingHttpResponse will be created, else a django.http.HTTPResponse will be created instead :param proxy_response: An Instance of urllib3.response.HTTPResponse that will create an appropriate response :param strict_cookies: Whether to only accept RFC-compliant cookies :returns: Returns an appropriate response based on the proxy_response content-length """ status = proxy_response.status headers = proxy_response.headers logger.debug('Proxy response headers: %s', headers) content_type = headers.get('Content-Type') logger.debug('Content-Type: %s', content_type) if should_stream(proxy_response): logger.info('Content-Length is bigger than %s', DEFAULT_AMT) response = StreamingHttpResponse(proxy_response.stream(DEFAULT_AMT), status=status, content_type=content_type) else: content = proxy_response.data or b'' response = HttpResponse(content, status=status, content_type=content_type) logger.info('Normalizing response headers') set_response_headers(response, headers) logger.debug('Response headers: %s', getattr(response, '_headers')) cookies = proxy_response.headers.getlist('set-cookie') logger.info('Checking for invalid cookies') for cookie_string in cookies: cookie_dict = cookie_from_string(cookie_string, strict_cookies=strict_cookies) # if cookie is invalid cookie_dict will be None if cookie_dict: response.set_cookie(**cookie_dict) logger.debug('Response cookies: %s', response.cookies) return response
[ "def", "get_django_response", "(", "proxy_response", ",", "strict_cookies", "=", "False", ")", ":", "status", "=", "proxy_response", ".", "status", "headers", "=", "proxy_response", ".", "headers", "logger", ".", "debug", "(", "'Proxy response headers: %s'", ",", "headers", ")", "content_type", "=", "headers", ".", "get", "(", "'Content-Type'", ")", "logger", ".", "debug", "(", "'Content-Type: %s'", ",", "content_type", ")", "if", "should_stream", "(", "proxy_response", ")", ":", "logger", ".", "info", "(", "'Content-Length is bigger than %s'", ",", "DEFAULT_AMT", ")", "response", "=", "StreamingHttpResponse", "(", "proxy_response", ".", "stream", "(", "DEFAULT_AMT", ")", ",", "status", "=", "status", ",", "content_type", "=", "content_type", ")", "else", ":", "content", "=", "proxy_response", ".", "data", "or", "b''", "response", "=", "HttpResponse", "(", "content", ",", "status", "=", "status", ",", "content_type", "=", "content_type", ")", "logger", ".", "info", "(", "'Normalizing response headers'", ")", "set_response_headers", "(", "response", ",", "headers", ")", "logger", ".", "debug", "(", "'Response headers: %s'", ",", "getattr", "(", "response", ",", "'_headers'", ")", ")", "cookies", "=", "proxy_response", ".", "headers", ".", "getlist", "(", "'set-cookie'", ")", "logger", ".", "info", "(", "'Checking for invalid cookies'", ")", "for", "cookie_string", "in", "cookies", ":", "cookie_dict", "=", "cookie_from_string", "(", "cookie_string", ",", "strict_cookies", "=", "strict_cookies", ")", "# if cookie is invalid cookie_dict will be None", "if", "cookie_dict", ":", "response", ".", "set_cookie", "(", "*", "*", "cookie_dict", ")", "logger", ".", "debug", "(", "'Response cookies: %s'", ",", "response", ".", "cookies", ")", "return", "response" ]
This method is used to create an appropriate response based on the Content-Length of the proxy_response. If the content is larger than MIN_STREAMING_LENGTH, which is defined in utils.py, a django.http.StreamingHttpResponse will be created; otherwise a django.http.HttpResponse will be created instead :param proxy_response: An instance of urllib3.response.HTTPResponse from which an appropriate response will be created :param strict_cookies: Whether to only accept RFC-compliant cookies :returns: Returns an appropriate response based on the proxy_response content-length
[ "This", "method", "is", "used", "to", "create", "an", "appropriate", "response", "based", "on", "the", "Content", "-", "Length", "of", "the", "proxy_response", ".", "If", "the", "content", "is", "bigger", "than", "MIN_STREAMING_LENGTH", "which", "is", "found", "on", "utils", ".", "py", "than", "django", ".", "http", ".", "StreamingHttpResponse", "will", "be", "created", "else", "a", "django", ".", "http", ".", "HTTPResponse", "will", "be", "created", "instead" ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/response.py#L13-L61
1,199
TracyWebTech/django-revproxy
revproxy/views.py
ProxyView.get_request_headers
def get_request_headers(self): """Return request headers that will be sent to upstream. The header REMOTE_USER is set to the current user if AuthenticationMiddleware is enabled and the view's add_remote_user property is True. .. versionadded:: 0.9.8 """ request_headers = self.get_proxy_request_headers(self.request) if (self.add_remote_user and hasattr(self.request, 'user') and self.request.user.is_active): request_headers['REMOTE_USER'] = self.request.user.get_username() self.log.info("REMOTE_USER set") return request_headers
python
def get_request_headers(self): """Return request headers that will be sent to upstream. The header REMOTE_USER is set to the current user if AuthenticationMiddleware is enabled and the view's add_remote_user property is True. .. versionadded:: 0.9.8 """ request_headers = self.get_proxy_request_headers(self.request) if (self.add_remote_user and hasattr(self.request, 'user') and self.request.user.is_active): request_headers['REMOTE_USER'] = self.request.user.get_username() self.log.info("REMOTE_USER set") return request_headers
[ "def", "get_request_headers", "(", "self", ")", ":", "request_headers", "=", "self", ".", "get_proxy_request_headers", "(", "self", ".", "request", ")", "if", "(", "self", ".", "add_remote_user", "and", "hasattr", "(", "self", ".", "request", ",", "'user'", ")", "and", "self", ".", "request", ".", "user", ".", "is_active", ")", ":", "request_headers", "[", "'REMOTE_USER'", "]", "=", "self", ".", "request", ".", "user", ".", "get_username", "(", ")", "self", ".", "log", ".", "info", "(", "\"REMOTE_USER set\"", ")", "return", "request_headers" ]
Return request headers that will be sent to upstream. The header REMOTE_USER is set to the current user if AuthenticationMiddleware is enabled and the view's add_remote_user property is True. .. versionadded:: 0.9.8
[ "Return", "request", "headers", "that", "will", "be", "sent", "to", "upstream", "." ]
b8d1d9e44eadbafbd16bc03f04d15560089d4472
https://github.com/TracyWebTech/django-revproxy/blob/b8d1d9e44eadbafbd16bc03f04d15560089d4472/revproxy/views.py#L117-L134