nwo
stringlengths 5
106
| sha
stringlengths 40
40
| path
stringlengths 4
174
| language
stringclasses 1
value | identifier
stringlengths 1
140
| parameters
stringlengths 0
87.7k
| argument_list
stringclasses 1
value | return_statement
stringlengths 0
426k
| docstring
stringlengths 0
64.3k
| docstring_summary
stringlengths 0
26.3k
| docstring_tokens
list | function
stringlengths 18
4.83M
| function_tokens
list | url
stringlengths 83
304
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
linkchecker/linkchecker
|
d1078ed8480e5cfc4264d0dbf026b45b45aede4d
|
linkcheck/lc_cgi.py
|
python
|
log
|
(env, msg)
|
Log message to WSGI error output.
|
Log message to WSGI error output.
|
[
"Log",
"message",
"to",
"WSGI",
"error",
"output",
"."
] |
def log(env, msg):
    """Write *msg*, terminated by a newline, to the WSGI error stream."""
    env['wsgi.errors'].write("%s\n" % msg)
|
[
"def",
"log",
"(",
"env",
",",
"msg",
")",
":",
"logfile",
"=",
"env",
"[",
"'wsgi.errors'",
"]",
"logfile",
".",
"write",
"(",
"\"%s\\n\"",
"%",
"msg",
")"
] |
https://github.com/linkchecker/linkchecker/blob/d1078ed8480e5cfc4264d0dbf026b45b45aede4d/linkcheck/lc_cgi.py#L241-L244
|
||
tanghaibao/jcvi
|
5e720870c0928996f8b77a38208106ff0447ccb6
|
jcvi/variation/str.py
|
python
|
data
|
(args)
|
%prog data data.bin samples.ids STR.ids meta.tsv
Make data.tsv based on meta.tsv.
|
%prog data data.bin samples.ids STR.ids meta.tsv
|
[
"%prog",
"data",
"data",
".",
"bin",
"samples",
".",
"ids",
"STR",
".",
"ids",
"meta",
".",
"tsv"
] |
def data(args):
    """
    %prog data data.bin samples.ids STR.ids meta.tsv
    Make data.tsv based on meta.tsv.
    """
    p = OptionParser(data.__doc__)
    p.add_option(
        "--notsv", default=False, action="store_true", help="Do not write data.tsv"
    )
    opts, args = p.parse_args(args)

    if len(args) != 4:
        sys.exit(not p.print_help())

    databin, sampleids, strids, metafile = args
    final_columns, percentiles = read_meta(metafile)
    df, m, samples, loci = read_binfile(databin, sampleids, strids)

    # Clean the data
    m %= 1000  # Get the larger of the two alleles
    m[m == 999] = -1  # Missing data

    # Keep only the loci listed in the meta file; everything else is dropped.
    final = set(final_columns)
    remove = [locus for locus in loci if locus not in final]

    pf = "STRs_{}_SEARCH".format(timestamp())
    filteredstrids = "{}.STR.ids".format(pf)
    # Context manager guarantees the handle is closed (original leaked on error)
    with open(filteredstrids, "w") as fw:
        print("\n".join(final_columns), file=fw)
    logging.debug(
        "Dropped {} columns; Retained {} columns (`{}`)".format(
            len(remove), len(final_columns), filteredstrids
        )
    )

    # Remove low-quality columns!
    df.drop(remove, inplace=True, axis=1)
    df.columns = final_columns

    filtered_bin = "{}.data.bin".format(pf)
    if need_update(databin, filtered_bin):
        # DataFrame.as_matrix() was removed in pandas 1.0; .values is the
        # drop-in equivalent and works on all pandas versions in use here.
        m = df.values
        m.tofile(filtered_bin)
        logging.debug("Filtered binary matrix written to `{}`".format(filtered_bin))

    # Write data output
    filtered_tsv = "{}.data.tsv".format(pf)
    if not opts.notsv and need_update(databin, filtered_tsv):
        df.to_csv(filtered_tsv, sep="\t", index_label="SampleKey")
|
[
"def",
"data",
"(",
"args",
")",
":",
"p",
"=",
"OptionParser",
"(",
"data",
".",
"__doc__",
")",
"p",
".",
"add_option",
"(",
"\"--notsv\"",
",",
"default",
"=",
"False",
",",
"action",
"=",
"\"store_true\"",
",",
"help",
"=",
"\"Do not write data.tsv\"",
")",
"opts",
",",
"args",
"=",
"p",
".",
"parse_args",
"(",
"args",
")",
"if",
"len",
"(",
"args",
")",
"!=",
"4",
":",
"sys",
".",
"exit",
"(",
"not",
"p",
".",
"print_help",
"(",
")",
")",
"databin",
",",
"sampleids",
",",
"strids",
",",
"metafile",
"=",
"args",
"final_columns",
",",
"percentiles",
"=",
"read_meta",
"(",
"metafile",
")",
"df",
",",
"m",
",",
"samples",
",",
"loci",
"=",
"read_binfile",
"(",
"databin",
",",
"sampleids",
",",
"strids",
")",
"# Clean the data",
"m",
"%=",
"1000",
"# Get the larger of the two alleles",
"m",
"[",
"m",
"==",
"999",
"]",
"=",
"-",
"1",
"# Missing data",
"final",
"=",
"set",
"(",
"final_columns",
")",
"remove",
"=",
"[",
"]",
"for",
"i",
",",
"locus",
"in",
"enumerate",
"(",
"loci",
")",
":",
"if",
"locus",
"not",
"in",
"final",
":",
"remove",
".",
"append",
"(",
"locus",
")",
"continue",
"pf",
"=",
"\"STRs_{}_SEARCH\"",
".",
"format",
"(",
"timestamp",
"(",
")",
")",
"filteredstrids",
"=",
"\"{}.STR.ids\"",
".",
"format",
"(",
"pf",
")",
"fw",
"=",
"open",
"(",
"filteredstrids",
",",
"\"w\"",
")",
"print",
"(",
"\"\\n\"",
".",
"join",
"(",
"final_columns",
")",
",",
"file",
"=",
"fw",
")",
"fw",
".",
"close",
"(",
")",
"logging",
".",
"debug",
"(",
"\"Dropped {} columns; Retained {} columns (`{}`)\"",
".",
"format",
"(",
"len",
"(",
"remove",
")",
",",
"len",
"(",
"final_columns",
")",
",",
"filteredstrids",
")",
")",
"# Remove low-quality columns!",
"df",
".",
"drop",
"(",
"remove",
",",
"inplace",
"=",
"True",
",",
"axis",
"=",
"1",
")",
"df",
".",
"columns",
"=",
"final_columns",
"filtered_bin",
"=",
"\"{}.data.bin\"",
".",
"format",
"(",
"pf",
")",
"if",
"need_update",
"(",
"databin",
",",
"filtered_bin",
")",
":",
"m",
"=",
"df",
".",
"as_matrix",
"(",
")",
"m",
".",
"tofile",
"(",
"filtered_bin",
")",
"logging",
".",
"debug",
"(",
"\"Filtered binary matrix written to `{}`\"",
".",
"format",
"(",
"filtered_bin",
")",
")",
"# Write data output",
"filtered_tsv",
"=",
"\"{}.data.tsv\"",
".",
"format",
"(",
"pf",
")",
"if",
"not",
"opts",
".",
"notsv",
"and",
"need_update",
"(",
"databin",
",",
"filtered_tsv",
")",
":",
"df",
".",
"to_csv",
"(",
"filtered_tsv",
",",
"sep",
"=",
"\"\\t\"",
",",
"index_label",
"=",
"\"SampleKey\"",
")"
] |
https://github.com/tanghaibao/jcvi/blob/5e720870c0928996f8b77a38208106ff0447ccb6/jcvi/variation/str.py#L788-L842
|
||
leancloud/satori
|
701caccbd4fe45765001ca60435c0cb499477c03
|
satori-rules/plugin/libs/redis/client.py
|
python
|
BasePipeline._execute_transaction
|
(self, connection, commands, raise_on_error)
|
return data
|
[] |
def _execute_transaction(self, connection, commands, raise_on_error):
    """Send MULTI, the queued commands, and EXEC as one packed batch,
    then parse every reply in order and run response callbacks manually.

    Returns the list of per-command results; raises on transaction abort,
    watch failure, or (if raise_on_error) the first command error.
    """
    # Wrap the user's commands in MULTI ... EXEC; each entry is (args, options).
    cmds = chain([(('MULTI', ), {})], commands, [(('EXEC', ), {})])
    # Pack everything into a single buffer so one socket write sends it all.
    all_cmds = connection.pack_commands([args for args, _ in cmds])
    connection.send_packed_command(all_cmds)
    errors = []
    # parse off the response for MULTI
    # NOTE: we need to handle ResponseErrors here and continue
    # so that we read all the additional command messages from
    # the socket
    try:
        self.parse_response(connection, '_')
    except ResponseError:
        errors.append((0, sys.exc_info()[1]))
    # and all the other commands
    for i, command in enumerate(commands):
        try:
            self.parse_response(connection, '_')
        except ResponseError:
            ex = sys.exc_info()[1]
            # i + 1 because reply 0 belongs to MULTI, not the first command.
            self.annotate_exception(ex, i + 1, command[0])
            errors.append((i, ex))
    # parse the EXEC.
    try:
        response = self.parse_response(connection, '_')
    except ExecAbortError:
        # Server aborted the transaction; discard explicit state, then
        # surface the first queuing error (more informative than the abort).
        if self.explicit_transaction:
            self.immediate_execute_command('DISCARD')
        if errors:
            raise errors[0][1]
        raise sys.exc_info()[1]
    if response is None:
        # EXEC returned nil: a WATCHed key changed before execution.
        raise WatchError("Watched variable changed.")
    # put any parse errors into the response
    for i, e in errors:
        response.insert(i, e)
    if len(response) != len(commands):
        # Protocol stream is desynchronized; the connection is unusable.
        self.connection.disconnect()
        raise ResponseError("Wrong number of response items from "
                            "pipeline execution")
    # find any errors in the response and raise if necessary
    if raise_on_error:
        self.raise_first_error(commands, response)
    # We have to run response callbacks manually
    data = []
    for r, cmd in izip(response, commands):
        if not isinstance(r, Exception):
            args, options = cmd
            command_name = args[0]
            if command_name in self.response_callbacks:
                r = self.response_callbacks[command_name](r, **options)
        data.append(r)
    return data
|
[
"def",
"_execute_transaction",
"(",
"self",
",",
"connection",
",",
"commands",
",",
"raise_on_error",
")",
":",
"cmds",
"=",
"chain",
"(",
"[",
"(",
"(",
"'MULTI'",
",",
")",
",",
"{",
"}",
")",
"]",
",",
"commands",
",",
"[",
"(",
"(",
"'EXEC'",
",",
")",
",",
"{",
"}",
")",
"]",
")",
"all_cmds",
"=",
"connection",
".",
"pack_commands",
"(",
"[",
"args",
"for",
"args",
",",
"_",
"in",
"cmds",
"]",
")",
"connection",
".",
"send_packed_command",
"(",
"all_cmds",
")",
"errors",
"=",
"[",
"]",
"# parse off the response for MULTI",
"# NOTE: we need to handle ResponseErrors here and continue",
"# so that we read all the additional command messages from",
"# the socket",
"try",
":",
"self",
".",
"parse_response",
"(",
"connection",
",",
"'_'",
")",
"except",
"ResponseError",
":",
"errors",
".",
"append",
"(",
"(",
"0",
",",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
")",
")",
"# and all the other commands",
"for",
"i",
",",
"command",
"in",
"enumerate",
"(",
"commands",
")",
":",
"try",
":",
"self",
".",
"parse_response",
"(",
"connection",
",",
"'_'",
")",
"except",
"ResponseError",
":",
"ex",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
"self",
".",
"annotate_exception",
"(",
"ex",
",",
"i",
"+",
"1",
",",
"command",
"[",
"0",
"]",
")",
"errors",
".",
"append",
"(",
"(",
"i",
",",
"ex",
")",
")",
"# parse the EXEC.",
"try",
":",
"response",
"=",
"self",
".",
"parse_response",
"(",
"connection",
",",
"'_'",
")",
"except",
"ExecAbortError",
":",
"if",
"self",
".",
"explicit_transaction",
":",
"self",
".",
"immediate_execute_command",
"(",
"'DISCARD'",
")",
"if",
"errors",
":",
"raise",
"errors",
"[",
"0",
"]",
"[",
"1",
"]",
"raise",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
"if",
"response",
"is",
"None",
":",
"raise",
"WatchError",
"(",
"\"Watched variable changed.\"",
")",
"# put any parse errors into the response",
"for",
"i",
",",
"e",
"in",
"errors",
":",
"response",
".",
"insert",
"(",
"i",
",",
"e",
")",
"if",
"len",
"(",
"response",
")",
"!=",
"len",
"(",
"commands",
")",
":",
"self",
".",
"connection",
".",
"disconnect",
"(",
")",
"raise",
"ResponseError",
"(",
"\"Wrong number of response items from \"",
"\"pipeline execution\"",
")",
"# find any errors in the response and raise if necessary",
"if",
"raise_on_error",
":",
"self",
".",
"raise_first_error",
"(",
"commands",
",",
"response",
")",
"# We have to run response callbacks manually",
"data",
"=",
"[",
"]",
"for",
"r",
",",
"cmd",
"in",
"izip",
"(",
"response",
",",
"commands",
")",
":",
"if",
"not",
"isinstance",
"(",
"r",
",",
"Exception",
")",
":",
"args",
",",
"options",
"=",
"cmd",
"command_name",
"=",
"args",
"[",
"0",
"]",
"if",
"command_name",
"in",
"self",
".",
"response_callbacks",
":",
"r",
"=",
"self",
".",
"response_callbacks",
"[",
"command_name",
"]",
"(",
"r",
",",
"*",
"*",
"options",
")",
"data",
".",
"append",
"(",
"r",
")",
"return",
"data"
] |
https://github.com/leancloud/satori/blob/701caccbd4fe45765001ca60435c0cb499477c03/satori-rules/plugin/libs/redis/client.py#L2492-L2551
|
|||
niosus/EasyClangComplete
|
3b16eb17735aaa3f56bb295fc5481b269ee9f2ef
|
plugin/clang/cindex33.py
|
python
|
Diagnostic.option
|
(self)
|
return conf.lib.clang_getDiagnosticOption(self, None)
|
The command-line option that enables this diagnostic.
|
The command-line option that enables this diagnostic.
|
[
"The",
"command",
"-",
"line",
"option",
"that",
"enables",
"this",
"diagnostic",
"."
] |
def option(self):
    """The command-line option that enables this diagnostic."""
    lib = conf.lib
    return lib.clang_getDiagnosticOption(self, None)
|
[
"def",
"option",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_getDiagnosticOption",
"(",
"self",
",",
"None",
")"
] |
https://github.com/niosus/EasyClangComplete/blob/3b16eb17735aaa3f56bb295fc5481b269ee9f2ef/plugin/clang/cindex33.py#L353-L355
|
|
openshift/openshift-tools
|
1188778e728a6e4781acf728123e5b356380fe6f
|
openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_openshift/src/ansible/oc_configmap.py
|
python
|
main
|
()
|
ansible oc module for managing OpenShift configmap objects
|
ansible oc module for managing OpenShift configmap objects
|
[
"ansible",
"oc",
"module",
"for",
"managing",
"OpenShift",
"configmap",
"objects"
] |
def main():
    '''
    ansible oc module for managing OpenShift configmap objects
    '''
    # Build the argument spec separately for readability.
    argspec = dict(
        kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
        state=dict(default='present', type='str',
                   choices=['present', 'absent', 'list']),
        debug=dict(default=False, type='bool'),
        namespace=dict(default='default', type='str'),
        name=dict(default=None, required=True, type='str'),
        from_file=dict(default=None, type='dict'),
        from_literal=dict(default=None, type='dict'),
    )
    module = AnsibleModule(
        argument_spec=argspec,
        supports_check_mode=True,
    )

    results = OCConfigMap.run_ansible(module.params, module.check_mode)
    if 'failed' in results:
        module.fail_json(**results)
    module.exit_json(**results)
|
[
"def",
"main",
"(",
")",
":",
"module",
"=",
"AnsibleModule",
"(",
"argument_spec",
"=",
"dict",
"(",
"kubeconfig",
"=",
"dict",
"(",
"default",
"=",
"'/etc/origin/master/admin.kubeconfig'",
",",
"type",
"=",
"'str'",
")",
",",
"state",
"=",
"dict",
"(",
"default",
"=",
"'present'",
",",
"type",
"=",
"'str'",
",",
"choices",
"=",
"[",
"'present'",
",",
"'absent'",
",",
"'list'",
"]",
")",
",",
"debug",
"=",
"dict",
"(",
"default",
"=",
"False",
",",
"type",
"=",
"'bool'",
")",
",",
"namespace",
"=",
"dict",
"(",
"default",
"=",
"'default'",
",",
"type",
"=",
"'str'",
")",
",",
"name",
"=",
"dict",
"(",
"default",
"=",
"None",
",",
"required",
"=",
"True",
",",
"type",
"=",
"'str'",
")",
",",
"from_file",
"=",
"dict",
"(",
"default",
"=",
"None",
",",
"type",
"=",
"'dict'",
")",
",",
"from_literal",
"=",
"dict",
"(",
"default",
"=",
"None",
",",
"type",
"=",
"'dict'",
")",
",",
")",
",",
"supports_check_mode",
"=",
"True",
",",
")",
"rval",
"=",
"OCConfigMap",
".",
"run_ansible",
"(",
"module",
".",
"params",
",",
"module",
".",
"check_mode",
")",
"if",
"'failed'",
"in",
"rval",
":",
"module",
".",
"fail_json",
"(",
"*",
"*",
"rval",
")",
"module",
".",
"exit_json",
"(",
"*",
"*",
"rval",
")"
] |
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_openshift/src/ansible/oc_configmap.py#L5-L29
|
||
CoinCheung/BiSeNet
|
f9231b7c971413e6ebdfcd961fbea53417b18851
|
tools/demo_video.py
|
python
|
infer_batch
|
(frames)
|
[] |
def infer_batch(frames):
    """Run the segmentation net on a batch of frames and queue the label maps.

    NOTE(review): relies on module-level `net` and `out_q` — confirm both are
    initialized before this is called.
    """
    batch = torch.cat(frames, dim=0).cuda()
    orig_h, orig_w = batch.size()[2:]
    # Network input must be divisible by 32, hence the fixed 768x768 resize.
    batch = F.interpolate(batch, size=(768, 768), mode='bilinear',
                          align_corners=False)
    logits = net(batch)[0]
    # Resize predictions back to the original frame size, then take the
    # per-pixel argmax as the label map on CPU.
    labels = F.interpolate(logits, size=(orig_h, orig_w), mode='bilinear',
                           align_corners=False).argmax(dim=1).detach().cpu()
    out_q.put(labels)
|
[
"def",
"infer_batch",
"(",
"frames",
")",
":",
"frames",
"=",
"torch",
".",
"cat",
"(",
"frames",
",",
"dim",
"=",
"0",
")",
".",
"cuda",
"(",
")",
"H",
",",
"W",
"=",
"frames",
".",
"size",
"(",
")",
"[",
"2",
":",
"]",
"frames",
"=",
"F",
".",
"interpolate",
"(",
"frames",
",",
"size",
"=",
"(",
"768",
",",
"768",
")",
",",
"mode",
"=",
"'bilinear'",
",",
"align_corners",
"=",
"False",
")",
"# must be divisible by 32",
"out",
"=",
"net",
"(",
"frames",
")",
"[",
"0",
"]",
"out",
"=",
"F",
".",
"interpolate",
"(",
"out",
",",
"size",
"=",
"(",
"H",
",",
"W",
")",
",",
"mode",
"=",
"'bilinear'",
",",
"align_corners",
"=",
"False",
")",
".",
"argmax",
"(",
"dim",
"=",
"1",
")",
".",
"detach",
"(",
")",
".",
"cpu",
"(",
")",
"out_q",
".",
"put",
"(",
"out",
")"
] |
https://github.com/CoinCheung/BiSeNet/blob/f9231b7c971413e6ebdfcd961fbea53417b18851/tools/demo_video.py#L95-L103
|
||||
misterch0c/shadowbroker
|
e3a069bea47a2c1009697941ac214adc6f90aa8d
|
windows/Resources/Python/Core/Lib/lib-tk/ttk.py
|
python
|
Combobox.__init__
|
(self, master=None, **kw)
|
Construct a Ttk Combobox widget with the parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
exportselection, justify, height, postcommand, state,
textvariable, values, width
|
Construct a Ttk Combobox widget with the parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
exportselection, justify, height, postcommand, state,
textvariable, values, width
|
[
"Construct",
"a",
"Ttk",
"Combobox",
"widget",
"with",
"the",
"parent",
"master",
".",
"STANDARD",
"OPTIONS",
"class",
"cursor",
"style",
"takefocus",
"WIDGET",
"-",
"SPECIFIC",
"OPTIONS",
"exportselection",
"justify",
"height",
"postcommand",
"state",
"textvariable",
"values",
"width"
] |
def __init__(self, master=None, **kw):
    """Construct a Ttk Combobox widget with the parent master.
    STANDARD OPTIONS
        class, cursor, style, takefocus
    WIDGET-SPECIFIC OPTIONS
        exportselection, justify, height, postcommand, state,
        textvariable, values, width
    """
    # The 'values' option must be pre-formatted into Tcl list syntax.
    try:
        values = kw['values']
    except KeyError:
        pass
    else:
        kw['values'] = _format_optdict({'v': values})[1]
    Entry.__init__(self, master, 'ttk::combobox', **kw)
|
[
"def",
"__init__",
"(",
"self",
",",
"master",
"=",
"None",
",",
"*",
"*",
"kw",
")",
":",
"if",
"'values'",
"in",
"kw",
":",
"kw",
"[",
"'values'",
"]",
"=",
"_format_optdict",
"(",
"{",
"'v'",
":",
"kw",
"[",
"'values'",
"]",
"}",
")",
"[",
"1",
"]",
"Entry",
".",
"__init__",
"(",
"self",
",",
"master",
",",
"'ttk::combobox'",
",",
"*",
"*",
"kw",
")"
] |
https://github.com/misterch0c/shadowbroker/blob/e3a069bea47a2c1009697941ac214adc6f90aa8d/windows/Resources/Python/Core/Lib/lib-tk/ttk.py#L604-L618
|
||
nucleic/enaml
|
65c2a2a2d765e88f2e1103046680571894bb41ed
|
enaml/qt/qt_dock_pane.py
|
python
|
QCustomDockWidget._hideTitleBar
|
(self)
|
Hides the title bar for the widget.
|
Hides the title bar for the widget.
|
[
"Hides",
"the",
"title",
"bar",
"for",
"the",
"widget",
"."
] |
def _hideTitleBar(self):
    """ Hides the title bar by installing an empty title-bar widget,
    unless a custom one is already set.
    """
    if self.titleBarWidget() is not None:
        return
    self.setTitleBarWidget(QWidget())
|
[
"def",
"_hideTitleBar",
"(",
"self",
")",
":",
"if",
"self",
".",
"titleBarWidget",
"(",
")",
"is",
"None",
":",
"self",
".",
"setTitleBarWidget",
"(",
"QWidget",
"(",
")",
")"
] |
https://github.com/nucleic/enaml/blob/65c2a2a2d765e88f2e1103046680571894bb41ed/enaml/qt/qt_dock_pane.py#L90-L95
|
||
thearn/Python-Arduino-Command-API
|
610171b3ae153542aca42d354fbb26c32027f38f
|
examples.py
|
python
|
adjustBrightness
|
(pot_pin, led_pin, baud, port="")
|
Adjusts brightness of an LED using a
potentiometer.
|
Adjusts brightness of an LED using a
potentiometer.
|
[
"Adjusts",
"brightness",
"of",
"an",
"LED",
"using",
"a",
"potentiometer",
"."
] |
def adjustBrightness(pot_pin, led_pin, baud, port=""):
    """
    Adjusts brightness of an LED using a
    potentiometer.

    :param pot_pin: analog pin the potentiometer wiper is connected to
    :param led_pin: PWM pin driving the LED
    :param baud: serial baud rate for the Arduino link
    :param port: serial port name; auto-detected when empty

    Loops forever; interrupt to stop.
    """
    board = Arduino(baud, port=port)
    while True:
        time.sleep(0.01)
        # Scale the 0-1023 ADC reading down to the 0-255 PWM range.
        # `//` keeps integer division under both Python 2 and 3
        # (the original `/ 4` would yield a float on Python 3).
        val = board.analogRead(pot_pin) // 4
        # print() call works on both Python 2 and 3 (`print val` is 2-only).
        print(val)
        board.analogWrite(led_pin, val)
|
[
"def",
"adjustBrightness",
"(",
"pot_pin",
",",
"led_pin",
",",
"baud",
",",
"port",
"=",
"\"\"",
")",
":",
"board",
"=",
"Arduino",
"(",
"baud",
",",
"port",
"=",
"port",
")",
"while",
"True",
":",
"time",
".",
"sleep",
"(",
"0.01",
")",
"val",
"=",
"board",
".",
"analogRead",
"(",
"pot_pin",
")",
"/",
"4",
"print",
"val",
"board",
".",
"analogWrite",
"(",
"led_pin",
",",
"val",
")"
] |
https://github.com/thearn/Python-Arduino-Command-API/blob/610171b3ae153542aca42d354fbb26c32027f38f/examples.py#L37-L47
|
||
marcelm/cutadapt
|
c63043e0f43970619bb7f8c1242912c236d60545
|
src/cutadapt/parser.py
|
python
|
AdapterParser.parse_multi
|
(self, type_spec_pairs: List[Tuple[str, str]])
|
return adapters
|
Parse all three types of commandline options that can be used to
specify adapters. adapters must be a list of (str, str) pairs, where the first is
the adapter type (either 'front', 'back' or 'anywhere') and the second is the
adapter specification given on the commandline
Return a list of appropriate Adapter classes.
|
Parse all three types of commandline options that can be used to
specify adapters. adapters must be a list of (str, str) pairs, where the first is
the adapter type (either 'front', 'back' or 'anywhere') and the second is the
adapter specification given on the commandline
|
[
"Parse",
"all",
"three",
"types",
"of",
"commandline",
"options",
"that",
"can",
"be",
"used",
"to",
"specify",
"adapters",
".",
"adapters",
"must",
"be",
"a",
"list",
"of",
"(",
"str",
"str",
")",
"pairs",
"where",
"the",
"first",
"is",
"the",
"adapter",
"type",
"(",
"either",
"front",
"back",
"or",
"anywhere",
")",
"and",
"the",
"second",
"is",
"the",
"adapter",
"specification",
"given",
"on",
"the",
"commandline"
] |
def parse_multi(self, type_spec_pairs: List[Tuple[str, str]]) -> List[Adapter]:
    """
    Parse all three types of commandline options that can be used to
    specify adapters. adapters must be a list of (str, str) pairs, where the first is
    the adapter type (either 'front', 'back' or 'anywhere') and the second is the
    adapter specification given on the commandline
    Return a list of appropriate Adapter classes.
    """
    valid_types = {'front', 'back', 'anywhere'}
    result = []  # type: List[Adapter]
    for adapter_type, specification in type_spec_pairs:
        if adapter_type not in valid_types:
            raise ValueError('adapter type must be front, back or anywhere')
        result.extend(self.parse(specification, adapter_type))
    return result
|
[
"def",
"parse_multi",
"(",
"self",
",",
"type_spec_pairs",
":",
"List",
"[",
"Tuple",
"[",
"str",
",",
"str",
"]",
"]",
")",
"->",
"List",
"[",
"Adapter",
"]",
":",
"adapters",
"=",
"[",
"]",
"# type: List[Adapter]",
"for",
"cmdline_type",
",",
"spec",
"in",
"type_spec_pairs",
":",
"if",
"cmdline_type",
"not",
"in",
"{",
"'front'",
",",
"'back'",
",",
"'anywhere'",
"}",
":",
"raise",
"ValueError",
"(",
"'adapter type must be front, back or anywhere'",
")",
"adapters",
".",
"extend",
"(",
"self",
".",
"parse",
"(",
"spec",
",",
"cmdline_type",
")",
")",
"return",
"adapters"
] |
https://github.com/marcelm/cutadapt/blob/c63043e0f43970619bb7f8c1242912c236d60545/src/cutadapt/parser.py#L446-L460
|
|
LabPy/lantz
|
3e878e3f765a4295b0089d04e241d4beb7b8a65b
|
lantz/drivers/andor/ccd.py
|
python
|
CCD.setint
|
(self, strcommand, value)
|
SetInt function.
|
SetInt function.
|
[
"SetInt",
"function",
"."
] |
def setint(self, strcommand, value):
    """SetInt function.

    Convert *strcommand* and *value* to the C types the Andor SDK
    expects and forward them to AT_SetInt on this camera handle.
    """
    feature = ct.c_wchar_p(strcommand)
    target = ct.c_longlong(value)
    self.lib.AT_SetInt(self.AT_H, feature, target)
|
[
"def",
"setint",
"(",
"self",
",",
"strcommand",
",",
"value",
")",
":",
"command",
"=",
"ct",
".",
"c_wchar_p",
"(",
"strcommand",
")",
"value",
"=",
"ct",
".",
"c_longlong",
"(",
"value",
")",
"self",
".",
"lib",
".",
"AT_SetInt",
"(",
"self",
".",
"AT_H",
",",
"command",
",",
"value",
")"
] |
https://github.com/LabPy/lantz/blob/3e878e3f765a4295b0089d04e241d4beb7b8a65b/lantz/drivers/andor/ccd.py#L1810-L1815
|
||
openstack/magnum
|
fa298eeab19b1d87070d72c7c4fb26cd75b0781e
|
magnum/common/exception.py
|
python
|
MagnumException.__init__
|
(self, message=None, **kwargs)
|
[] |
def __init__(self, message=None, **kwargs):
    """Build the exception message by %-interpolating *kwargs*.

    Falls back to the unformatted message (after logging) when the
    kwargs do not match the format placeholders, unless configuration
    demands fatal formatting errors.
    """
    self.kwargs = kwargs

    # A class-level `code` attribute (if any) is folded into kwargs so it
    # can appear in the formatted message.
    if 'code' not in self.kwargs and hasattr(self, 'code'):
        self.kwargs['code'] = self.code

    if message:
        self.message = message

    try:
        self.message = self.message % kwargs
    except Exception:
        # kwargs doesn't match a variable in the message
        # log the issue and the kwargs
        LOG.exception('Exception in string format operation, '
                      'kwargs: %s', kwargs)
        try:
            if CONF.fatal_exception_format_errors:
                raise
        except cfg.NoSuchOptError:
            # Note: work around for Bug: #1447873
            if CONF.oslo_versionedobjects.fatal_exception_format_errors:
                raise

    super(MagnumException, self).__init__(self.message)
|
[
"def",
"__init__",
"(",
"self",
",",
"message",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"kwargs",
"=",
"kwargs",
"if",
"'code'",
"not",
"in",
"self",
".",
"kwargs",
"and",
"hasattr",
"(",
"self",
",",
"'code'",
")",
":",
"self",
".",
"kwargs",
"[",
"'code'",
"]",
"=",
"self",
".",
"code",
"if",
"message",
":",
"self",
".",
"message",
"=",
"message",
"try",
":",
"self",
".",
"message",
"=",
"self",
".",
"message",
"%",
"kwargs",
"except",
"Exception",
":",
"# kwargs doesn't match a variable in the message",
"# log the issue and the kwargs",
"LOG",
".",
"exception",
"(",
"'Exception in string format operation, '",
"'kwargs: %s'",
",",
"kwargs",
")",
"try",
":",
"if",
"CONF",
".",
"fatal_exception_format_errors",
":",
"raise",
"except",
"cfg",
".",
"NoSuchOptError",
":",
"# Note: work around for Bug: #1447873",
"if",
"CONF",
".",
"oslo_versionedobjects",
".",
"fatal_exception_format_errors",
":",
"raise",
"super",
"(",
"MagnumException",
",",
"self",
")",
".",
"__init__",
"(",
"self",
".",
"message",
")"
] |
https://github.com/openstack/magnum/blob/fa298eeab19b1d87070d72c7c4fb26cd75b0781e/magnum/common/exception.py#L80-L104
|
||||
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/media_player/__init__.py
|
python
|
MediaPlayerEntity.set_volume_level
|
(self, volume)
|
Set volume level, range 0..1.
|
Set volume level, range 0..1.
|
[
"Set",
"volume",
"level",
"range",
"0",
"..",
"1",
"."
] |
def set_volume_level(self, volume):
    """Set volume level, range 0..1."""
    # Abstract hook: concrete media players must override this method.
    raise NotImplementedError
|
[
"def",
"set_volume_level",
"(",
"self",
",",
"volume",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/media_player/__init__.py#L658-L660
|
||
krintoxi/NoobSec-Toolkit
|
38738541cbc03cedb9a3b3ed13b629f781ad64f6
|
NoobSecToolkit /tools/sqli/tamper/space2hash.py
|
python
|
tamper
|
(payload, **kwargs)
|
return retVal
|
Replaces space character (' ') with a pound character ('#') followed by
a random string and a new line ('\n')
Requirement:
* MySQL
Tested against:
* MySQL 4.0, 5.0
Notes:
* Useful to bypass several web application firewalls
* Used during the ModSecurity SQL injection challenge,
http://modsecurity.org/demo/challenge.html
>>> random.seed(0)
>>> tamper('1 AND 9227=9227')
'1%23nVNaVoPYeva%0AAND%23ngNvzqu%0A9227=9227'
|
Replaces space character (' ') with a pound character ('#') followed by
a random string and a new line ('\n')
|
[
"Replaces",
"space",
"character",
"(",
")",
"with",
"a",
"pound",
"character",
"(",
"#",
")",
"followed",
"by",
"a",
"random",
"string",
"and",
"a",
"new",
"line",
"(",
"\\",
"n",
")"
] |
def tamper(payload, **kwargs):
"""
Replaces space character (' ') with a pound character ('#') followed by
a random string and a new line ('\n')
Requirement:
* MySQL
Tested against:
* MySQL 4.0, 5.0
Notes:
* Useful to bypass several web application firewalls
* Used during the ModSecurity SQL injection challenge,
http://modsecurity.org/demo/challenge.html
>>> random.seed(0)
>>> tamper('1 AND 9227=9227')
'1%23nVNaVoPYeva%0AAND%23ngNvzqu%0A9227=9227'
"""
retVal = ""
if payload:
for i in xrange(len(payload)):
if payload[i].isspace():
randomStr = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in xrange(random.randint(6, 12)))
retVal += "%%23%s%%0A" % randomStr
elif payload[i] == '#' or payload[i:i + 3] == '-- ':
retVal += payload[i:]
break
else:
retVal += payload[i]
return retVal
|
[
"def",
"tamper",
"(",
"payload",
",",
"*",
"*",
"kwargs",
")",
":",
"retVal",
"=",
"\"\"",
"if",
"payload",
":",
"for",
"i",
"in",
"xrange",
"(",
"len",
"(",
"payload",
")",
")",
":",
"if",
"payload",
"[",
"i",
"]",
".",
"isspace",
"(",
")",
":",
"randomStr",
"=",
"''",
".",
"join",
"(",
"random",
".",
"choice",
"(",
"string",
".",
"ascii_uppercase",
"+",
"string",
".",
"ascii_lowercase",
")",
"for",
"_",
"in",
"xrange",
"(",
"random",
".",
"randint",
"(",
"6",
",",
"12",
")",
")",
")",
"retVal",
"+=",
"\"%%23%s%%0A\"",
"%",
"randomStr",
"elif",
"payload",
"[",
"i",
"]",
"==",
"'#'",
"or",
"payload",
"[",
"i",
":",
"i",
"+",
"3",
"]",
"==",
"'-- '",
":",
"retVal",
"+=",
"payload",
"[",
"i",
":",
"]",
"break",
"else",
":",
"retVal",
"+=",
"payload",
"[",
"i",
"]",
"return",
"retVal"
] |
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit /tools/sqli/tamper/space2hash.py#L21-L55
|
|
openhatch/oh-mainline
|
ce29352a034e1223141dcc2f317030bbc3359a51
|
vendor/packages/Django/django/contrib/auth/models.py
|
python
|
PermissionsMixin.has_module_perms
|
(self, app_label)
|
return _user_has_module_perms(self, app_label)
|
Returns True if the user has any permissions in the given app label.
Uses pretty much the same logic as has_perm, above.
|
Returns True if the user has any permissions in the given app label.
Uses pretty much the same logic as has_perm, above.
|
[
"Returns",
"True",
"if",
"the",
"user",
"has",
"any",
"permissions",
"in",
"the",
"given",
"app",
"label",
".",
"Uses",
"pretty",
"much",
"the",
"same",
"logic",
"as",
"has_perm",
"above",
"."
] |
def has_module_perms(self, app_label):
    """
    Returns True if the user has any permissions in the given app label.
    Uses pretty much the same logic as has_perm, above.
    """
    # Active superusers are implicitly granted every permission.
    if self.is_superuser and self.is_active:
        return True

    return _user_has_module_perms(self, app_label)
|
[
"def",
"has_module_perms",
"(",
"self",
",",
"app_label",
")",
":",
"# Active superusers have all permissions.",
"if",
"self",
".",
"is_active",
"and",
"self",
".",
"is_superuser",
":",
"return",
"True",
"return",
"_user_has_module_perms",
"(",
"self",
",",
"app_label",
")"
] |
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/Django/django/contrib/auth/models.py#L358-L367
|
|
pandas-dev/pandas
|
5ba7d714014ae8feaccc0dd4a98890828cf2832d
|
pandas/core/tools/datetimes.py
|
python
|
_to_datetime_with_format
|
(
arg,
orig_arg,
name,
tz,
fmt: str,
exact: bool,
errors: str,
infer_datetime_format: bool,
)
|
Try parsing with the given format, returning None on failure.
|
Try parsing with the given format, returning None on failure.
|
[
"Try",
"parsing",
"with",
"the",
"given",
"format",
"returning",
"None",
"on",
"failure",
"."
] |
def _to_datetime_with_format(
    arg,
    orig_arg,
    name,
    tz,
    fmt: str,
    exact: bool,
    errors: str,
    infer_datetime_format: bool,
) -> Index | None:
    """
    Try parsing with the given format, returning None on failure.
    """
    result = None
    try:
        # shortcut formatting here
        if fmt == "%Y%m%d":
            # pass orig_arg as float-dtype may have been converted to
            # datetime64[ns]
            orig_arg = ensure_object(orig_arg)
            try:
                # may return None without raising
                result = _attempt_YYYYMMDD(orig_arg, errors=errors)
            except (ValueError, TypeError, OutOfBoundsDatetime) as err:
                # Re-raise as ValueError but keep the original cause chained.
                raise ValueError(
                    "cannot convert the input to '%Y%m%d' date format"
                ) from err
            if result is not None:
                utc = tz == "utc"
                return _box_as_indexlike(result, utc=utc, name=name)

        # fallback
        res = _array_strptime_with_fallback(
            arg, name, tz, fmt, exact, errors, infer_datetime_format
        )
        return res
    except ValueError as err:
        # Fallback to try to convert datetime objects if timezone-aware
        # datetime objects are found without passing `utc=True`
        try:
            values, tz = conversion.datetime_to_datetime64(arg)
            dta = DatetimeArray(values, dtype=tz_to_dtype(tz))
            return DatetimeIndex._simple_new(dta, name=name)
        except (ValueError, TypeError):
            # The fallback failed too; surface the original strptime error,
            # not the secondary conversion failure.
            raise err
|
[
"def",
"_to_datetime_with_format",
"(",
"arg",
",",
"orig_arg",
",",
"name",
",",
"tz",
",",
"fmt",
":",
"str",
",",
"exact",
":",
"bool",
",",
"errors",
":",
"str",
",",
"infer_datetime_format",
":",
"bool",
",",
")",
"->",
"Index",
"|",
"None",
":",
"result",
"=",
"None",
"try",
":",
"# shortcut formatting here",
"if",
"fmt",
"==",
"\"%Y%m%d\"",
":",
"# pass orig_arg as float-dtype may have been converted to",
"# datetime64[ns]",
"orig_arg",
"=",
"ensure_object",
"(",
"orig_arg",
")",
"try",
":",
"# may return None without raising",
"result",
"=",
"_attempt_YYYYMMDD",
"(",
"orig_arg",
",",
"errors",
"=",
"errors",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
",",
"OutOfBoundsDatetime",
")",
"as",
"err",
":",
"raise",
"ValueError",
"(",
"\"cannot convert the input to '%Y%m%d' date format\"",
")",
"from",
"err",
"if",
"result",
"is",
"not",
"None",
":",
"utc",
"=",
"tz",
"==",
"\"utc\"",
"return",
"_box_as_indexlike",
"(",
"result",
",",
"utc",
"=",
"utc",
",",
"name",
"=",
"name",
")",
"# fallback",
"res",
"=",
"_array_strptime_with_fallback",
"(",
"arg",
",",
"name",
",",
"tz",
",",
"fmt",
",",
"exact",
",",
"errors",
",",
"infer_datetime_format",
")",
"return",
"res",
"except",
"ValueError",
"as",
"err",
":",
"# Fallback to try to convert datetime objects if timezone-aware",
"# datetime objects are found without passing `utc=True`",
"try",
":",
"values",
",",
"tz",
"=",
"conversion",
".",
"datetime_to_datetime64",
"(",
"arg",
")",
"dta",
"=",
"DatetimeArray",
"(",
"values",
",",
"dtype",
"=",
"tz_to_dtype",
"(",
"tz",
")",
")",
"return",
"DatetimeIndex",
".",
"_simple_new",
"(",
"dta",
",",
"name",
"=",
"name",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"raise",
"err"
] |
https://github.com/pandas-dev/pandas/blob/5ba7d714014ae8feaccc0dd4a98890828cf2832d/pandas/core/tools/datetimes.py#L469-L514
|
||
statsmodels/statsmodels
|
debbe7ea6ba28fe5bdb78f09f8cac694bef98722
|
statsmodels/stats/outliers_influence.py
|
python
|
GLMInfluence.cooks_distance
|
(self)
|
return cooks_d2, pvals
|
Cook's distance
Notes
-----
Based on one step approximation using resid_studentized and
hat_matrix_diag for the computation.
Cook's distance divides by the number of explanatory variables.
Computed using formulas for GLM and does not use results.cov_params.
It includes p-values based on the F-distribution which are only
approximate outside of linear Gaussian models.
|
Cook's distance
|
[
"Cook",
"s",
"distance"
] |
def cooks_distance(self):
"""Cook's distance
Notes
-----
Based on one step approximation using resid_studentized and
hat_matrix_diag for the computation.
Cook's distance divides by the number of explanatory variables.
Computed using formulas for GLM and does not use results.cov_params.
It includes p-values based on the F-distribution which are only
approximate outside of linear Gaussian models.
"""
hii = self.hat_matrix_diag
# Eubank p.93, 94
cooks_d2 = self.resid_studentized ** 2 / self.k_vars
cooks_d2 *= hii / (1 - hii)
from scipy import stats
# alpha = 0.1
# print stats.f.isf(1-alpha, n_params, res.df_modelwc)
pvals = stats.f.sf(cooks_d2, self.k_vars, self.results.df_resid)
return cooks_d2, pvals
|
[
"def",
"cooks_distance",
"(",
"self",
")",
":",
"hii",
"=",
"self",
".",
"hat_matrix_diag",
"# Eubank p.93, 94",
"cooks_d2",
"=",
"self",
".",
"resid_studentized",
"**",
"2",
"/",
"self",
".",
"k_vars",
"cooks_d2",
"*=",
"hii",
"/",
"(",
"1",
"-",
"hii",
")",
"from",
"scipy",
"import",
"stats",
"# alpha = 0.1",
"# print stats.f.isf(1-alpha, n_params, res.df_modelwc)",
"pvals",
"=",
"stats",
".",
"f",
".",
"sf",
"(",
"cooks_d2",
",",
"self",
".",
"k_vars",
",",
"self",
".",
"results",
".",
"df_resid",
")",
"return",
"cooks_d2",
",",
"pvals"
] |
https://github.com/statsmodels/statsmodels/blob/debbe7ea6ba28fe5bdb78f09f8cac694bef98722/statsmodels/stats/outliers_influence.py#L1389-L1414
|
|
dragonfly/dragonfly
|
a579b5eadf452e23b07d4caf27b402703b0012b7
|
dragonfly/opt/blackbox_optimiser.py
|
python
|
OptInitialiser.__init__
|
(self, func_caller, worker_manager, get_initial_qinfos=None,
initialisation_capital=None, options=None, reporter=None)
|
Constructor.
|
Constructor.
|
[
"Constructor",
"."
] |
def __init__(self, func_caller, worker_manager, get_initial_qinfos=None,
initialisation_capital=None, options=None, reporter=None):
""" Constructor. """
options = load_options(blackbox_opt_args, partial_options=options)
super(OptInitialiser, self).__init__(func_caller, worker_manager, model=None,
options=options, reporter=reporter)
self.options.max_num_steps = 0
self.options.get_initial_qinfos = get_initial_qinfos
self.options.init_capital = initialisation_capital
|
[
"def",
"__init__",
"(",
"self",
",",
"func_caller",
",",
"worker_manager",
",",
"get_initial_qinfos",
"=",
"None",
",",
"initialisation_capital",
"=",
"None",
",",
"options",
"=",
"None",
",",
"reporter",
"=",
"None",
")",
":",
"options",
"=",
"load_options",
"(",
"blackbox_opt_args",
",",
"partial_options",
"=",
"options",
")",
"super",
"(",
"OptInitialiser",
",",
"self",
")",
".",
"__init__",
"(",
"func_caller",
",",
"worker_manager",
",",
"model",
"=",
"None",
",",
"options",
"=",
"options",
",",
"reporter",
"=",
"reporter",
")",
"self",
".",
"options",
".",
"max_num_steps",
"=",
"0",
"self",
".",
"options",
".",
"get_initial_qinfos",
"=",
"get_initial_qinfos",
"self",
".",
"options",
".",
"init_capital",
"=",
"initialisation_capital"
] |
https://github.com/dragonfly/dragonfly/blob/a579b5eadf452e23b07d4caf27b402703b0012b7/dragonfly/opt/blackbox_optimiser.py#L343-L351
|
||
apache/libcloud
|
90971e17bfd7b6bb97b2489986472c531cc8e140
|
libcloud/compute/drivers/linode.py
|
python
|
LinodeNodeDriverV3.stop_node
|
(self, node)
|
return True
|
Shutdown the given Linode
|
Shutdown the given Linode
|
[
"Shutdown",
"the",
"given",
"Linode"
] |
def stop_node(self, node):
"""
Shutdown the given Linode
"""
params = {"api_action": "linode.shutdown", "LinodeID": node.id}
self.connection.request(API_ROOT, params=params)
return True
|
[
"def",
"stop_node",
"(",
"self",
",",
"node",
")",
":",
"params",
"=",
"{",
"\"api_action\"",
":",
"\"linode.shutdown\"",
",",
"\"LinodeID\"",
":",
"node",
".",
"id",
"}",
"self",
".",
"connection",
".",
"request",
"(",
"API_ROOT",
",",
"params",
"=",
"params",
")",
"return",
"True"
] |
https://github.com/apache/libcloud/blob/90971e17bfd7b6bb97b2489986472c531cc8e140/libcloud/compute/drivers/linode.py#L179-L186
|
|
itailang/SampleNet
|
442459abc54f9e14f0966a169a094a98febd32eb
|
classification/utils/plyfile.py
|
python
|
PlyListProperty._set_len_dtype
|
(self, len_dtype)
|
[] |
def _set_len_dtype(self, len_dtype):
self._len_dtype = _data_types[_lookup_type(len_dtype)]
|
[
"def",
"_set_len_dtype",
"(",
"self",
",",
"len_dtype",
")",
":",
"self",
".",
"_len_dtype",
"=",
"_data_types",
"[",
"_lookup_type",
"(",
"len_dtype",
")",
"]"
] |
https://github.com/itailang/SampleNet/blob/442459abc54f9e14f0966a169a094a98febd32eb/classification/utils/plyfile.py#L822-L823
|
||||
ilastik/ilastik
|
6acd2c554bc517e9c8ddad3623a7aaa2e6970c28
|
lazyflow/roi.py
|
python
|
sliceToRoi
|
(
slicing: Union[numbers.Integral, slice, "ellipsis", Sequence[Union[numbers.Integral, slice, "ellipsis"]]],
shape: Sequence[numbers.Integral],
*,
extendSingleton: bool = True,
)
|
return roi_start, roi_stop
|
Convert slicing to ROI.
Negative indices and slices are allowed similarly to usual Python
and NumPy indexing. None (newaxis) objects are not allowed.
As a special case, passing a 0-dimensional shape ``()`` always
returns 0-dimensional ROI, regardless of the value of `slicing`.
Args:
slicing: Valid slicing.
shape: Shape of the target array.
extendSingleton: Whether to keep dimensions of int indices
(similar to negated ``keepdims`` from some NumPy functions,
see the examples).
Returns:
``(start, stop)`` pair.
Raises:
ValueError: Slicing is not valid (either by itself or in
combination with the given shape).
Examples:
Point indexing:
>>> sliceToRoi(1, (7, 8, 9))
([1, 0, 0], [2, 8, 9])
>>> sliceToRoi(-1, (7, 8, 9))
([6, 0, 0], [7, 8, 9])
>>> sliceToRoi(1, (7, 8, 9), extendSingleton=False)
([1, 0, 0], [1, 8, 9])
>>> sliceToRoi(42, (7, 8, 9)) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError
Slice indexing:
>>> sliceToRoi(slice(None, None), (7, 8, 9))
([0, 0, 0], [7, 8, 9])
>>> sliceToRoi(slice(2, 5), (7, 8, 9))
([2, 0, 0], [5, 8, 9])
>>> sliceToRoi(slice(2, 42), (7, 8, 9))
([2, 0, 0], [7, 8, 9])
>>> sliceToRoi(slice(-1, 5), (7, 8, 9))
([6, 0, 0], [5, 8, 9])
>>> sliceToRoi(slice(-42, 42), (7, 8, 9))
([0, 0, 0], [7, 8, 9])
>>> sliceToRoi(slice(None, None, 2), (7, 8, 9)) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError
Multi indexing:
>>> sliceToRoi((), (7, 8, 9))
([0, 0, 0], [7, 8, 9])
>>> sliceToRoi((1,), (7, 8, 9))
([1, 0, 0], [2, 8, 9])
>>> sliceToRoi((1, 2), (7, 8, 9))
([1, 2, 0], [2, 3, 9])
>>> sliceToRoi([1, 2], (7, 8, 9))
([1, 2, 0], [2, 3, 9])
>>> sliceToRoi((slice(2, 5), slice(3, 6), 5), (7, 8, 9))
([2, 3, 5], [5, 6, 6])
>>> sliceToRoi((1, 2, 3, 4), (7, 8, 9)) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError
Ellipsis indexing:
>>> sliceToRoi((1, ..., 5), (7, 8, 9))
([1, 0, 5], [2, 8, 6])
>>> sliceToRoi((..., slice(2, 5)), (7, 8, 9))
([0, 0, 2], [7, 8, 5])
>>> sliceToRoi((..., ...), (7, 8, 9)) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError
Zero-dimensional shape:
>>> sliceToRoi((), ())
([], [])
>>> sliceToRoi((1, 2, 3), ())
([], [])
|
Convert slicing to ROI.
|
[
"Convert",
"slicing",
"to",
"ROI",
"."
] |
def sliceToRoi(
slicing: Union[numbers.Integral, slice, "ellipsis", Sequence[Union[numbers.Integral, slice, "ellipsis"]]],
shape: Sequence[numbers.Integral],
*,
extendSingleton: bool = True,
) -> Tuple[Sequence[int], Sequence[int]]:
"""Convert slicing to ROI.
Negative indices and slices are allowed similarly to usual Python
and NumPy indexing. None (newaxis) objects are not allowed.
As a special case, passing a 0-dimensional shape ``()`` always
returns 0-dimensional ROI, regardless of the value of `slicing`.
Args:
slicing: Valid slicing.
shape: Shape of the target array.
extendSingleton: Whether to keep dimensions of int indices
(similar to negated ``keepdims`` from some NumPy functions,
see the examples).
Returns:
``(start, stop)`` pair.
Raises:
ValueError: Slicing is not valid (either by itself or in
combination with the given shape).
Examples:
Point indexing:
>>> sliceToRoi(1, (7, 8, 9))
([1, 0, 0], [2, 8, 9])
>>> sliceToRoi(-1, (7, 8, 9))
([6, 0, 0], [7, 8, 9])
>>> sliceToRoi(1, (7, 8, 9), extendSingleton=False)
([1, 0, 0], [1, 8, 9])
>>> sliceToRoi(42, (7, 8, 9)) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError
Slice indexing:
>>> sliceToRoi(slice(None, None), (7, 8, 9))
([0, 0, 0], [7, 8, 9])
>>> sliceToRoi(slice(2, 5), (7, 8, 9))
([2, 0, 0], [5, 8, 9])
>>> sliceToRoi(slice(2, 42), (7, 8, 9))
([2, 0, 0], [7, 8, 9])
>>> sliceToRoi(slice(-1, 5), (7, 8, 9))
([6, 0, 0], [5, 8, 9])
>>> sliceToRoi(slice(-42, 42), (7, 8, 9))
([0, 0, 0], [7, 8, 9])
>>> sliceToRoi(slice(None, None, 2), (7, 8, 9)) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError
Multi indexing:
>>> sliceToRoi((), (7, 8, 9))
([0, 0, 0], [7, 8, 9])
>>> sliceToRoi((1,), (7, 8, 9))
([1, 0, 0], [2, 8, 9])
>>> sliceToRoi((1, 2), (7, 8, 9))
([1, 2, 0], [2, 3, 9])
>>> sliceToRoi([1, 2], (7, 8, 9))
([1, 2, 0], [2, 3, 9])
>>> sliceToRoi((slice(2, 5), slice(3, 6), 5), (7, 8, 9))
([2, 3, 5], [5, 6, 6])
>>> sliceToRoi((1, 2, 3, 4), (7, 8, 9)) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError
Ellipsis indexing:
>>> sliceToRoi((1, ..., 5), (7, 8, 9))
([1, 0, 5], [2, 8, 6])
>>> sliceToRoi((..., slice(2, 5)), (7, 8, 9))
([0, 0, 2], [7, 8, 5])
>>> sliceToRoi((..., ...), (7, 8, 9)) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError
Zero-dimensional shape:
>>> sliceToRoi((), ())
([], [])
>>> sliceToRoi((1, 2, 3), ())
([], [])
"""
if not shape:
return TinyVector(), TinyVector()
try:
slicing = list(slicing)
except TypeError:
slicing = [slicing]
try:
i = slicing.index(Ellipsis)
slicing[i : i + 1] = [slice(None)] * (len(shape) - len(slicing) + 1)
except ValueError:
pass
if Ellipsis in slicing:
raise ValueError("an index can only have a single ellipsis ('...')")
if len(slicing) > len(shape):
raise ValueError("too many indices for array")
slicing += [slice(None)] * (len(shape) - len(slicing))
roi_start = TinyVector()
roi_stop = TinyVector()
for i, (idx, dim) in enumerate(zip(slicing, shape)):
if isinstance(idx, numbers.Integral):
idx = int(idx)
if idx not in range(-dim, dim):
raise ValueError(f"index {idx} is out of bounds for axis {i} with size {dim}")
if idx < 0:
idx += dim
start, stop = idx, idx
if extendSingleton:
stop += 1
elif isinstance(idx, slice):
start, stop, step = idx.indices(dim)
if step != 1:
raise ValueError(f"slice {idx} has non-contiguous stride for axis {i}")
else:
raise ValueError("only integers, slices (`:`) and ellipsis (`...`) are valid indices")
roi_start.append(start)
roi_stop.append(stop)
return roi_start, roi_stop
|
[
"def",
"sliceToRoi",
"(",
"slicing",
":",
"Union",
"[",
"numbers",
".",
"Integral",
",",
"slice",
",",
"\"ellipsis\"",
",",
"Sequence",
"[",
"Union",
"[",
"numbers",
".",
"Integral",
",",
"slice",
",",
"\"ellipsis\"",
"]",
"]",
"]",
",",
"shape",
":",
"Sequence",
"[",
"numbers",
".",
"Integral",
"]",
",",
"*",
",",
"extendSingleton",
":",
"bool",
"=",
"True",
",",
")",
"->",
"Tuple",
"[",
"Sequence",
"[",
"int",
"]",
",",
"Sequence",
"[",
"int",
"]",
"]",
":",
"if",
"not",
"shape",
":",
"return",
"TinyVector",
"(",
")",
",",
"TinyVector",
"(",
")",
"try",
":",
"slicing",
"=",
"list",
"(",
"slicing",
")",
"except",
"TypeError",
":",
"slicing",
"=",
"[",
"slicing",
"]",
"try",
":",
"i",
"=",
"slicing",
".",
"index",
"(",
"Ellipsis",
")",
"slicing",
"[",
"i",
":",
"i",
"+",
"1",
"]",
"=",
"[",
"slice",
"(",
"None",
")",
"]",
"*",
"(",
"len",
"(",
"shape",
")",
"-",
"len",
"(",
"slicing",
")",
"+",
"1",
")",
"except",
"ValueError",
":",
"pass",
"if",
"Ellipsis",
"in",
"slicing",
":",
"raise",
"ValueError",
"(",
"\"an index can only have a single ellipsis ('...')\"",
")",
"if",
"len",
"(",
"slicing",
")",
">",
"len",
"(",
"shape",
")",
":",
"raise",
"ValueError",
"(",
"\"too many indices for array\"",
")",
"slicing",
"+=",
"[",
"slice",
"(",
"None",
")",
"]",
"*",
"(",
"len",
"(",
"shape",
")",
"-",
"len",
"(",
"slicing",
")",
")",
"roi_start",
"=",
"TinyVector",
"(",
")",
"roi_stop",
"=",
"TinyVector",
"(",
")",
"for",
"i",
",",
"(",
"idx",
",",
"dim",
")",
"in",
"enumerate",
"(",
"zip",
"(",
"slicing",
",",
"shape",
")",
")",
":",
"if",
"isinstance",
"(",
"idx",
",",
"numbers",
".",
"Integral",
")",
":",
"idx",
"=",
"int",
"(",
"idx",
")",
"if",
"idx",
"not",
"in",
"range",
"(",
"-",
"dim",
",",
"dim",
")",
":",
"raise",
"ValueError",
"(",
"f\"index {idx} is out of bounds for axis {i} with size {dim}\"",
")",
"if",
"idx",
"<",
"0",
":",
"idx",
"+=",
"dim",
"start",
",",
"stop",
"=",
"idx",
",",
"idx",
"if",
"extendSingleton",
":",
"stop",
"+=",
"1",
"elif",
"isinstance",
"(",
"idx",
",",
"slice",
")",
":",
"start",
",",
"stop",
",",
"step",
"=",
"idx",
".",
"indices",
"(",
"dim",
")",
"if",
"step",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"f\"slice {idx} has non-contiguous stride for axis {i}\"",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"only integers, slices (`:`) and ellipsis (`...`) are valid indices\"",
")",
"roi_start",
".",
"append",
"(",
"start",
")",
"roi_stop",
".",
"append",
"(",
"stop",
")",
"return",
"roi_start",
",",
"roi_stop"
] |
https://github.com/ilastik/ilastik/blob/6acd2c554bc517e9c8ddad3623a7aaa2e6970c28/lazyflow/roi.py#L252-L394
|
|
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/sympy/physics/secondquant.py
|
python
|
_SymbolFactory._next
|
(self)
|
return s
|
Generates the next symbols and increments counter by 1.
|
Generates the next symbols and increments counter by 1.
|
[
"Generates",
"the",
"next",
"symbols",
"and",
"increments",
"counter",
"by",
"1",
"."
] |
def _next(self):
"""
Generates the next symbols and increments counter by 1.
"""
s = Symbol("%s%i" % (self._label, self._counterVar))
self._counterVar += 1
return s
|
[
"def",
"_next",
"(",
"self",
")",
":",
"s",
"=",
"Symbol",
"(",
"\"%s%i\"",
"%",
"(",
"self",
".",
"_label",
",",
"self",
".",
"_counterVar",
")",
")",
"self",
".",
"_counterVar",
"+=",
"1",
"return",
"s"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/physics/secondquant.py#L2741-L2747
|
|
TencentCloud/tencentcloud-sdk-python
|
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
|
tencentcloud/tdmq/v20200217/models.py
|
python
|
AMQPRouteRelation.__init__
|
(self)
|
r"""
:param RouteRelationId: 路由关系ID
:type RouteRelationId: str
:param SourceExchange: 源Exchange
:type SourceExchange: str
:param DestType: 目标类型:Queue|Exchange
:type DestType: str
:param DestValue: 目标值
:type DestValue: str
:param RoutingKey: 绑定key
:type RoutingKey: str
:param SourceExchangeType: 源路由类型:Direct|Topic|Fanout
:type SourceExchangeType: str
:param CreateTime: 创建时间,以毫秒为单位
:type CreateTime: int
:param UpdateTime: 修改时间,以毫秒为单位
:type UpdateTime: int
:param Remark: 说明信息
注意:此字段可能返回 null,表示取不到有效值。
:type Remark: str
|
r"""
:param RouteRelationId: 路由关系ID
:type RouteRelationId: str
:param SourceExchange: 源Exchange
:type SourceExchange: str
:param DestType: 目标类型:Queue|Exchange
:type DestType: str
:param DestValue: 目标值
:type DestValue: str
:param RoutingKey: 绑定key
:type RoutingKey: str
:param SourceExchangeType: 源路由类型:Direct|Topic|Fanout
:type SourceExchangeType: str
:param CreateTime: 创建时间,以毫秒为单位
:type CreateTime: int
:param UpdateTime: 修改时间,以毫秒为单位
:type UpdateTime: int
:param Remark: 说明信息
注意:此字段可能返回 null,表示取不到有效值。
:type Remark: str
|
[
"r",
":",
"param",
"RouteRelationId",
":",
"路由关系ID",
":",
"type",
"RouteRelationId",
":",
"str",
":",
"param",
"SourceExchange",
":",
"源Exchange",
":",
"type",
"SourceExchange",
":",
"str",
":",
"param",
"DestType",
":",
"目标类型",
":",
"Queue|Exchange",
":",
"type",
"DestType",
":",
"str",
":",
"param",
"DestValue",
":",
"目标值",
":",
"type",
"DestValue",
":",
"str",
":",
"param",
"RoutingKey",
":",
"绑定key",
":",
"type",
"RoutingKey",
":",
"str",
":",
"param",
"SourceExchangeType",
":",
"源路由类型",
":",
"Direct|Topic|Fanout",
":",
"type",
"SourceExchangeType",
":",
"str",
":",
"param",
"CreateTime",
":",
"创建时间,以毫秒为单位",
":",
"type",
"CreateTime",
":",
"int",
":",
"param",
"UpdateTime",
":",
"修改时间,以毫秒为单位",
":",
"type",
"UpdateTime",
":",
"int",
":",
"param",
"Remark",
":",
"说明信息",
"注意:此字段可能返回",
"null,表示取不到有效值。",
":",
"type",
"Remark",
":",
"str"
] |
def __init__(self):
r"""
:param RouteRelationId: 路由关系ID
:type RouteRelationId: str
:param SourceExchange: 源Exchange
:type SourceExchange: str
:param DestType: 目标类型:Queue|Exchange
:type DestType: str
:param DestValue: 目标值
:type DestValue: str
:param RoutingKey: 绑定key
:type RoutingKey: str
:param SourceExchangeType: 源路由类型:Direct|Topic|Fanout
:type SourceExchangeType: str
:param CreateTime: 创建时间,以毫秒为单位
:type CreateTime: int
:param UpdateTime: 修改时间,以毫秒为单位
:type UpdateTime: int
:param Remark: 说明信息
注意:此字段可能返回 null,表示取不到有效值。
:type Remark: str
"""
self.RouteRelationId = None
self.SourceExchange = None
self.DestType = None
self.DestValue = None
self.RoutingKey = None
self.SourceExchangeType = None
self.CreateTime = None
self.UpdateTime = None
self.Remark = None
|
[
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"RouteRelationId",
"=",
"None",
"self",
".",
"SourceExchange",
"=",
"None",
"self",
".",
"DestType",
"=",
"None",
"self",
".",
"DestValue",
"=",
"None",
"self",
".",
"RoutingKey",
"=",
"None",
"self",
".",
"SourceExchangeType",
"=",
"None",
"self",
".",
"CreateTime",
"=",
"None",
"self",
".",
"UpdateTime",
"=",
"None",
"self",
".",
"Remark",
"=",
"None"
] |
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/tdmq/v20200217/models.py#L338-L368
|
||
selfteaching/selfteaching-python-camp
|
9982ee964b984595e7d664b07c389cddaf158f1e
|
19100205/Ceasar1978/pip-19.0.3/src/pip/_vendor/requests/_internal_utils.py
|
python
|
to_native_string
|
(string, encoding='ascii')
|
return out
|
Given a string object, regardless of type, returns a representation of
that string in the native string type, encoding and decoding where
necessary. This assumes ASCII unless told otherwise.
|
Given a string object, regardless of type, returns a representation of
that string in the native string type, encoding and decoding where
necessary. This assumes ASCII unless told otherwise.
|
[
"Given",
"a",
"string",
"object",
"regardless",
"of",
"type",
"returns",
"a",
"representation",
"of",
"that",
"string",
"in",
"the",
"native",
"string",
"type",
"encoding",
"and",
"decoding",
"where",
"necessary",
".",
"This",
"assumes",
"ASCII",
"unless",
"told",
"otherwise",
"."
] |
def to_native_string(string, encoding='ascii'):
"""Given a string object, regardless of type, returns a representation of
that string in the native string type, encoding and decoding where
necessary. This assumes ASCII unless told otherwise.
"""
if isinstance(string, builtin_str):
out = string
else:
if is_py2:
out = string.encode(encoding)
else:
out = string.decode(encoding)
return out
|
[
"def",
"to_native_string",
"(",
"string",
",",
"encoding",
"=",
"'ascii'",
")",
":",
"if",
"isinstance",
"(",
"string",
",",
"builtin_str",
")",
":",
"out",
"=",
"string",
"else",
":",
"if",
"is_py2",
":",
"out",
"=",
"string",
".",
"encode",
"(",
"encoding",
")",
"else",
":",
"out",
"=",
"string",
".",
"decode",
"(",
"encoding",
")",
"return",
"out"
] |
https://github.com/selfteaching/selfteaching-python-camp/blob/9982ee964b984595e7d664b07c389cddaf158f1e/19100205/Ceasar1978/pip-19.0.3/src/pip/_vendor/requests/_internal_utils.py#L14-L27
|
|
edisonlz/fastor
|
342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3
|
base/site-packages/androguard/core/bytecodes/dvm.py
|
python
|
EncodedTypeAddrPair.get_type_idx
|
(self)
|
return self.type_idx
|
Return the index into the type_ids list for the type of the exception to catch
:rtype: int
|
Return the index into the type_ids list for the type of the exception to catch
|
[
"Return",
"the",
"index",
"into",
"the",
"type_ids",
"list",
"for",
"the",
"type",
"of",
"the",
"exception",
"to",
"catch"
] |
def get_type_idx(self) :
"""
Return the index into the type_ids list for the type of the exception to catch
:rtype: int
"""
return self.type_idx
|
[
"def",
"get_type_idx",
"(",
"self",
")",
":",
"return",
"self",
".",
"type_idx"
] |
https://github.com/edisonlz/fastor/blob/342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3/base/site-packages/androguard/core/bytecodes/dvm.py#L3461-L3467
|
|
yanzhou/CnkiSpider
|
348d7114f3ffee7b0a134cf6c5d01150433f3fde
|
src/bs4/diagnose.py
|
python
|
rword
|
(length=5)
|
return s
|
Generate a random word-like string.
|
Generate a random word-like string.
|
[
"Generate",
"a",
"random",
"word",
"-",
"like",
"string",
"."
] |
def rword(length=5):
"Generate a random word-like string."
s = ''
for i in range(length):
if i % 2 == 0:
t = _consonants
else:
t = _vowels
s += random.choice(t)
return s
|
[
"def",
"rword",
"(",
"length",
"=",
"5",
")",
":",
"s",
"=",
"''",
"for",
"i",
"in",
"range",
"(",
"length",
")",
":",
"if",
"i",
"%",
"2",
"==",
"0",
":",
"t",
"=",
"_consonants",
"else",
":",
"t",
"=",
"_vowels",
"s",
"+=",
"random",
".",
"choice",
"(",
"t",
")",
"return",
"s"
] |
https://github.com/yanzhou/CnkiSpider/blob/348d7114f3ffee7b0a134cf6c5d01150433f3fde/src/bs4/diagnose.py#L124-L133
|
|
theotherp/nzbhydra
|
4b03d7f769384b97dfc60dade4806c0fc987514e
|
libs/platform.py
|
python
|
dist
|
(distname='',version='',id='',
supported_dists=_supported_dists)
|
return linux_distribution(distname, version, id,
supported_dists=supported_dists,
full_distribution_name=0)
|
Tries to determine the name of the Linux OS distribution name.
The function first looks for a distribution release file in
/etc and then reverts to _dist_try_harder() in case no
suitable files are found.
Returns a tuple (distname,version,id) which default to the
args given as parameters.
|
Tries to determine the name of the Linux OS distribution name.
|
[
"Tries",
"to",
"determine",
"the",
"name",
"of",
"the",
"Linux",
"OS",
"distribution",
"name",
"."
] |
def dist(distname='',version='',id='',
supported_dists=_supported_dists):
""" Tries to determine the name of the Linux OS distribution name.
The function first looks for a distribution release file in
/etc and then reverts to _dist_try_harder() in case no
suitable files are found.
Returns a tuple (distname,version,id) which default to the
args given as parameters.
"""
return linux_distribution(distname, version, id,
supported_dists=supported_dists,
full_distribution_name=0)
|
[
"def",
"dist",
"(",
"distname",
"=",
"''",
",",
"version",
"=",
"''",
",",
"id",
"=",
"''",
",",
"supported_dists",
"=",
"_supported_dists",
")",
":",
"return",
"linux_distribution",
"(",
"distname",
",",
"version",
",",
"id",
",",
"supported_dists",
"=",
"supported_dists",
",",
"full_distribution_name",
"=",
"0",
")"
] |
https://github.com/theotherp/nzbhydra/blob/4b03d7f769384b97dfc60dade4806c0fc987514e/libs/platform.py#L347-L363
|
|
XX-net/XX-Net
|
a9898cfcf0084195fb7e69b6bc834e59aecdf14f
|
python3.8.2/Lib/site-packages/OpenSSL/SSL.py
|
python
|
Connection.accept
|
(self)
|
return (conn, addr)
|
Call the :meth:`accept` method of the underlying socket and set up SSL
on the returned socket, using the Context object supplied to this
:class:`Connection` object at creation.
:return: A *(conn, addr)* pair where *conn* is the new
:class:`Connection` object created, and *address* is as returned by
the socket's :meth:`accept`.
|
Call the :meth:`accept` method of the underlying socket and set up SSL
on the returned socket, using the Context object supplied to this
:class:`Connection` object at creation.
|
[
"Call",
"the",
":",
"meth",
":",
"accept",
"method",
"of",
"the",
"underlying",
"socket",
"and",
"set",
"up",
"SSL",
"on",
"the",
"returned",
"socket",
"using",
"the",
"Context",
"object",
"supplied",
"to",
"this",
":",
"class",
":",
"Connection",
"object",
"at",
"creation",
"."
] |
def accept(self):
"""
Call the :meth:`accept` method of the underlying socket and set up SSL
on the returned socket, using the Context object supplied to this
:class:`Connection` object at creation.
:return: A *(conn, addr)* pair where *conn* is the new
:class:`Connection` object created, and *address* is as returned by
the socket's :meth:`accept`.
"""
client, addr = self._socket.accept()
conn = Connection(self._context, client)
conn.set_accept_state()
return (conn, addr)
|
[
"def",
"accept",
"(",
"self",
")",
":",
"client",
",",
"addr",
"=",
"self",
".",
"_socket",
".",
"accept",
"(",
")",
"conn",
"=",
"Connection",
"(",
"self",
".",
"_context",
",",
"client",
")",
"conn",
".",
"set_accept_state",
"(",
")",
"return",
"(",
"conn",
",",
"addr",
")"
] |
https://github.com/XX-net/XX-Net/blob/a9898cfcf0084195fb7e69b6bc834e59aecdf14f/python3.8.2/Lib/site-packages/OpenSSL/SSL.py#L1981-L1994
|
|
007gzs/dingtalk-sdk
|
7979da2e259fdbc571728cae2425a04dbc65850a
|
dingtalk/client/api/taobao.py
|
python
|
TbGuoNeiJiPiaoDingDan.taobao_alitrip_seller_refund_get
|
(
self,
apply_id
)
|
return self._top_request(
"taobao.alitrip.seller.refund.get",
{
"apply_id": apply_id
}
)
|
【机票代理商】退票申请单详情
查询退票申请单详情
文档地址:https://open-doc.dingtalk.com/docs/api.htm?apiId=26087
:param apply_id: 申请单ID
|
【机票代理商】退票申请单详情
查询退票申请单详情
文档地址:https://open-doc.dingtalk.com/docs/api.htm?apiId=26087
|
[
"【机票代理商】退票申请单详情",
"查询退票申请单详情",
"文档地址:https",
":",
"//",
"open",
"-",
"doc",
".",
"dingtalk",
".",
"com",
"/",
"docs",
"/",
"api",
".",
"htm?apiId",
"=",
"26087"
] |
def taobao_alitrip_seller_refund_get(
self,
apply_id
):
"""
【机票代理商】退票申请单详情
查询退票申请单详情
文档地址:https://open-doc.dingtalk.com/docs/api.htm?apiId=26087
:param apply_id: 申请单ID
"""
return self._top_request(
"taobao.alitrip.seller.refund.get",
{
"apply_id": apply_id
}
)
|
[
"def",
"taobao_alitrip_seller_refund_get",
"(",
"self",
",",
"apply_id",
")",
":",
"return",
"self",
".",
"_top_request",
"(",
"\"taobao.alitrip.seller.refund.get\"",
",",
"{",
"\"apply_id\"",
":",
"apply_id",
"}",
")"
] |
https://github.com/007gzs/dingtalk-sdk/blob/7979da2e259fdbc571728cae2425a04dbc65850a/dingtalk/client/api/taobao.py#L76961-L76977
|
|
merixstudio/django-trench
|
27b61479e6d494d7c2e94732c1d186247dac8dd9
|
trench/views/base.py
|
python
|
MFAPrimaryMethodChangeView.post
|
(request: Request)
|
[] |
def post(request: Request) -> Response:
mfa_model = get_mfa_model()
mfa_method_name = mfa_model.objects.get_primary_active_name(
user_id=request.user.id
)
serializer = ChangePrimaryMethodValidator(
user=request.user, mfa_method_name=mfa_method_name, data=request.data
)
serializer.is_valid(raise_exception=True)
try:
set_primary_mfa_method_command(
user_id=request.user.id, name=serializer.validated_data["method"]
)
return Response(status=HTTP_204_NO_CONTENT)
except MFAValidationError as cause:
return ErrorResponse(error=cause)
|
[
"def",
"post",
"(",
"request",
":",
"Request",
")",
"->",
"Response",
":",
"mfa_model",
"=",
"get_mfa_model",
"(",
")",
"mfa_method_name",
"=",
"mfa_model",
".",
"objects",
".",
"get_primary_active_name",
"(",
"user_id",
"=",
"request",
".",
"user",
".",
"id",
")",
"serializer",
"=",
"ChangePrimaryMethodValidator",
"(",
"user",
"=",
"request",
".",
"user",
",",
"mfa_method_name",
"=",
"mfa_method_name",
",",
"data",
"=",
"request",
".",
"data",
")",
"serializer",
".",
"is_valid",
"(",
"raise_exception",
"=",
"True",
")",
"try",
":",
"set_primary_mfa_method_command",
"(",
"user_id",
"=",
"request",
".",
"user",
".",
"id",
",",
"name",
"=",
"serializer",
".",
"validated_data",
"[",
"\"method\"",
"]",
")",
"return",
"Response",
"(",
"status",
"=",
"HTTP_204_NO_CONTENT",
")",
"except",
"MFAValidationError",
"as",
"cause",
":",
"return",
"ErrorResponse",
"(",
"error",
"=",
"cause",
")"
] |
https://github.com/merixstudio/django-trench/blob/27b61479e6d494d7c2e94732c1d186247dac8dd9/trench/views/base.py#L233-L248
|
||||
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/axes/_axes.py
|
python
|
Axes.loglog
|
(self, *args, **kwargs)
|
return l
|
Make a plot with log scaling on both the x and y axis.
Call signatures::
loglog([x], y, [fmt], data=None, **kwargs)
loglog([x], y, [fmt], [x2], y2, [fmt2], ..., **kwargs)
This is just a thin wrapper around `.plot` which additionally changes
both the x-axis and the y-axis to log scaling. All of the concepts and
parameters of plot can be used here as well.
The additional parameters *basex/y*, *subsx/y* and *nonposx/y* control
the x/y-axis properties. They are just forwarded to `.Axes.set_xscale`
and `.Axes.set_yscale`.
Parameters
----------
basex, basey : scalar, optional, default 10
Base of the x/y logarithm.
subsx, subsy : sequence, optional
The location of the minor x/y ticks. If *None*, reasonable
locations are automatically chosen depending on the number of
decades in the plot.
See `.Axes.set_xscale` / `.Axes.set_yscale` for details.
nonposx, nonposy : {'mask', 'clip'}, optional, default 'mask'
Non-positive values in x or y can be masked as invalid, or clipped
to a very small positive number.
Returns
-------
lines
A list of `~.Line2D` objects representing the plotted data.
Other Parameters
----------------
**kwargs
All parameters supported by `.plot`.
|
Make a plot with log scaling on both the x and y axis.
|
[
"Make",
"a",
"plot",
"with",
"log",
"scaling",
"on",
"both",
"the",
"x",
"and",
"y",
"axis",
"."
] |
def loglog(self, *args, **kwargs):
"""
Make a plot with log scaling on both the x and y axis.
Call signatures::
loglog([x], y, [fmt], data=None, **kwargs)
loglog([x], y, [fmt], [x2], y2, [fmt2], ..., **kwargs)
This is just a thin wrapper around `.plot` which additionally changes
both the x-axis and the y-axis to log scaling. All of the concepts and
parameters of plot can be used here as well.
The additional parameters *basex/y*, *subsx/y* and *nonposx/y* control
the x/y-axis properties. They are just forwarded to `.Axes.set_xscale`
and `.Axes.set_yscale`.
Parameters
----------
basex, basey : scalar, optional, default 10
Base of the x/y logarithm.
subsx, subsy : sequence, optional
The location of the minor x/y ticks. If *None*, reasonable
locations are automatically chosen depending on the number of
decades in the plot.
See `.Axes.set_xscale` / `.Axes.set_yscale` for details.
nonposx, nonposy : {'mask', 'clip'}, optional, default 'mask'
Non-positive values in x or y can be masked as invalid, or clipped
to a very small positive number.
Returns
-------
lines
A list of `~.Line2D` objects representing the plotted data.
Other Parameters
----------------
**kwargs
All parameters supported by `.plot`.
"""
dx = {k: kwargs.pop(k) for k in ['basex', 'subsx', 'nonposx']
if k in kwargs}
dy = {k: kwargs.pop(k) for k in ['basey', 'subsy', 'nonposy']
if k in kwargs}
self.set_xscale('log', **dx)
self.set_yscale('log', **dy)
l = self.plot(*args, **kwargs)
return l
|
[
"def",
"loglog",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"dx",
"=",
"{",
"k",
":",
"kwargs",
".",
"pop",
"(",
"k",
")",
"for",
"k",
"in",
"[",
"'basex'",
",",
"'subsx'",
",",
"'nonposx'",
"]",
"if",
"k",
"in",
"kwargs",
"}",
"dy",
"=",
"{",
"k",
":",
"kwargs",
".",
"pop",
"(",
"k",
")",
"for",
"k",
"in",
"[",
"'basey'",
",",
"'subsy'",
",",
"'nonposy'",
"]",
"if",
"k",
"in",
"kwargs",
"}",
"self",
".",
"set_xscale",
"(",
"'log'",
",",
"*",
"*",
"dx",
")",
"self",
".",
"set_yscale",
"(",
"'log'",
",",
"*",
"*",
"dy",
")",
"l",
"=",
"self",
".",
"plot",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"l"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/axes/_axes.py#L1697-L1748
|
|
exodrifter/unity-python
|
bef6e4e9ddfbbf1eaf7acbbb973e9aa3dd64a20d
|
Lib/distutils/ccompiler.py
|
python
|
CCompiler.create_static_lib
|
(self, objects, output_libname, output_dir=None,
debug=0, target_lang=None)
|
Link a bunch of stuff together to create a static library file.
The "bunch of stuff" consists of the list of object files supplied
as 'objects', the extra object files supplied to
'add_link_object()' and/or 'set_link_objects()', the libraries
supplied to 'add_library()' and/or 'set_libraries()', and the
libraries supplied as 'libraries' (if any).
'output_libname' should be a library name, not a filename; the
filename will be inferred from the library name. 'output_dir' is
the directory where the library file will be put.
'debug' is a boolean; if true, debugging information will be
included in the library (note that on most platforms, it is the
compile step where this matters: the 'debug' flag is included here
just for consistency).
'target_lang' is the target language for which the given objects
are being compiled. This allows specific linkage time treatment of
certain languages.
Raises LibError on failure.
|
Link a bunch of stuff together to create a static library file.
The "bunch of stuff" consists of the list of object files supplied
as 'objects', the extra object files supplied to
'add_link_object()' and/or 'set_link_objects()', the libraries
supplied to 'add_library()' and/or 'set_libraries()', and the
libraries supplied as 'libraries' (if any).
|
[
"Link",
"a",
"bunch",
"of",
"stuff",
"together",
"to",
"create",
"a",
"static",
"library",
"file",
".",
"The",
"bunch",
"of",
"stuff",
"consists",
"of",
"the",
"list",
"of",
"object",
"files",
"supplied",
"as",
"objects",
"the",
"extra",
"object",
"files",
"supplied",
"to",
"add_link_object",
"()",
"and",
"/",
"or",
"set_link_objects",
"()",
"the",
"libraries",
"supplied",
"to",
"add_library",
"()",
"and",
"/",
"or",
"set_libraries",
"()",
"and",
"the",
"libraries",
"supplied",
"as",
"libraries",
"(",
"if",
"any",
")",
"."
] |
def create_static_lib(self, objects, output_libname, output_dir=None,
debug=0, target_lang=None):
"""Link a bunch of stuff together to create a static library file.
The "bunch of stuff" consists of the list of object files supplied
as 'objects', the extra object files supplied to
'add_link_object()' and/or 'set_link_objects()', the libraries
supplied to 'add_library()' and/or 'set_libraries()', and the
libraries supplied as 'libraries' (if any).
'output_libname' should be a library name, not a filename; the
filename will be inferred from the library name. 'output_dir' is
the directory where the library file will be put.
'debug' is a boolean; if true, debugging information will be
included in the library (note that on most platforms, it is the
compile step where this matters: the 'debug' flag is included here
just for consistency).
'target_lang' is the target language for which the given objects
are being compiled. This allows specific linkage time treatment of
certain languages.
Raises LibError on failure.
"""
pass
|
[
"def",
"create_static_lib",
"(",
"self",
",",
"objects",
",",
"output_libname",
",",
"output_dir",
"=",
"None",
",",
"debug",
"=",
"0",
",",
"target_lang",
"=",
"None",
")",
":",
"pass"
] |
https://github.com/exodrifter/unity-python/blob/bef6e4e9ddfbbf1eaf7acbbb973e9aa3dd64a20d/Lib/distutils/ccompiler.py#L586-L610
|
||
securityclippy/elasticintel
|
aa08d3e9f5ab1c000128e95161139ce97ff0e334
|
ingest_feed_lambda/numpy/ma/extras.py
|
python
|
_ezclump
|
(mask)
|
return r
|
Finds the clumps (groups of data with the same values) for a 1D bool array.
Returns a series of slices.
|
Finds the clumps (groups of data with the same values) for a 1D bool array.
|
[
"Finds",
"the",
"clumps",
"(",
"groups",
"of",
"data",
"with",
"the",
"same",
"values",
")",
"for",
"a",
"1D",
"bool",
"array",
"."
] |
def _ezclump(mask):
"""
Finds the clumps (groups of data with the same values) for a 1D bool array.
Returns a series of slices.
"""
if mask.ndim > 1:
mask = mask.ravel()
idx = (mask[1:] ^ mask[:-1]).nonzero()
idx = idx[0] + 1
if mask[0]:
if len(idx) == 0:
return [slice(0, mask.size)]
r = [slice(0, idx[0])]
r.extend((slice(left, right)
for left, right in zip(idx[1:-1:2], idx[2::2])))
else:
if len(idx) == 0:
return []
r = [slice(left, right) for left, right in zip(idx[:-1:2], idx[1::2])]
if mask[-1]:
r.append(slice(idx[-1], mask.size))
return r
|
[
"def",
"_ezclump",
"(",
"mask",
")",
":",
"if",
"mask",
".",
"ndim",
">",
"1",
":",
"mask",
"=",
"mask",
".",
"ravel",
"(",
")",
"idx",
"=",
"(",
"mask",
"[",
"1",
":",
"]",
"^",
"mask",
"[",
":",
"-",
"1",
"]",
")",
".",
"nonzero",
"(",
")",
"idx",
"=",
"idx",
"[",
"0",
"]",
"+",
"1",
"if",
"mask",
"[",
"0",
"]",
":",
"if",
"len",
"(",
"idx",
")",
"==",
"0",
":",
"return",
"[",
"slice",
"(",
"0",
",",
"mask",
".",
"size",
")",
"]",
"r",
"=",
"[",
"slice",
"(",
"0",
",",
"idx",
"[",
"0",
"]",
")",
"]",
"r",
".",
"extend",
"(",
"(",
"slice",
"(",
"left",
",",
"right",
")",
"for",
"left",
",",
"right",
"in",
"zip",
"(",
"idx",
"[",
"1",
":",
"-",
"1",
":",
"2",
"]",
",",
"idx",
"[",
"2",
":",
":",
"2",
"]",
")",
")",
")",
"else",
":",
"if",
"len",
"(",
"idx",
")",
"==",
"0",
":",
"return",
"[",
"]",
"r",
"=",
"[",
"slice",
"(",
"left",
",",
"right",
")",
"for",
"left",
",",
"right",
"in",
"zip",
"(",
"idx",
"[",
":",
"-",
"1",
":",
"2",
"]",
",",
"idx",
"[",
"1",
":",
":",
"2",
"]",
")",
"]",
"if",
"mask",
"[",
"-",
"1",
"]",
":",
"r",
".",
"append",
"(",
"slice",
"(",
"idx",
"[",
"-",
"1",
"]",
",",
"mask",
".",
"size",
")",
")",
"return",
"r"
] |
https://github.com/securityclippy/elasticintel/blob/aa08d3e9f5ab1c000128e95161139ce97ff0e334/ingest_feed_lambda/numpy/ma/extras.py#L1722-L1748
|
|
google/coursebuilder-core
|
08f809db3226d9269e30d5edd0edd33bd22041f4
|
coursebuilder/models/data_removal.py
|
python
|
Registry.register_indexed_by_user_id_remover
|
(cls, remover)
|
Register a function that can remove instances by user_id.
These items are treated differently from un-indexed items, because we
want to be able to very rapidly remove the bulk of the data for a
given user. Items that are keyed or indexed by user ID tend to
contain more sensitive PII; non-indexed items will generally be more
along the lines of user events, etc.
Also, immediately removing the user record will prevent re-login, and
that's important for giving users the strong feedback that on
un-register they really have had their stuff removed.
Args:
remover: A function to remove DB instances that are indexable by
user ID. The function must take exactly one parameter: The
string constituting the user_id. (This is the string
returned from users.get_current_user().user_id()).
|
Register a function that can remove instances by user_id.
|
[
"Register",
"a",
"function",
"that",
"can",
"remove",
"instances",
"by",
"user_id",
"."
] |
def register_indexed_by_user_id_remover(cls, remover):
"""Register a function that can remove instances by user_id.
These items are treated differently from un-indexed items, because we
want to be able to very rapidly remove the bulk of the data for a
given user. Items that are keyed or indexed by user ID tend to
contain more sensitive PII; non-indexed items will generally be more
along the lines of user events, etc.
Also, immediately removing the user record will prevent re-login, and
that's important for giving users the strong feedback that on
un-register they really have had their stuff removed.
Args:
remover: A function to remove DB instances that are indexable by
user ID. The function must take exactly one parameter: The
string constituting the user_id. (This is the string
returned from users.get_current_user().user_id()).
"""
cls._remove_by_user_id_functions.append(remover)
|
[
"def",
"register_indexed_by_user_id_remover",
"(",
"cls",
",",
"remover",
")",
":",
"cls",
".",
"_remove_by_user_id_functions",
".",
"append",
"(",
"remover",
")"
] |
https://github.com/google/coursebuilder-core/blob/08f809db3226d9269e30d5edd0edd33bd22041f4/coursebuilder/models/data_removal.py#L49-L68
|
||
eugene-eeo/graphlite
|
8d17e9549ee8610570dcde1b427431a2584395b7
|
graphlite/transaction.py
|
python
|
Transaction.abort
|
(self)
|
Raises an ``AbortSignal``. If you used the
``Graph.transaction`` context manager this
exception is automatically caught and ignored.
|
Raises an ``AbortSignal``. If you used the
``Graph.transaction`` context manager this
exception is automatically caught and ignored.
|
[
"Raises",
"an",
"AbortSignal",
".",
"If",
"you",
"used",
"the",
"Graph",
".",
"transaction",
"context",
"manager",
"this",
"exception",
"is",
"automatically",
"caught",
"and",
"ignored",
"."
] |
def abort(self):
"""
Raises an ``AbortSignal``. If you used the
``Graph.transaction`` context manager this
exception is automatically caught and ignored.
"""
self.clear()
raise AbortSignal
|
[
"def",
"abort",
"(",
"self",
")",
":",
"self",
".",
"clear",
"(",
")",
"raise",
"AbortSignal"
] |
https://github.com/eugene-eeo/graphlite/blob/8d17e9549ee8610570dcde1b427431a2584395b7/graphlite/transaction.py#L67-L74
|
||
4shadoww/hakkuframework
|
409a11fc3819d251f86faa3473439f8c19066a21
|
lib/future/backports/misc.py
|
python
|
ceil
|
(x)
|
return int(oldceil(x))
|
Return the ceiling of x as an int.
This is the smallest integral value >= x.
|
Return the ceiling of x as an int.
This is the smallest integral value >= x.
|
[
"Return",
"the",
"ceiling",
"of",
"x",
"as",
"an",
"int",
".",
"This",
"is",
"the",
"smallest",
"integral",
"value",
">",
"=",
"x",
"."
] |
def ceil(x):
"""
Return the ceiling of x as an int.
This is the smallest integral value >= x.
"""
return int(oldceil(x))
|
[
"def",
"ceil",
"(",
"x",
")",
":",
"return",
"int",
"(",
"oldceil",
"(",
"x",
")",
")"
] |
https://github.com/4shadoww/hakkuframework/blob/409a11fc3819d251f86faa3473439f8c19066a21/lib/future/backports/misc.py#L35-L40
|
|
midgetspy/Sick-Beard
|
171a607e41b7347a74cc815f6ecce7968d9acccf
|
lib/configobj.py
|
python
|
ConfigObj._match_depth
|
(self, sect, depth)
|
Given a section and a depth level, walk back through the sections
parents to see if the depth level matches a previous section.
Return a reference to the right section,
or raise a SyntaxError.
|
Given a section and a depth level, walk back through the sections
parents to see if the depth level matches a previous section.
Return a reference to the right section,
or raise a SyntaxError.
|
[
"Given",
"a",
"section",
"and",
"a",
"depth",
"level",
"walk",
"back",
"through",
"the",
"sections",
"parents",
"to",
"see",
"if",
"the",
"depth",
"level",
"matches",
"a",
"previous",
"section",
".",
"Return",
"a",
"reference",
"to",
"the",
"right",
"section",
"or",
"raise",
"a",
"SyntaxError",
"."
] |
def _match_depth(self, sect, depth):
"""
Given a section and a depth level, walk back through the sections
parents to see if the depth level matches a previous section.
Return a reference to the right section,
or raise a SyntaxError.
"""
while depth < sect.depth:
if sect is sect.parent:
# we've reached the top level already
raise SyntaxError()
sect = sect.parent
if sect.depth == depth:
return sect
# shouldn't get here
raise SyntaxError()
|
[
"def",
"_match_depth",
"(",
"self",
",",
"sect",
",",
"depth",
")",
":",
"while",
"depth",
"<",
"sect",
".",
"depth",
":",
"if",
"sect",
"is",
"sect",
".",
"parent",
":",
"# we've reached the top level already",
"raise",
"SyntaxError",
"(",
")",
"sect",
"=",
"sect",
".",
"parent",
"if",
"sect",
".",
"depth",
"==",
"depth",
":",
"return",
"sect",
"# shouldn't get here",
"raise",
"SyntaxError",
"(",
")"
] |
https://github.com/midgetspy/Sick-Beard/blob/171a607e41b7347a74cc815f6ecce7968d9acccf/lib/configobj.py#L1666-L1682
|
||
biopython/biopython
|
2dd97e71762af7b046d7f7f8a4f1e38db6b06c86
|
Bio/Graphics/GenomeDiagram/_CircularDrawer.py
|
python
|
CircularDrawer._draw_arc_poly
|
(
self,
inner_radius,
outer_radius,
inner_startangle,
inner_endangle,
outer_startangle,
outer_endangle,
color,
border=None,
flip=False,
**kwargs,
)
|
Return polygon path describing an arc.
|
Return polygon path describing an arc.
|
[
"Return",
"polygon",
"path",
"describing",
"an",
"arc",
"."
] |
def _draw_arc_poly(
self,
inner_radius,
outer_radius,
inner_startangle,
inner_endangle,
outer_startangle,
outer_endangle,
color,
border=None,
flip=False,
**kwargs,
):
"""Return polygon path describing an arc."""
strokecolor, color = _stroke_and_fill_colors(color, border)
x0, y0 = self.xcenter, self.ycenter # origin of the circle
if (
abs(inner_endangle - outer_startangle) > 0.01
or abs(outer_endangle - inner_startangle) > 0.01
or abs(inner_startangle - outer_startangle) > 0.01
or abs(outer_startangle - outer_startangle) > 0.01
):
# Wide arc, must use full curves
p = ArcPath(
strokeColor=strokecolor,
fillColor=color,
# default is mitre/miter which can stick out too much:
strokeLineJoin=1, # 1=round
strokewidth=0,
)
# Note reportlab counts angles anti-clockwise from the horizontal
# (as in mathematics, e.g. complex numbers and polar coordinates)
# but we use clockwise from the vertical. Also reportlab uses
# degrees, but we use radians.
i_start = 90 - (inner_startangle * 180 / pi)
i_end = 90 - (inner_endangle * 180 / pi)
o_start = 90 - (outer_startangle * 180 / pi)
o_end = 90 - (outer_endangle * 180 / pi)
p.addArc(x0, y0, inner_radius, i_end, i_start, moveTo=True, reverse=True)
if flip:
# Flipped, join end to start,
self._draw_arc_line(p, inner_radius, outer_radius, i_end, o_start)
p.addArc(x0, y0, outer_radius, o_end, o_start, reverse=True)
self._draw_arc_line(p, outer_radius, inner_radius, o_end, i_start)
else:
# Not flipped, join start to start, end to end
self._draw_arc_line(p, inner_radius, outer_radius, i_end, o_end)
p.addArc(x0, y0, outer_radius, o_end, o_start, reverse=False)
self._draw_arc_line(p, outer_radius, inner_radius, o_start, i_start)
p.closePath()
return p
else:
# Cheat and just use a four sided polygon.
# Calculate trig values for angle and coordinates
inner_startcos, inner_startsin = (
cos(inner_startangle),
sin(inner_startangle),
)
inner_endcos, inner_endsin = cos(inner_endangle), sin(inner_endangle)
outer_startcos, outer_startsin = (
cos(outer_startangle),
sin(outer_startangle),
)
outer_endcos, outer_endsin = cos(outer_endangle), sin(outer_endangle)
x1, y1 = (
x0 + inner_radius * inner_startsin,
y0 + inner_radius * inner_startcos,
)
x2, y2 = (
x0 + inner_radius * inner_endsin,
y0 + inner_radius * inner_endcos,
)
x3, y3 = (
x0 + outer_radius * outer_endsin,
y0 + outer_radius * outer_endcos,
)
x4, y4 = (
x0 + outer_radius * outer_startsin,
y0 + outer_radius * outer_startcos,
)
return draw_polygon(
[(x1, y1), (x2, y2), (x3, y3), (x4, y4)],
color,
border,
# default is mitre/miter which can stick out too much:
strokeLineJoin=1, # 1=round
)
|
[
"def",
"_draw_arc_poly",
"(",
"self",
",",
"inner_radius",
",",
"outer_radius",
",",
"inner_startangle",
",",
"inner_endangle",
",",
"outer_startangle",
",",
"outer_endangle",
",",
"color",
",",
"border",
"=",
"None",
",",
"flip",
"=",
"False",
",",
"*",
"*",
"kwargs",
",",
")",
":",
"strokecolor",
",",
"color",
"=",
"_stroke_and_fill_colors",
"(",
"color",
",",
"border",
")",
"x0",
",",
"y0",
"=",
"self",
".",
"xcenter",
",",
"self",
".",
"ycenter",
"# origin of the circle",
"if",
"(",
"abs",
"(",
"inner_endangle",
"-",
"outer_startangle",
")",
">",
"0.01",
"or",
"abs",
"(",
"outer_endangle",
"-",
"inner_startangle",
")",
">",
"0.01",
"or",
"abs",
"(",
"inner_startangle",
"-",
"outer_startangle",
")",
">",
"0.01",
"or",
"abs",
"(",
"outer_startangle",
"-",
"outer_startangle",
")",
">",
"0.01",
")",
":",
"# Wide arc, must use full curves",
"p",
"=",
"ArcPath",
"(",
"strokeColor",
"=",
"strokecolor",
",",
"fillColor",
"=",
"color",
",",
"# default is mitre/miter which can stick out too much:",
"strokeLineJoin",
"=",
"1",
",",
"# 1=round",
"strokewidth",
"=",
"0",
",",
")",
"# Note reportlab counts angles anti-clockwise from the horizontal",
"# (as in mathematics, e.g. complex numbers and polar coordinates)",
"# but we use clockwise from the vertical. Also reportlab uses",
"# degrees, but we use radians.",
"i_start",
"=",
"90",
"-",
"(",
"inner_startangle",
"*",
"180",
"/",
"pi",
")",
"i_end",
"=",
"90",
"-",
"(",
"inner_endangle",
"*",
"180",
"/",
"pi",
")",
"o_start",
"=",
"90",
"-",
"(",
"outer_startangle",
"*",
"180",
"/",
"pi",
")",
"o_end",
"=",
"90",
"-",
"(",
"outer_endangle",
"*",
"180",
"/",
"pi",
")",
"p",
".",
"addArc",
"(",
"x0",
",",
"y0",
",",
"inner_radius",
",",
"i_end",
",",
"i_start",
",",
"moveTo",
"=",
"True",
",",
"reverse",
"=",
"True",
")",
"if",
"flip",
":",
"# Flipped, join end to start,",
"self",
".",
"_draw_arc_line",
"(",
"p",
",",
"inner_radius",
",",
"outer_radius",
",",
"i_end",
",",
"o_start",
")",
"p",
".",
"addArc",
"(",
"x0",
",",
"y0",
",",
"outer_radius",
",",
"o_end",
",",
"o_start",
",",
"reverse",
"=",
"True",
")",
"self",
".",
"_draw_arc_line",
"(",
"p",
",",
"outer_radius",
",",
"inner_radius",
",",
"o_end",
",",
"i_start",
")",
"else",
":",
"# Not flipped, join start to start, end to end",
"self",
".",
"_draw_arc_line",
"(",
"p",
",",
"inner_radius",
",",
"outer_radius",
",",
"i_end",
",",
"o_end",
")",
"p",
".",
"addArc",
"(",
"x0",
",",
"y0",
",",
"outer_radius",
",",
"o_end",
",",
"o_start",
",",
"reverse",
"=",
"False",
")",
"self",
".",
"_draw_arc_line",
"(",
"p",
",",
"outer_radius",
",",
"inner_radius",
",",
"o_start",
",",
"i_start",
")",
"p",
".",
"closePath",
"(",
")",
"return",
"p",
"else",
":",
"# Cheat and just use a four sided polygon.",
"# Calculate trig values for angle and coordinates",
"inner_startcos",
",",
"inner_startsin",
"=",
"(",
"cos",
"(",
"inner_startangle",
")",
",",
"sin",
"(",
"inner_startangle",
")",
",",
")",
"inner_endcos",
",",
"inner_endsin",
"=",
"cos",
"(",
"inner_endangle",
")",
",",
"sin",
"(",
"inner_endangle",
")",
"outer_startcos",
",",
"outer_startsin",
"=",
"(",
"cos",
"(",
"outer_startangle",
")",
",",
"sin",
"(",
"outer_startangle",
")",
",",
")",
"outer_endcos",
",",
"outer_endsin",
"=",
"cos",
"(",
"outer_endangle",
")",
",",
"sin",
"(",
"outer_endangle",
")",
"x1",
",",
"y1",
"=",
"(",
"x0",
"+",
"inner_radius",
"*",
"inner_startsin",
",",
"y0",
"+",
"inner_radius",
"*",
"inner_startcos",
",",
")",
"x2",
",",
"y2",
"=",
"(",
"x0",
"+",
"inner_radius",
"*",
"inner_endsin",
",",
"y0",
"+",
"inner_radius",
"*",
"inner_endcos",
",",
")",
"x3",
",",
"y3",
"=",
"(",
"x0",
"+",
"outer_radius",
"*",
"outer_endsin",
",",
"y0",
"+",
"outer_radius",
"*",
"outer_endcos",
",",
")",
"x4",
",",
"y4",
"=",
"(",
"x0",
"+",
"outer_radius",
"*",
"outer_startsin",
",",
"y0",
"+",
"outer_radius",
"*",
"outer_startcos",
",",
")",
"return",
"draw_polygon",
"(",
"[",
"(",
"x1",
",",
"y1",
")",
",",
"(",
"x2",
",",
"y2",
")",
",",
"(",
"x3",
",",
"y3",
")",
",",
"(",
"x4",
",",
"y4",
")",
"]",
",",
"color",
",",
"border",
",",
"# default is mitre/miter which can stick out too much:",
"strokeLineJoin",
"=",
"1",
",",
"# 1=round",
")"
] |
https://github.com/biopython/biopython/blob/2dd97e71762af7b046d7f7f8a4f1e38db6b06c86/Bio/Graphics/GenomeDiagram/_CircularDrawer.py#L1185-L1272
|
||
sympy/sympy
|
d822fcba181155b85ff2b29fe525adbafb22b448
|
sympy/physics/quantum/pauli.py
|
python
|
qsimplify_pauli
|
(e)
|
return e
|
Simplify an expression that includes products of pauli operators.
Parameters
==========
e : expression
An expression that contains products of Pauli operators that is
to be simplified.
Examples
========
>>> from sympy.physics.quantum.pauli import SigmaX, SigmaY
>>> from sympy.physics.quantum.pauli import qsimplify_pauli
>>> sx, sy = SigmaX(), SigmaY()
>>> sx * sy
SigmaX()*SigmaY()
>>> qsimplify_pauli(sx * sy)
I*SigmaZ()
|
Simplify an expression that includes products of pauli operators.
|
[
"Simplify",
"an",
"expression",
"that",
"includes",
"products",
"of",
"pauli",
"operators",
"."
] |
def qsimplify_pauli(e):
"""
Simplify an expression that includes products of pauli operators.
Parameters
==========
e : expression
An expression that contains products of Pauli operators that is
to be simplified.
Examples
========
>>> from sympy.physics.quantum.pauli import SigmaX, SigmaY
>>> from sympy.physics.quantum.pauli import qsimplify_pauli
>>> sx, sy = SigmaX(), SigmaY()
>>> sx * sy
SigmaX()*SigmaY()
>>> qsimplify_pauli(sx * sy)
I*SigmaZ()
"""
if isinstance(e, Operator):
return e
if isinstance(e, (Add, Pow, exp)):
t = type(e)
return t(*(qsimplify_pauli(arg) for arg in e.args))
if isinstance(e, Mul):
c, nc = e.args_cnc()
nc_s = []
while nc:
curr = nc.pop(0)
while (len(nc) and
isinstance(curr, SigmaOpBase) and
isinstance(nc[0], SigmaOpBase) and
curr.name == nc[0].name):
x = nc.pop(0)
y = _qsimplify_pauli_product(curr, x)
c1, nc1 = y.args_cnc()
curr = Mul(*nc1)
c = c + c1
nc_s.append(curr)
return Mul(*c) * Mul(*nc_s)
return e
|
[
"def",
"qsimplify_pauli",
"(",
"e",
")",
":",
"if",
"isinstance",
"(",
"e",
",",
"Operator",
")",
":",
"return",
"e",
"if",
"isinstance",
"(",
"e",
",",
"(",
"Add",
",",
"Pow",
",",
"exp",
")",
")",
":",
"t",
"=",
"type",
"(",
"e",
")",
"return",
"t",
"(",
"*",
"(",
"qsimplify_pauli",
"(",
"arg",
")",
"for",
"arg",
"in",
"e",
".",
"args",
")",
")",
"if",
"isinstance",
"(",
"e",
",",
"Mul",
")",
":",
"c",
",",
"nc",
"=",
"e",
".",
"args_cnc",
"(",
")",
"nc_s",
"=",
"[",
"]",
"while",
"nc",
":",
"curr",
"=",
"nc",
".",
"pop",
"(",
"0",
")",
"while",
"(",
"len",
"(",
"nc",
")",
"and",
"isinstance",
"(",
"curr",
",",
"SigmaOpBase",
")",
"and",
"isinstance",
"(",
"nc",
"[",
"0",
"]",
",",
"SigmaOpBase",
")",
"and",
"curr",
".",
"name",
"==",
"nc",
"[",
"0",
"]",
".",
"name",
")",
":",
"x",
"=",
"nc",
".",
"pop",
"(",
"0",
")",
"y",
"=",
"_qsimplify_pauli_product",
"(",
"curr",
",",
"x",
")",
"c1",
",",
"nc1",
"=",
"y",
".",
"args_cnc",
"(",
")",
"curr",
"=",
"Mul",
"(",
"*",
"nc1",
")",
"c",
"=",
"c",
"+",
"c1",
"nc_s",
".",
"append",
"(",
"curr",
")",
"return",
"Mul",
"(",
"*",
"c",
")",
"*",
"Mul",
"(",
"*",
"nc_s",
")",
"return",
"e"
] |
https://github.com/sympy/sympy/blob/d822fcba181155b85ff2b29fe525adbafb22b448/sympy/physics/quantum/pauli.py#L623-L675
|
|
pyenchant/pyenchant
|
fc2a4a3fca6a55d510d01455b814aa27cdfc961e
|
enchant/__init__.py
|
python
|
Broker.set_ordering
|
(self, tag: str, ordering: str)
|
Set dictionary preferences for a language.
The Enchant library supports the use of multiple dictionary programs
and multiple languages. This method specifies which dictionaries
the broker should prefer when dealing with a given language. `tag`
must be an appropriate language specification and `ordering` is a
string listing the dictionaries in order of preference. For example
a valid ordering might be "aspell,myspell,ispell".
The value of `tag` can also be set to "*" to set a default ordering
for all languages for which one has not been set explicitly.
|
Set dictionary preferences for a language.
|
[
"Set",
"dictionary",
"preferences",
"for",
"a",
"language",
"."
] |
def set_ordering(self, tag: str, ordering: str) -> None:
"""Set dictionary preferences for a language.
The Enchant library supports the use of multiple dictionary programs
and multiple languages. This method specifies which dictionaries
the broker should prefer when dealing with a given language. `tag`
must be an appropriate language specification and `ordering` is a
string listing the dictionaries in order of preference. For example
a valid ordering might be "aspell,myspell,ispell".
The value of `tag` can also be set to "*" to set a default ordering
for all languages for which one has not been set explicitly.
"""
self._check_this()
_e.broker_set_ordering(self._this, tag.encode(), ordering.encode())
|
[
"def",
"set_ordering",
"(",
"self",
",",
"tag",
":",
"str",
",",
"ordering",
":",
"str",
")",
"->",
"None",
":",
"self",
".",
"_check_this",
"(",
")",
"_e",
".",
"broker_set_ordering",
"(",
"self",
".",
"_this",
",",
"tag",
".",
"encode",
"(",
")",
",",
"ordering",
".",
"encode",
"(",
")",
")"
] |
https://github.com/pyenchant/pyenchant/blob/fc2a4a3fca6a55d510d01455b814aa27cdfc961e/enchant/__init__.py#L354-L367
|
||
carmaa/inception
|
6c09195f1318ae66010d629b1a86c10524251e26
|
inception/external/pymetasploit/metasploit/msfrpc.py
|
python
|
Workspace.events
|
(self)
|
return EventsTable(self.rpc, self.name)
|
Returns the events table for the current workspace.
|
Returns the events table for the current workspace.
|
[
"Returns",
"the",
"events",
"table",
"for",
"the",
"current",
"workspace",
"."
] |
def events(self):
"""
Returns the events table for the current workspace.
"""
return EventsTable(self.rpc, self.name)
|
[
"def",
"events",
"(",
"self",
")",
":",
"return",
"EventsTable",
"(",
"self",
".",
"rpc",
",",
"self",
".",
"name",
")"
] |
https://github.com/carmaa/inception/blob/6c09195f1318ae66010d629b1a86c10524251e26/inception/external/pymetasploit/metasploit/msfrpc.py#L943-L947
|
|
equinor/segyio
|
a98c2bc21d238de00b9b65be331d7a011d8a6372
|
python/segyio/create.py
|
python
|
default_text_header
|
(iline, xline, offset)
|
return bytes(rows)
|
[] |
def default_text_header(iline, xline, offset):
lines = {
1: "DATE %s" % datetime.date.today().isoformat(),
2: "AN INCREASE IN AMPLITUDE EQUALS AN INCREASE IN ACOUSTIC IMPEDANCE",
3: "Written by libsegyio (python)",
11: "TRACE HEADER POSITION:",
12: " INLINE BYTES %03d-%03d | OFFSET BYTES %03d-%03d" % (iline, iline + 4, int(offset), int(offset) + 4),
13: " CROSSLINE BYTES %03d-%03d |" % (xline, xline + 4),
15: "END EBCDIC HEADER",
}
rows = segyio.create_text_header(lines)
rows = bytearray(rows, 'ascii') # mutable array of bytes
rows[-1] = 128 # \x80 -- Unsure if this is really required...
return bytes(rows)
|
[
"def",
"default_text_header",
"(",
"iline",
",",
"xline",
",",
"offset",
")",
":",
"lines",
"=",
"{",
"1",
":",
"\"DATE %s\"",
"%",
"datetime",
".",
"date",
".",
"today",
"(",
")",
".",
"isoformat",
"(",
")",
",",
"2",
":",
"\"AN INCREASE IN AMPLITUDE EQUALS AN INCREASE IN ACOUSTIC IMPEDANCE\"",
",",
"3",
":",
"\"Written by libsegyio (python)\"",
",",
"11",
":",
"\"TRACE HEADER POSITION:\"",
",",
"12",
":",
"\" INLINE BYTES %03d-%03d | OFFSET BYTES %03d-%03d\"",
"%",
"(",
"iline",
",",
"iline",
"+",
"4",
",",
"int",
"(",
"offset",
")",
",",
"int",
"(",
"offset",
")",
"+",
"4",
")",
",",
"13",
":",
"\" CROSSLINE BYTES %03d-%03d |\"",
"%",
"(",
"xline",
",",
"xline",
"+",
"4",
")",
",",
"15",
":",
"\"END EBCDIC HEADER\"",
",",
"}",
"rows",
"=",
"segyio",
".",
"create_text_header",
"(",
"lines",
")",
"rows",
"=",
"bytearray",
"(",
"rows",
",",
"'ascii'",
")",
"# mutable array of bytes",
"rows",
"[",
"-",
"1",
"]",
"=",
"128",
"# \\x80 -- Unsure if this is really required...",
"return",
"bytes",
"(",
"rows",
")"
] |
https://github.com/equinor/segyio/blob/a98c2bc21d238de00b9b65be331d7a011d8a6372/python/segyio/create.py#L7-L20
|
|||
pymedusa/Medusa
|
1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38
|
medusa/tv/series.py
|
python
|
Series.whitelist
|
(self)
|
return self.release_groups.whitelist
|
Return the anime's whitelisted release groups.
|
Return the anime's whitelisted release groups.
|
[
"Return",
"the",
"anime",
"s",
"whitelisted",
"release",
"groups",
"."
] |
def whitelist(self):
"""Return the anime's whitelisted release groups."""
return self.release_groups.whitelist
|
[
"def",
"whitelist",
"(",
"self",
")",
":",
"return",
"self",
".",
"release_groups",
".",
"whitelist"
] |
https://github.com/pymedusa/Medusa/blob/1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38/medusa/tv/series.py#L754-L756
|
|
python/mypy
|
17850b3bd77ae9efb5d21f656c4e4e05ac48d894
|
mypy/errors.py
|
python
|
Errors.render_messages
|
(self,
errors: List[ErrorInfo])
|
return result
|
Translate the messages into a sequence of tuples.
Each tuple is of form (path, line, col, severity, message, allow_dups, code).
The rendered sequence includes information about error contexts.
The path item may be None. If the line item is negative, the
line number is not defined for the tuple.
|
Translate the messages into a sequence of tuples.
|
[
"Translate",
"the",
"messages",
"into",
"a",
"sequence",
"of",
"tuples",
"."
] |
def render_messages(self,
errors: List[ErrorInfo]) -> List[ErrorTuple]:
"""Translate the messages into a sequence of tuples.
Each tuple is of form (path, line, col, severity, message, allow_dups, code).
The rendered sequence includes information about error contexts.
The path item may be None. If the line item is negative, the
line number is not defined for the tuple.
"""
result: List[ErrorTuple] = []
prev_import_context: List[Tuple[str, int]] = []
prev_function_or_member: Optional[str] = None
prev_type: Optional[str] = None
for e in errors:
# Report module import context, if different from previous message.
if not self.show_error_context:
pass
elif e.import_ctx != prev_import_context:
last = len(e.import_ctx) - 1
i = last
while i >= 0:
path, line = e.import_ctx[i]
fmt = '{}:{}: note: In module imported here'
if i < last:
fmt = '{}:{}: note: ... from here'
if i > 0:
fmt += ','
else:
fmt += ':'
# Remove prefix to ignore from path (if present) to
# simplify path.
path = remove_path_prefix(path, self.ignore_prefix)
result.append((None, -1, -1, 'note',
fmt.format(path, line), e.allow_dups, None))
i -= 1
file = self.simplify_path(e.file)
# Report context within a source file.
if not self.show_error_context:
pass
elif (e.function_or_member != prev_function_or_member or
e.type != prev_type):
if e.function_or_member is None:
if e.type is None:
result.append((file, -1, -1, 'note', 'At top level:', e.allow_dups, None))
else:
result.append((file, -1, -1, 'note', 'In class "{}":'.format(
e.type), e.allow_dups, None))
else:
if e.type is None:
result.append((file, -1, -1, 'note',
'In function "{}":'.format(
e.function_or_member), e.allow_dups, None))
else:
result.append((file, -1, -1, 'note',
'In member "{}" of class "{}":'.format(
e.function_or_member, e.type), e.allow_dups, None))
elif e.type != prev_type:
if e.type is None:
result.append((file, -1, -1, 'note', 'At top level:', e.allow_dups, None))
else:
result.append((file, -1, -1, 'note',
'In class "{}":'.format(e.type), e.allow_dups, None))
if isinstance(e.message, ErrorMessage):
result.append(
(file, e.line, e.column, e.severity, e.message.value, e.allow_dups, e.code))
else:
result.append(
(file, e.line, e.column, e.severity, e.message, e.allow_dups, e.code))
prev_import_context = e.import_ctx
prev_function_or_member = e.function_or_member
prev_type = e.type
return result
|
[
"def",
"render_messages",
"(",
"self",
",",
"errors",
":",
"List",
"[",
"ErrorInfo",
"]",
")",
"->",
"List",
"[",
"ErrorTuple",
"]",
":",
"result",
":",
"List",
"[",
"ErrorTuple",
"]",
"=",
"[",
"]",
"prev_import_context",
":",
"List",
"[",
"Tuple",
"[",
"str",
",",
"int",
"]",
"]",
"=",
"[",
"]",
"prev_function_or_member",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
"prev_type",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
"for",
"e",
"in",
"errors",
":",
"# Report module import context, if different from previous message.",
"if",
"not",
"self",
".",
"show_error_context",
":",
"pass",
"elif",
"e",
".",
"import_ctx",
"!=",
"prev_import_context",
":",
"last",
"=",
"len",
"(",
"e",
".",
"import_ctx",
")",
"-",
"1",
"i",
"=",
"last",
"while",
"i",
">=",
"0",
":",
"path",
",",
"line",
"=",
"e",
".",
"import_ctx",
"[",
"i",
"]",
"fmt",
"=",
"'{}:{}: note: In module imported here'",
"if",
"i",
"<",
"last",
":",
"fmt",
"=",
"'{}:{}: note: ... from here'",
"if",
"i",
">",
"0",
":",
"fmt",
"+=",
"','",
"else",
":",
"fmt",
"+=",
"':'",
"# Remove prefix to ignore from path (if present) to",
"# simplify path.",
"path",
"=",
"remove_path_prefix",
"(",
"path",
",",
"self",
".",
"ignore_prefix",
")",
"result",
".",
"append",
"(",
"(",
"None",
",",
"-",
"1",
",",
"-",
"1",
",",
"'note'",
",",
"fmt",
".",
"format",
"(",
"path",
",",
"line",
")",
",",
"e",
".",
"allow_dups",
",",
"None",
")",
")",
"i",
"-=",
"1",
"file",
"=",
"self",
".",
"simplify_path",
"(",
"e",
".",
"file",
")",
"# Report context within a source file.",
"if",
"not",
"self",
".",
"show_error_context",
":",
"pass",
"elif",
"(",
"e",
".",
"function_or_member",
"!=",
"prev_function_or_member",
"or",
"e",
".",
"type",
"!=",
"prev_type",
")",
":",
"if",
"e",
".",
"function_or_member",
"is",
"None",
":",
"if",
"e",
".",
"type",
"is",
"None",
":",
"result",
".",
"append",
"(",
"(",
"file",
",",
"-",
"1",
",",
"-",
"1",
",",
"'note'",
",",
"'At top level:'",
",",
"e",
".",
"allow_dups",
",",
"None",
")",
")",
"else",
":",
"result",
".",
"append",
"(",
"(",
"file",
",",
"-",
"1",
",",
"-",
"1",
",",
"'note'",
",",
"'In class \"{}\":'",
".",
"format",
"(",
"e",
".",
"type",
")",
",",
"e",
".",
"allow_dups",
",",
"None",
")",
")",
"else",
":",
"if",
"e",
".",
"type",
"is",
"None",
":",
"result",
".",
"append",
"(",
"(",
"file",
",",
"-",
"1",
",",
"-",
"1",
",",
"'note'",
",",
"'In function \"{}\":'",
".",
"format",
"(",
"e",
".",
"function_or_member",
")",
",",
"e",
".",
"allow_dups",
",",
"None",
")",
")",
"else",
":",
"result",
".",
"append",
"(",
"(",
"file",
",",
"-",
"1",
",",
"-",
"1",
",",
"'note'",
",",
"'In member \"{}\" of class \"{}\":'",
".",
"format",
"(",
"e",
".",
"function_or_member",
",",
"e",
".",
"type",
")",
",",
"e",
".",
"allow_dups",
",",
"None",
")",
")",
"elif",
"e",
".",
"type",
"!=",
"prev_type",
":",
"if",
"e",
".",
"type",
"is",
"None",
":",
"result",
".",
"append",
"(",
"(",
"file",
",",
"-",
"1",
",",
"-",
"1",
",",
"'note'",
",",
"'At top level:'",
",",
"e",
".",
"allow_dups",
",",
"None",
")",
")",
"else",
":",
"result",
".",
"append",
"(",
"(",
"file",
",",
"-",
"1",
",",
"-",
"1",
",",
"'note'",
",",
"'In class \"{}\":'",
".",
"format",
"(",
"e",
".",
"type",
")",
",",
"e",
".",
"allow_dups",
",",
"None",
")",
")",
"if",
"isinstance",
"(",
"e",
".",
"message",
",",
"ErrorMessage",
")",
":",
"result",
".",
"append",
"(",
"(",
"file",
",",
"e",
".",
"line",
",",
"e",
".",
"column",
",",
"e",
".",
"severity",
",",
"e",
".",
"message",
".",
"value",
",",
"e",
".",
"allow_dups",
",",
"e",
".",
"code",
")",
")",
"else",
":",
"result",
".",
"append",
"(",
"(",
"file",
",",
"e",
".",
"line",
",",
"e",
".",
"column",
",",
"e",
".",
"severity",
",",
"e",
".",
"message",
",",
"e",
".",
"allow_dups",
",",
"e",
".",
"code",
")",
")",
"prev_import_context",
"=",
"e",
".",
"import_ctx",
"prev_function_or_member",
"=",
"e",
".",
"function_or_member",
"prev_type",
"=",
"e",
".",
"type",
"return",
"result"
] |
https://github.com/python/mypy/blob/17850b3bd77ae9efb5d21f656c4e4e05ac48d894/mypy/errors.py#L615-L692
|
|
tensorflow/models
|
6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3
|
research/lfads/synth_data/synthetic_data_utils.py
|
python
|
nparray_and_transpose
|
(data_a_b_c)
|
return data_axcxb
|
Convert the list of items in data to a numpy array, and transpose it
Args:
data: data_asbsc: a nested, nested list of length a, with sublist length
b, with sublist length c.
Returns:
a numpy 3-tensor with dimensions a x c x b
|
Convert the list of items in data to a numpy array, and transpose it
Args:
data: data_asbsc: a nested, nested list of length a, with sublist length
b, with sublist length c.
Returns:
a numpy 3-tensor with dimensions a x c x b
|
[
"Convert",
"the",
"list",
"of",
"items",
"in",
"data",
"to",
"a",
"numpy",
"array",
"and",
"transpose",
"it",
"Args",
":",
"data",
":",
"data_asbsc",
":",
"a",
"nested",
"nested",
"list",
"of",
"length",
"a",
"with",
"sublist",
"length",
"b",
"with",
"sublist",
"length",
"c",
".",
"Returns",
":",
"a",
"numpy",
"3",
"-",
"tensor",
"with",
"dimensions",
"a",
"x",
"c",
"x",
"b"
] |
def nparray_and_transpose(data_a_b_c):
"""Convert the list of items in data to a numpy array, and transpose it
Args:
data: data_asbsc: a nested, nested list of length a, with sublist length
b, with sublist length c.
Returns:
a numpy 3-tensor with dimensions a x c x b
"""
data_axbxc = np.array([datum_b_c for datum_b_c in data_a_b_c])
data_axcxb = np.transpose(data_axbxc, axes=[0,2,1])
return data_axcxb
|
[
"def",
"nparray_and_transpose",
"(",
"data_a_b_c",
")",
":",
"data_axbxc",
"=",
"np",
".",
"array",
"(",
"[",
"datum_b_c",
"for",
"datum_b_c",
"in",
"data_a_b_c",
"]",
")",
"data_axcxb",
"=",
"np",
".",
"transpose",
"(",
"data_axbxc",
",",
"axes",
"=",
"[",
"0",
",",
"2",
",",
"1",
"]",
")",
"return",
"data_axcxb"
] |
https://github.com/tensorflow/models/blob/6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3/research/lfads/synth_data/synthetic_data_utils.py#L220-L230
|
|
bruderstein/PythonScript
|
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
|
PythonLib/full/genericpath.py
|
python
|
samefile
|
(f1, f2)
|
return samestat(s1, s2)
|
Test whether two pathnames reference the same actual file or directory
This is determined by the device number and i-node number and
raises an exception if an os.stat() call on either pathname fails.
|
Test whether two pathnames reference the same actual file or directory
|
[
"Test",
"whether",
"two",
"pathnames",
"reference",
"the",
"same",
"actual",
"file",
"or",
"directory"
] |
def samefile(f1, f2):
"""Test whether two pathnames reference the same actual file or directory
This is determined by the device number and i-node number and
raises an exception if an os.stat() call on either pathname fails.
"""
s1 = os.stat(f1)
s2 = os.stat(f2)
return samestat(s1, s2)
|
[
"def",
"samefile",
"(",
"f1",
",",
"f2",
")",
":",
"s1",
"=",
"os",
".",
"stat",
"(",
"f1",
")",
"s2",
"=",
"os",
".",
"stat",
"(",
"f2",
")",
"return",
"samestat",
"(",
"s1",
",",
"s2",
")"
] |
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/genericpath.py#L94-L102
|
|
yourtion/DataminingGuideBook-Codes
|
ff8f41b3b5faa3b584475f92f60ed3f7613869b8
|
chapter-4/classifyTemplate.py
|
python
|
Classifier.getAbsoluteStandardDeviation
|
(self, alist, median)
|
return sum / len(alist)
|
given alist and median return absolute standard deviation
|
given alist and median return absolute standard deviation
|
[
"given",
"alist",
"and",
"median",
"return",
"absolute",
"standard",
"deviation"
] |
def getAbsoluteStandardDeviation(self, alist, median):
"""given alist and median return absolute standard deviation"""
sum = 0
for item in alist:
sum += abs(item - median)
return sum / len(alist)
|
[
"def",
"getAbsoluteStandardDeviation",
"(",
"self",
",",
"alist",
",",
"median",
")",
":",
"sum",
"=",
"0",
"for",
"item",
"in",
"alist",
":",
"sum",
"+=",
"abs",
"(",
"item",
"-",
"median",
")",
"return",
"sum",
"/",
"len",
"(",
"alist",
")"
] |
https://github.com/yourtion/DataminingGuideBook-Codes/blob/ff8f41b3b5faa3b584475f92f60ed3f7613869b8/chapter-4/classifyTemplate.py#L68-L73
|
|
fake-name/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
WebMirror/management/rss_parser_funcs/feed_parse_extractTrashbunnyTumblrCom.py
|
python
|
extractTrashbunnyTumblrCom
|
(item)
|
return False
|
Parser for 'trashbunny.tumblr.com'
|
Parser for 'trashbunny.tumblr.com'
|
[
"Parser",
"for",
"trashbunny",
".",
"tumblr",
".",
"com"
] |
def extractTrashbunnyTumblrCom(item):
'''
Parser for 'trashbunny.tumblr.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
[
"def",
"extractTrashbunnyTumblrCom",
"(",
"item",
")",
":",
"vol",
",",
"chp",
",",
"frag",
",",
"postfix",
"=",
"extractVolChapterFragmentPostfix",
"(",
"item",
"[",
"'title'",
"]",
")",
"if",
"not",
"(",
"chp",
"or",
"vol",
")",
"or",
"\"preview\"",
"in",
"item",
"[",
"'title'",
"]",
".",
"lower",
"(",
")",
":",
"return",
"None",
"tagmap",
"=",
"[",
"(",
"'PRC'",
",",
"'PRC'",
",",
"'translated'",
")",
",",
"(",
"'Loiterous'",
",",
"'Loiterous'",
",",
"'oel'",
")",
",",
"]",
"for",
"tagname",
",",
"name",
",",
"tl_type",
"in",
"tagmap",
":",
"if",
"tagname",
"in",
"item",
"[",
"'tags'",
"]",
":",
"return",
"buildReleaseMessageWithType",
"(",
"item",
",",
"name",
",",
"vol",
",",
"chp",
",",
"frag",
"=",
"frag",
",",
"postfix",
"=",
"postfix",
",",
"tl_type",
"=",
"tl_type",
")",
"return",
"False"
] |
https://github.com/fake-name/ReadableWebProxy/blob/ed5c7abe38706acc2684a1e6cd80242a03c5f010/WebMirror/management/rss_parser_funcs/feed_parse_extractTrashbunnyTumblrCom.py#L2-L21
|
|
koniu/recoll-webui
|
c3151abc2b4416fb4a63b7833b8c99d17e3ace6e
|
bottle.py
|
python
|
HooksPlugin.add
|
(self, name, func)
|
Attach a callback to a hook.
|
Attach a callback to a hook.
|
[
"Attach",
"a",
"callback",
"to",
"a",
"hook",
"."
] |
def add(self, name, func):
''' Attach a callback to a hook. '''
was_empty = self._empty()
self.hooks.setdefault(name, []).append(func)
if self.app and was_empty and not self._empty(): self.app.reset()
|
[
"def",
"add",
"(",
"self",
",",
"name",
",",
"func",
")",
":",
"was_empty",
"=",
"self",
".",
"_empty",
"(",
")",
"self",
".",
"hooks",
".",
"setdefault",
"(",
"name",
",",
"[",
"]",
")",
".",
"append",
"(",
"func",
")",
"if",
"self",
".",
"app",
"and",
"was_empty",
"and",
"not",
"self",
".",
"_empty",
"(",
")",
":",
"self",
".",
"app",
".",
"reset",
"(",
")"
] |
https://github.com/koniu/recoll-webui/blob/c3151abc2b4416fb4a63b7833b8c99d17e3ace6e/bottle.py#L1506-L1510
|
||
missionpinball/mpf
|
8e6b74cff4ba06d2fec9445742559c1068b88582
|
mpf/devices/logic_blocks.py
|
python
|
Counter.validate_and_parse_config
|
(self, config: dict, is_mode_config: bool, debug_prefix: str = None)
|
return super().validate_and_parse_config(config, is_mode_config, debug_prefix)
|
Validate logic block config.
|
Validate logic block config.
|
[
"Validate",
"logic",
"block",
"config",
"."
] |
def validate_and_parse_config(self, config: dict, is_mode_config: bool, debug_prefix: str = None) -> dict:
"""Validate logic block config."""
if 'events_when_hit' not in config:
# for compatibility post the same default as previously for
# counters. This one is deprecated.
config['events_when_hit'] = ['counter_' + self.name + '_hit']
# this is the one moving forward
config['events_when_hit'].append('logicblock_' + self.name + '_hit')
return super().validate_and_parse_config(config, is_mode_config, debug_prefix)
|
[
"def",
"validate_and_parse_config",
"(",
"self",
",",
"config",
":",
"dict",
",",
"is_mode_config",
":",
"bool",
",",
"debug_prefix",
":",
"str",
"=",
"None",
")",
"->",
"dict",
":",
"if",
"'events_when_hit'",
"not",
"in",
"config",
":",
"# for compatibility post the same default as previously for",
"# counters. This one is deprecated.",
"config",
"[",
"'events_when_hit'",
"]",
"=",
"[",
"'counter_'",
"+",
"self",
".",
"name",
"+",
"'_hit'",
"]",
"# this is the one moving forward",
"config",
"[",
"'events_when_hit'",
"]",
".",
"append",
"(",
"'logicblock_'",
"+",
"self",
".",
"name",
"+",
"'_hit'",
")",
"return",
"super",
"(",
")",
".",
"validate_and_parse_config",
"(",
"config",
",",
"is_mode_config",
",",
"debug_prefix",
")"
] |
https://github.com/missionpinball/mpf/blob/8e6b74cff4ba06d2fec9445742559c1068b88582/mpf/devices/logic_blocks.py#L466-L476
|
|
Spacelog/Spacelog
|
92df308be5923765607a89b022acb57c041c86b3
|
ext/xappy-0.5-sja-1/xappy/searchconnection.py
|
python
|
SearchConnection.reopen
|
(self)
|
Reopen the connection.
This updates the revision of the index which the connection references
to the latest flushed revision.
|
Reopen the connection.
|
[
"Reopen",
"the",
"connection",
"."
] |
def reopen(self):
"""Reopen the connection.
This updates the revision of the index which the connection references
to the latest flushed revision.
"""
if self._index is None:
raise _errors.SearchError("SearchConnection has been closed")
self._index.reopen()
# Re-read the actions.
self._load_config()
|
[
"def",
"reopen",
"(",
"self",
")",
":",
"if",
"self",
".",
"_index",
"is",
"None",
":",
"raise",
"_errors",
".",
"SearchError",
"(",
"\"SearchConnection has been closed\"",
")",
"self",
".",
"_index",
".",
"reopen",
"(",
")",
"# Re-read the actions.",
"self",
".",
"_load_config",
"(",
")"
] |
https://github.com/Spacelog/Spacelog/blob/92df308be5923765607a89b022acb57c041c86b3/ext/xappy-0.5-sja-1/xappy/searchconnection.py#L825-L836
|
||
misterch0c/shadowbroker
|
e3a069bea47a2c1009697941ac214adc6f90aa8d
|
windows/Resources/Python/Core/Lib/collections.py
|
python
|
Counter.copy
|
(self)
|
return self.__class__(self)
|
Return a shallow copy.
|
Return a shallow copy.
|
[
"Return",
"a",
"shallow",
"copy",
"."
] |
def copy(self):
"""Return a shallow copy."""
return self.__class__(self)
|
[
"def",
"copy",
"(",
"self",
")",
":",
"return",
"self",
".",
"__class__",
"(",
"self",
")"
] |
https://github.com/misterch0c/shadowbroker/blob/e3a069bea47a2c1009697941ac214adc6f90aa8d/windows/Resources/Python/Core/Lib/collections.py#L456-L458
|
|
Source-Python-Dev-Team/Source.Python
|
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
|
addons/source-python/packages/site-packages/docutils/nodes.py
|
python
|
Element.copy_attr_coerce
|
(self, attr, value, replace)
|
If attr is an attribute of self and either self[attr] or value is a
list, convert all non-sequence values to a sequence of 1 element and
then concatenate the two sequence, setting the result to self[attr].
If both self[attr] and value are non-sequences and replace is True or
self[attr] is None, replace self[attr] with value. Otherwise, do
nothing.
|
If attr is an attribute of self and either self[attr] or value is a
list, convert all non-sequence values to a sequence of 1 element and
then concatenate the two sequence, setting the result to self[attr].
If both self[attr] and value are non-sequences and replace is True or
self[attr] is None, replace self[attr] with value. Otherwise, do
nothing.
|
[
"If",
"attr",
"is",
"an",
"attribute",
"of",
"self",
"and",
"either",
"self",
"[",
"attr",
"]",
"or",
"value",
"is",
"a",
"list",
"convert",
"all",
"non",
"-",
"sequence",
"values",
"to",
"a",
"sequence",
"of",
"1",
"element",
"and",
"then",
"concatenate",
"the",
"two",
"sequence",
"setting",
"the",
"result",
"to",
"self",
"[",
"attr",
"]",
".",
"If",
"both",
"self",
"[",
"attr",
"]",
"and",
"value",
"are",
"non",
"-",
"sequences",
"and",
"replace",
"is",
"True",
"or",
"self",
"[",
"attr",
"]",
"is",
"None",
"replace",
"self",
"[",
"attr",
"]",
"with",
"value",
".",
"Otherwise",
"do",
"nothing",
"."
] |
def copy_attr_coerce(self, attr, value, replace):
"""
If attr is an attribute of self and either self[attr] or value is a
list, convert all non-sequence values to a sequence of 1 element and
then concatenate the two sequence, setting the result to self[attr].
If both self[attr] and value are non-sequences and replace is True or
self[attr] is None, replace self[attr] with value. Otherwise, do
nothing.
"""
if self.get(attr) is not value:
if isinstance(self.get(attr), list) or \
isinstance(value, list):
self.coerce_append_attr_list(attr, value)
else:
self.replace_attr(attr, value, replace)
|
[
"def",
"copy_attr_coerce",
"(",
"self",
",",
"attr",
",",
"value",
",",
"replace",
")",
":",
"if",
"self",
".",
"get",
"(",
"attr",
")",
"is",
"not",
"value",
":",
"if",
"isinstance",
"(",
"self",
".",
"get",
"(",
"attr",
")",
",",
"list",
")",
"or",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"self",
".",
"coerce_append_attr_list",
"(",
"attr",
",",
"value",
")",
"else",
":",
"self",
".",
"replace_attr",
"(",
"attr",
",",
"value",
",",
"replace",
")"
] |
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/packages/site-packages/docutils/nodes.py#L755-L769
|
||
nltk/nltk
|
3f74ac55681667d7ef78b664557487145f51eb02
|
nltk/classify/api.py
|
python
|
MultiClassifierI.labels
|
(self)
|
:return: the list of category labels used by this classifier.
:rtype: list of (immutable)
|
:return: the list of category labels used by this classifier.
:rtype: list of (immutable)
|
[
":",
"return",
":",
"the",
"list",
"of",
"category",
"labels",
"used",
"by",
"this",
"classifier",
".",
":",
"rtype",
":",
"list",
"of",
"(",
"immutable",
")"
] |
def labels(self):
"""
:return: the list of category labels used by this classifier.
:rtype: list of (immutable)
"""
raise NotImplementedError()
|
[
"def",
"labels",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] |
https://github.com/nltk/nltk/blob/3f74ac55681667d7ef78b664557487145f51eb02/nltk/classify/api.py#L107-L112
|
||
hardmaru/resnet-cppn-gan-tensorflow
|
9206e06512c118e932fbc789c91a5cf4f9e5d2b9
|
images2gif.py
|
python
|
NeuQuant.fix
|
(self)
|
[] |
def fix(self):
for i in range(self.NETSIZE):
for j in range(3):
x = int(0.5 + self.network[i,j])
x = max(0, x)
x = min(255, x)
self.colormap[i,j] = x
self.colormap[i,3] = i
|
[
"def",
"fix",
"(",
"self",
")",
":",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"NETSIZE",
")",
":",
"for",
"j",
"in",
"range",
"(",
"3",
")",
":",
"x",
"=",
"int",
"(",
"0.5",
"+",
"self",
".",
"network",
"[",
"i",
",",
"j",
"]",
")",
"x",
"=",
"max",
"(",
"0",
",",
"x",
")",
"x",
"=",
"min",
"(",
"255",
",",
"x",
")",
"self",
".",
"colormap",
"[",
"i",
",",
"j",
"]",
"=",
"x",
"self",
".",
"colormap",
"[",
"i",
",",
"3",
"]",
"=",
"i"
] |
https://github.com/hardmaru/resnet-cppn-gan-tensorflow/blob/9206e06512c118e932fbc789c91a5cf4f9e5d2b9/images2gif.py#L959-L966
|
||||
pypa/pipenv
|
b21baade71a86ab3ee1429f71fbc14d4f95fb75d
|
pipenv/patched/notpip/_vendor/distlib/util.py
|
python
|
Progress.maximum
|
(self)
|
return self.unknown if self.max is None else self.max
|
[] |
def maximum(self):
return self.unknown if self.max is None else self.max
|
[
"def",
"maximum",
"(",
"self",
")",
":",
"return",
"self",
".",
"unknown",
"if",
"self",
".",
"max",
"is",
"None",
"else",
"self",
".",
"max"
] |
https://github.com/pypa/pipenv/blob/b21baade71a86ab3ee1429f71fbc14d4f95fb75d/pipenv/patched/notpip/_vendor/distlib/util.py#L1310-L1311
|
|||
dropbox/pyannotate
|
a7a46f394f0ba91a1b5fbf657e2393af542969ae
|
pyannotate_tools/annotations/infer.py
|
python
|
infer_annotation
|
(type_comments)
|
return combined_args, combined_return
|
Given some type comments, return a single inferred signature.
Args:
type_comments: Strings of form '(arg1, ... argN) -> ret'
Returns: Tuple of (argument types and kinds, return type).
|
Given some type comments, return a single inferred signature.
|
[
"Given",
"some",
"type",
"comments",
"return",
"a",
"single",
"inferred",
"signature",
"."
] |
def infer_annotation(type_comments):
# type: (List[str]) -> Tuple[List[Argument], AbstractType]
"""Given some type comments, return a single inferred signature.
Args:
type_comments: Strings of form '(arg1, ... argN) -> ret'
Returns: Tuple of (argument types and kinds, return type).
"""
assert type_comments
args = {} # type: Dict[int, Set[Argument]]
returns = set()
for comment in type_comments:
arg_types, return_type = parse_type_comment(comment)
for i, arg_type in enumerate(arg_types):
args.setdefault(i, set()).add(arg_type)
returns.add(return_type)
combined_args = []
for i in sorted(args):
arg_infos = list(args[i])
kind = argument_kind(arg_infos)
if kind is None:
raise InferError('Ambiguous argument kinds:\n' + '\n'.join(type_comments))
types = [arg.type for arg in arg_infos]
combined = combine_types(types)
if str(combined) == 'None':
# It's very rare for an argument to actually be typed `None`, more likely than
# not we simply don't have any data points for this argument.
combined = UnionType([ClassType('None'), AnyType()])
if kind != ARG_POS and (len(str(combined)) > 120 or isinstance(combined, UnionType)):
# Avoid some noise.
combined = AnyType()
combined_args.append(Argument(combined, kind))
combined_return = combine_types(returns)
return combined_args, combined_return
|
[
"def",
"infer_annotation",
"(",
"type_comments",
")",
":",
"# type: (List[str]) -> Tuple[List[Argument], AbstractType]",
"assert",
"type_comments",
"args",
"=",
"{",
"}",
"# type: Dict[int, Set[Argument]]",
"returns",
"=",
"set",
"(",
")",
"for",
"comment",
"in",
"type_comments",
":",
"arg_types",
",",
"return_type",
"=",
"parse_type_comment",
"(",
"comment",
")",
"for",
"i",
",",
"arg_type",
"in",
"enumerate",
"(",
"arg_types",
")",
":",
"args",
".",
"setdefault",
"(",
"i",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"arg_type",
")",
"returns",
".",
"add",
"(",
"return_type",
")",
"combined_args",
"=",
"[",
"]",
"for",
"i",
"in",
"sorted",
"(",
"args",
")",
":",
"arg_infos",
"=",
"list",
"(",
"args",
"[",
"i",
"]",
")",
"kind",
"=",
"argument_kind",
"(",
"arg_infos",
")",
"if",
"kind",
"is",
"None",
":",
"raise",
"InferError",
"(",
"'Ambiguous argument kinds:\\n'",
"+",
"'\\n'",
".",
"join",
"(",
"type_comments",
")",
")",
"types",
"=",
"[",
"arg",
".",
"type",
"for",
"arg",
"in",
"arg_infos",
"]",
"combined",
"=",
"combine_types",
"(",
"types",
")",
"if",
"str",
"(",
"combined",
")",
"==",
"'None'",
":",
"# It's very rare for an argument to actually be typed `None`, more likely than",
"# not we simply don't have any data points for this argument.",
"combined",
"=",
"UnionType",
"(",
"[",
"ClassType",
"(",
"'None'",
")",
",",
"AnyType",
"(",
")",
"]",
")",
"if",
"kind",
"!=",
"ARG_POS",
"and",
"(",
"len",
"(",
"str",
"(",
"combined",
")",
")",
">",
"120",
"or",
"isinstance",
"(",
"combined",
",",
"UnionType",
")",
")",
":",
"# Avoid some noise.",
"combined",
"=",
"AnyType",
"(",
")",
"combined_args",
".",
"append",
"(",
"Argument",
"(",
"combined",
",",
"kind",
")",
")",
"combined_return",
"=",
"combine_types",
"(",
"returns",
")",
"return",
"combined_args",
",",
"combined_return"
] |
https://github.com/dropbox/pyannotate/blob/a7a46f394f0ba91a1b5fbf657e2393af542969ae/pyannotate_tools/annotations/infer.py#L32-L66
|
|
serengil/deepface
|
6b48b008fd1ee13c7359baedc878dc88b2216c45
|
deepface/basemodels/ArcFace.py
|
python
|
stack1
|
(x, filters, blocks, stride1=2, name=None)
|
return x
|
[] |
def stack1(x, filters, blocks, stride1=2, name=None):
x = block1(x, filters, stride=stride1, name=name + '_block1')
for i in range(2, blocks + 1):
x = block1(x, filters, conv_shortcut=False, name=name + '_block' + str(i))
return x
|
[
"def",
"stack1",
"(",
"x",
",",
"filters",
",",
"blocks",
",",
"stride1",
"=",
"2",
",",
"name",
"=",
"None",
")",
":",
"x",
"=",
"block1",
"(",
"x",
",",
"filters",
",",
"stride",
"=",
"stride1",
",",
"name",
"=",
"name",
"+",
"'_block1'",
")",
"for",
"i",
"in",
"range",
"(",
"2",
",",
"blocks",
"+",
"1",
")",
":",
"x",
"=",
"block1",
"(",
"x",
",",
"filters",
",",
"conv_shortcut",
"=",
"False",
",",
"name",
"=",
"name",
"+",
"'_block'",
"+",
"str",
"(",
"i",
")",
")",
"return",
"x"
] |
https://github.com/serengil/deepface/blob/6b48b008fd1ee13c7359baedc878dc88b2216c45/deepface/basemodels/ArcFace.py#L83-L87
|
|||
regisb/slack-cli
|
a9c9140e32e23de1c2add89a42ee6363b811b6f6
|
slackcli/emoji.py
|
python
|
emojize
|
(text)
|
return result
|
Replace the :short_codes: with their corresponding unicode values. Avoid
replacing short codes inside verbatim tick (`) marks.
|
Replace the :short_codes: with their corresponding unicode values. Avoid
replacing short codes inside verbatim tick (`) marks.
|
[
"Replace",
"the",
":",
"short_codes",
":",
"with",
"their",
"corresponding",
"unicode",
"values",
".",
"Avoid",
"replacing",
"short",
"codes",
"inside",
"verbatim",
"tick",
"(",
")",
"marks",
"."
] |
def emojize(text):
"""
Replace the :short_codes: with their corresponding unicode values. Avoid
replacing short codes inside verbatim tick (`) marks.
"""
if not USE_EMOJIS:
return text
pos = 0
result = ""
verbatim = False
verbatim_block = False
while pos < len(text):
chunk = text[pos]
if text[pos] == "`":
if text[pos + 1 : pos + 3] == "``":
verbatim_block = not verbatim_block
if not verbatim_block:
verbatim = not verbatim
if text[pos] == ":" and not verbatim and not verbatim_block:
end_pos = text.find(":", pos + 1)
if end_pos > pos + 1:
emoji = Emojis.get(text[pos + 1 : end_pos])
if emoji:
chunk = emoji
pos = end_pos
result += chunk
pos += 1
return result
|
[
"def",
"emojize",
"(",
"text",
")",
":",
"if",
"not",
"USE_EMOJIS",
":",
"return",
"text",
"pos",
"=",
"0",
"result",
"=",
"\"\"",
"verbatim",
"=",
"False",
"verbatim_block",
"=",
"False",
"while",
"pos",
"<",
"len",
"(",
"text",
")",
":",
"chunk",
"=",
"text",
"[",
"pos",
"]",
"if",
"text",
"[",
"pos",
"]",
"==",
"\"`\"",
":",
"if",
"text",
"[",
"pos",
"+",
"1",
":",
"pos",
"+",
"3",
"]",
"==",
"\"``\"",
":",
"verbatim_block",
"=",
"not",
"verbatim_block",
"if",
"not",
"verbatim_block",
":",
"verbatim",
"=",
"not",
"verbatim",
"if",
"text",
"[",
"pos",
"]",
"==",
"\":\"",
"and",
"not",
"verbatim",
"and",
"not",
"verbatim_block",
":",
"end_pos",
"=",
"text",
".",
"find",
"(",
"\":\"",
",",
"pos",
"+",
"1",
")",
"if",
"end_pos",
">",
"pos",
"+",
"1",
":",
"emoji",
"=",
"Emojis",
".",
"get",
"(",
"text",
"[",
"pos",
"+",
"1",
":",
"end_pos",
"]",
")",
"if",
"emoji",
":",
"chunk",
"=",
"emoji",
"pos",
"=",
"end_pos",
"result",
"+=",
"chunk",
"pos",
"+=",
"1",
"return",
"result"
] |
https://github.com/regisb/slack-cli/blob/a9c9140e32e23de1c2add89a42ee6363b811b6f6/slackcli/emoji.py#L48-L76
|
|
QCoDeS/Qcodes
|
3cda2cef44812e2aa4672781f2423bf5f816f9f9
|
qcodes/loops.py
|
python
|
Loop.loop
|
(self, sweep_values, delay=0)
|
return out
|
Nest another loop inside this one.
Args:
sweep_values:
delay (int):
Examples:
>>> Loop(sv1, d1).loop(sv2, d2).each(*a)
is equivalent to:
>>> Loop(sv1, d1).each(Loop(sv2, d2).each(*a))
Returns: a new Loop object - the original is untouched
|
Nest another loop inside this one.
|
[
"Nest",
"another",
"loop",
"inside",
"this",
"one",
"."
] |
def loop(self, sweep_values, delay=0):
"""
Nest another loop inside this one.
Args:
sweep_values:
delay (int):
Examples:
>>> Loop(sv1, d1).loop(sv2, d2).each(*a)
is equivalent to:
>>> Loop(sv1, d1).each(Loop(sv2, d2).each(*a))
Returns: a new Loop object - the original is untouched
"""
out = self._copy()
if out.nested_loop:
# nest this new loop inside the deepest level
out.nested_loop = out.nested_loop.loop(sweep_values, delay)
else:
out.nested_loop = Loop(sweep_values, delay)
return out
|
[
"def",
"loop",
"(",
"self",
",",
"sweep_values",
",",
"delay",
"=",
"0",
")",
":",
"out",
"=",
"self",
".",
"_copy",
"(",
")",
"if",
"out",
".",
"nested_loop",
":",
"# nest this new loop inside the deepest level",
"out",
".",
"nested_loop",
"=",
"out",
".",
"nested_loop",
".",
"loop",
"(",
"sweep_values",
",",
"delay",
")",
"else",
":",
"out",
".",
"nested_loop",
"=",
"Loop",
"(",
"sweep_values",
",",
"delay",
")",
"return",
"out"
] |
https://github.com/QCoDeS/Qcodes/blob/3cda2cef44812e2aa4672781f2423bf5f816f9f9/qcodes/loops.py#L130-L155
|
|
microsoft/MMdnn
|
19562a381c27545984a216eda7591430e274e518
|
mmdnn/conversion/examples/tensorflow/models/inception_resnet_v2.py
|
python
|
inception_resnet_v2
|
(inputs, num_classes=1001, is_training=True,
dropout_keep_prob=0.8,
reuse=None,
scope='InceptionResnetV2',
create_aux_logits=True,
activation_fn=tf.nn.relu)
|
Creates the Inception Resnet V2 model.
Args:
inputs: a 4-D tensor of size [batch_size, height, width, 3].
Dimension batch_size may be undefined. If create_aux_logits is false,
also height and width may be undefined.
num_classes: number of predicted classes. If 0 or None, the logits layer
is omitted and the input features to the logits layer (before dropout)
are returned instead.
is_training: whether is training or not.
dropout_keep_prob: float, the fraction to keep before final layer.
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
scope: Optional variable_scope.
create_aux_logits: Whether to include the auxilliary logits.
activation_fn: Activation function for conv2d.
Returns:
net: the output of the logits layer (if num_classes is a non-zero integer),
or the non-dropped-out input to the logits layer (if num_classes is 0 or
None).
end_points: the set of end_points from the inception model.
|
Creates the Inception Resnet V2 model.
|
[
"Creates",
"the",
"Inception",
"Resnet",
"V2",
"model",
"."
] |
def inception_resnet_v2(inputs, num_classes=1001, is_training=True,
dropout_keep_prob=0.8,
reuse=None,
scope='InceptionResnetV2',
create_aux_logits=True,
activation_fn=tf.nn.relu):
"""Creates the Inception Resnet V2 model.
Args:
inputs: a 4-D tensor of size [batch_size, height, width, 3].
Dimension batch_size may be undefined. If create_aux_logits is false,
also height and width may be undefined.
num_classes: number of predicted classes. If 0 or None, the logits layer
is omitted and the input features to the logits layer (before dropout)
are returned instead.
is_training: whether is training or not.
dropout_keep_prob: float, the fraction to keep before final layer.
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
scope: Optional variable_scope.
create_aux_logits: Whether to include the auxilliary logits.
activation_fn: Activation function for conv2d.
Returns:
net: the output of the logits layer (if num_classes is a non-zero integer),
or the non-dropped-out input to the logits layer (if num_classes is 0 or
None).
end_points: the set of end_points from the inception model.
"""
end_points = {}
with tf.variable_scope(scope, 'InceptionResnetV2', [inputs],
reuse=reuse) as scope:
with slim.arg_scope([slim.batch_norm, slim.dropout],
is_training=is_training):
net, end_points = inception_resnet_v2_base(inputs, scope=scope,
activation_fn=activation_fn)
if create_aux_logits and num_classes:
with tf.variable_scope('AuxLogits'):
aux = end_points['PreAuxLogits']
aux = slim.avg_pool2d(aux, 5, stride=3, padding='VALID',
scope='Conv2d_1a_3x3')
aux = slim.conv2d(aux, 128, 1, scope='Conv2d_1b_1x1')
aux = slim.conv2d(aux, 768, aux.get_shape()[1:3],
padding='VALID', scope='Conv2d_2a_5x5')
aux = slim.flatten(aux)
aux = slim.fully_connected(aux, num_classes, activation_fn=None,
scope='Logits')
end_points['AuxLogits'] = aux
with tf.variable_scope('Logits'):
# TODO(sguada,arnoegw): Consider adding a parameter global_pool which
# can be set to False to disable pooling here (as in resnet_*()).
kernel_size = net.get_shape()[1:3]
if kernel_size.is_fully_defined():
net = slim.avg_pool2d(net, kernel_size, padding='VALID',
scope='AvgPool_1a_8x8')
else:
net = tf.reduce_mean(net, [1, 2], keep_dims=True, name='global_pool')
end_points['global_pool'] = net
if not num_classes:
return net, end_points
net = slim.flatten(net)
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='Dropout')
end_points['PreLogitsFlatten'] = net
logits = slim.fully_connected(net, num_classes, activation_fn=None,
scope='Logits')
end_points['Logits'] = logits
end_points['Predictions'] = tf.nn.softmax(logits, name='Predictions')
return logits, end_points
|
[
"def",
"inception_resnet_v2",
"(",
"inputs",
",",
"num_classes",
"=",
"1001",
",",
"is_training",
"=",
"True",
",",
"dropout_keep_prob",
"=",
"0.8",
",",
"reuse",
"=",
"None",
",",
"scope",
"=",
"'InceptionResnetV2'",
",",
"create_aux_logits",
"=",
"True",
",",
"activation_fn",
"=",
"tf",
".",
"nn",
".",
"relu",
")",
":",
"end_points",
"=",
"{",
"}",
"with",
"tf",
".",
"variable_scope",
"(",
"scope",
",",
"'InceptionResnetV2'",
",",
"[",
"inputs",
"]",
",",
"reuse",
"=",
"reuse",
")",
"as",
"scope",
":",
"with",
"slim",
".",
"arg_scope",
"(",
"[",
"slim",
".",
"batch_norm",
",",
"slim",
".",
"dropout",
"]",
",",
"is_training",
"=",
"is_training",
")",
":",
"net",
",",
"end_points",
"=",
"inception_resnet_v2_base",
"(",
"inputs",
",",
"scope",
"=",
"scope",
",",
"activation_fn",
"=",
"activation_fn",
")",
"if",
"create_aux_logits",
"and",
"num_classes",
":",
"with",
"tf",
".",
"variable_scope",
"(",
"'AuxLogits'",
")",
":",
"aux",
"=",
"end_points",
"[",
"'PreAuxLogits'",
"]",
"aux",
"=",
"slim",
".",
"avg_pool2d",
"(",
"aux",
",",
"5",
",",
"stride",
"=",
"3",
",",
"padding",
"=",
"'VALID'",
",",
"scope",
"=",
"'Conv2d_1a_3x3'",
")",
"aux",
"=",
"slim",
".",
"conv2d",
"(",
"aux",
",",
"128",
",",
"1",
",",
"scope",
"=",
"'Conv2d_1b_1x1'",
")",
"aux",
"=",
"slim",
".",
"conv2d",
"(",
"aux",
",",
"768",
",",
"aux",
".",
"get_shape",
"(",
")",
"[",
"1",
":",
"3",
"]",
",",
"padding",
"=",
"'VALID'",
",",
"scope",
"=",
"'Conv2d_2a_5x5'",
")",
"aux",
"=",
"slim",
".",
"flatten",
"(",
"aux",
")",
"aux",
"=",
"slim",
".",
"fully_connected",
"(",
"aux",
",",
"num_classes",
",",
"activation_fn",
"=",
"None",
",",
"scope",
"=",
"'Logits'",
")",
"end_points",
"[",
"'AuxLogits'",
"]",
"=",
"aux",
"with",
"tf",
".",
"variable_scope",
"(",
"'Logits'",
")",
":",
"# TODO(sguada,arnoegw): Consider adding a parameter global_pool which",
"# can be set to False to disable pooling here (as in resnet_*()).",
"kernel_size",
"=",
"net",
".",
"get_shape",
"(",
")",
"[",
"1",
":",
"3",
"]",
"if",
"kernel_size",
".",
"is_fully_defined",
"(",
")",
":",
"net",
"=",
"slim",
".",
"avg_pool2d",
"(",
"net",
",",
"kernel_size",
",",
"padding",
"=",
"'VALID'",
",",
"scope",
"=",
"'AvgPool_1a_8x8'",
")",
"else",
":",
"net",
"=",
"tf",
".",
"reduce_mean",
"(",
"net",
",",
"[",
"1",
",",
"2",
"]",
",",
"keep_dims",
"=",
"True",
",",
"name",
"=",
"'global_pool'",
")",
"end_points",
"[",
"'global_pool'",
"]",
"=",
"net",
"if",
"not",
"num_classes",
":",
"return",
"net",
",",
"end_points",
"net",
"=",
"slim",
".",
"flatten",
"(",
"net",
")",
"net",
"=",
"slim",
".",
"dropout",
"(",
"net",
",",
"dropout_keep_prob",
",",
"is_training",
"=",
"is_training",
",",
"scope",
"=",
"'Dropout'",
")",
"end_points",
"[",
"'PreLogitsFlatten'",
"]",
"=",
"net",
"logits",
"=",
"slim",
".",
"fully_connected",
"(",
"net",
",",
"num_classes",
",",
"activation_fn",
"=",
"None",
",",
"scope",
"=",
"'Logits'",
")",
"end_points",
"[",
"'Logits'",
"]",
"=",
"logits",
"end_points",
"[",
"'Predictions'",
"]",
"=",
"tf",
".",
"nn",
".",
"softmax",
"(",
"logits",
",",
"name",
"=",
"'Predictions'",
")",
"return",
"logits",
",",
"end_points"
] |
https://github.com/microsoft/MMdnn/blob/19562a381c27545984a216eda7591430e274e518/mmdnn/conversion/examples/tensorflow/models/inception_resnet_v2.py#L291-L364
|
||
tp4a/teleport
|
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
|
server/www/packages/packages-windows/x86/idna/uts46data.py
|
python
|
_seg_54
|
()
|
return [
(0x104C0, 'M', u'𐓨'),
(0x104C1, 'M', u'𐓩'),
(0x104C2, 'M', u'𐓪'),
(0x104C3, 'M', u'𐓫'),
(0x104C4, 'M', u'𐓬'),
(0x104C5, 'M', u'𐓭'),
(0x104C6, 'M', u'𐓮'),
(0x104C7, 'M', u'𐓯'),
(0x104C8, 'M', u'𐓰'),
(0x104C9, 'M', u'𐓱'),
(0x104CA, 'M', u'𐓲'),
(0x104CB, 'M', u'𐓳'),
(0x104CC, 'M', u'𐓴'),
(0x104CD, 'M', u'𐓵'),
(0x104CE, 'M', u'𐓶'),
(0x104CF, 'M', u'𐓷'),
(0x104D0, 'M', u'𐓸'),
(0x104D1, 'M', u'𐓹'),
(0x104D2, 'M', u'𐓺'),
(0x104D3, 'M', u'𐓻'),
(0x104D4, 'X'),
(0x104D8, 'V'),
(0x104FC, 'X'),
(0x10500, 'V'),
(0x10528, 'X'),
(0x10530, 'V'),
(0x10564, 'X'),
(0x1056F, 'V'),
(0x10570, 'X'),
(0x10600, 'V'),
(0x10737, 'X'),
(0x10740, 'V'),
(0x10756, 'X'),
(0x10760, 'V'),
(0x10768, 'X'),
(0x10800, 'V'),
(0x10806, 'X'),
(0x10808, 'V'),
(0x10809, 'X'),
(0x1080A, 'V'),
(0x10836, 'X'),
(0x10837, 'V'),
(0x10839, 'X'),
(0x1083C, 'V'),
(0x1083D, 'X'),
(0x1083F, 'V'),
(0x10856, 'X'),
(0x10857, 'V'),
(0x1089F, 'X'),
(0x108A7, 'V'),
(0x108B0, 'X'),
(0x108E0, 'V'),
(0x108F3, 'X'),
(0x108F4, 'V'),
(0x108F6, 'X'),
(0x108FB, 'V'),
(0x1091C, 'X'),
(0x1091F, 'V'),
(0x1093A, 'X'),
(0x1093F, 'V'),
(0x10940, 'X'),
(0x10980, 'V'),
(0x109B8, 'X'),
(0x109BC, 'V'),
(0x109D0, 'X'),
(0x109D2, 'V'),
(0x10A04, 'X'),
(0x10A05, 'V'),
(0x10A07, 'X'),
(0x10A0C, 'V'),
(0x10A14, 'X'),
(0x10A15, 'V'),
(0x10A18, 'X'),
(0x10A19, 'V'),
(0x10A36, 'X'),
(0x10A38, 'V'),
(0x10A3B, 'X'),
(0x10A3F, 'V'),
(0x10A49, 'X'),
(0x10A50, 'V'),
(0x10A59, 'X'),
(0x10A60, 'V'),
(0x10AA0, 'X'),
(0x10AC0, 'V'),
(0x10AE7, 'X'),
(0x10AEB, 'V'),
(0x10AF7, 'X'),
(0x10B00, 'V'),
(0x10B36, 'X'),
(0x10B39, 'V'),
(0x10B56, 'X'),
(0x10B58, 'V'),
(0x10B73, 'X'),
(0x10B78, 'V'),
(0x10B92, 'X'),
(0x10B99, 'V'),
(0x10B9D, 'X'),
(0x10BA9, 'V'),
(0x10BB0, 'X'),
(0x10C00, 'V'),
]
|
[] |
def _seg_54():
return [
(0x104C0, 'M', u'𐓨'),
(0x104C1, 'M', u'𐓩'),
(0x104C2, 'M', u'𐓪'),
(0x104C3, 'M', u'𐓫'),
(0x104C4, 'M', u'𐓬'),
(0x104C5, 'M', u'𐓭'),
(0x104C6, 'M', u'𐓮'),
(0x104C7, 'M', u'𐓯'),
(0x104C8, 'M', u'𐓰'),
(0x104C9, 'M', u'𐓱'),
(0x104CA, 'M', u'𐓲'),
(0x104CB, 'M', u'𐓳'),
(0x104CC, 'M', u'𐓴'),
(0x104CD, 'M', u'𐓵'),
(0x104CE, 'M', u'𐓶'),
(0x104CF, 'M', u'𐓷'),
(0x104D0, 'M', u'𐓸'),
(0x104D1, 'M', u'𐓹'),
(0x104D2, 'M', u'𐓺'),
(0x104D3, 'M', u'𐓻'),
(0x104D4, 'X'),
(0x104D8, 'V'),
(0x104FC, 'X'),
(0x10500, 'V'),
(0x10528, 'X'),
(0x10530, 'V'),
(0x10564, 'X'),
(0x1056F, 'V'),
(0x10570, 'X'),
(0x10600, 'V'),
(0x10737, 'X'),
(0x10740, 'V'),
(0x10756, 'X'),
(0x10760, 'V'),
(0x10768, 'X'),
(0x10800, 'V'),
(0x10806, 'X'),
(0x10808, 'V'),
(0x10809, 'X'),
(0x1080A, 'V'),
(0x10836, 'X'),
(0x10837, 'V'),
(0x10839, 'X'),
(0x1083C, 'V'),
(0x1083D, 'X'),
(0x1083F, 'V'),
(0x10856, 'X'),
(0x10857, 'V'),
(0x1089F, 'X'),
(0x108A7, 'V'),
(0x108B0, 'X'),
(0x108E0, 'V'),
(0x108F3, 'X'),
(0x108F4, 'V'),
(0x108F6, 'X'),
(0x108FB, 'V'),
(0x1091C, 'X'),
(0x1091F, 'V'),
(0x1093A, 'X'),
(0x1093F, 'V'),
(0x10940, 'X'),
(0x10980, 'V'),
(0x109B8, 'X'),
(0x109BC, 'V'),
(0x109D0, 'X'),
(0x109D2, 'V'),
(0x10A04, 'X'),
(0x10A05, 'V'),
(0x10A07, 'X'),
(0x10A0C, 'V'),
(0x10A14, 'X'),
(0x10A15, 'V'),
(0x10A18, 'X'),
(0x10A19, 'V'),
(0x10A36, 'X'),
(0x10A38, 'V'),
(0x10A3B, 'X'),
(0x10A3F, 'V'),
(0x10A49, 'X'),
(0x10A50, 'V'),
(0x10A59, 'X'),
(0x10A60, 'V'),
(0x10AA0, 'X'),
(0x10AC0, 'V'),
(0x10AE7, 'X'),
(0x10AEB, 'V'),
(0x10AF7, 'X'),
(0x10B00, 'V'),
(0x10B36, 'X'),
(0x10B39, 'V'),
(0x10B56, 'X'),
(0x10B58, 'V'),
(0x10B73, 'X'),
(0x10B78, 'V'),
(0x10B92, 'X'),
(0x10B99, 'V'),
(0x10B9D, 'X'),
(0x10BA9, 'V'),
(0x10BB0, 'X'),
(0x10C00, 'V'),
]
|
[
"def",
"_seg_54",
"(",
")",
":",
"return",
"[",
"(",
"0x104C0",
",",
"'M'",
",",
"u'𐓨'),",
"",
"",
"(",
"0x104C1",
",",
"'M'",
",",
"u'𐓩'),",
"",
"",
"(",
"0x104C2",
",",
"'M'",
",",
"u'𐓪'),",
"",
"",
"(",
"0x104C3",
",",
"'M'",
",",
"u'𐓫'),",
"",
"",
"(",
"0x104C4",
",",
"'M'",
",",
"u'𐓬'),",
"",
"",
"(",
"0x104C5",
",",
"'M'",
",",
"u'𐓭'),",
"",
"",
"(",
"0x104C6",
",",
"'M'",
",",
"u'𐓮'),",
"",
"",
"(",
"0x104C7",
",",
"'M'",
",",
"u'𐓯'),",
"",
"",
"(",
"0x104C8",
",",
"'M'",
",",
"u'𐓰'),",
"",
"",
"(",
"0x104C9",
",",
"'M'",
",",
"u'𐓱'),",
"",
"",
"(",
"0x104CA",
",",
"'M'",
",",
"u'𐓲'),",
"",
"",
"(",
"0x104CB",
",",
"'M'",
",",
"u'𐓳'),",
"",
"",
"(",
"0x104CC",
",",
"'M'",
",",
"u'𐓴'),",
"",
"",
"(",
"0x104CD",
",",
"'M'",
",",
"u'𐓵'),",
"",
"",
"(",
"0x104CE",
",",
"'M'",
",",
"u'𐓶'),",
"",
"",
"(",
"0x104CF",
",",
"'M'",
",",
"u'𐓷'),",
"",
"",
"(",
"0x104D0",
",",
"'M'",
",",
"u'𐓸'),",
"",
"",
"(",
"0x104D1",
",",
"'M'",
",",
"u'𐓹'),",
"",
"",
"(",
"0x104D2",
",",
"'M'",
",",
"u'𐓺'),",
"",
"",
"(",
"0x104D3",
",",
"'M'",
",",
"u'𐓻'),",
"",
"",
"(",
"0x104D4",
",",
"'X'",
")",
",",
"(",
"0x104D8",
",",
"'V'",
")",
",",
"(",
"0x104FC",
",",
"'X'",
")",
",",
"(",
"0x10500",
",",
"'V'",
")",
",",
"(",
"0x10528",
",",
"'X'",
")",
",",
"(",
"0x10530",
",",
"'V'",
")",
",",
"(",
"0x10564",
",",
"'X'",
")",
",",
"(",
"0x1056F",
",",
"'V'",
")",
",",
"(",
"0x10570",
",",
"'X'",
")",
",",
"(",
"0x10600",
",",
"'V'",
")",
",",
"(",
"0x10737",
",",
"'X'",
")",
",",
"(",
"0x10740",
",",
"'V'",
")",
",",
"(",
"0x10756",
",",
"'X'",
")",
",",
"(",
"0x10760",
",",
"'V'",
")",
",",
"(",
"0x10768",
",",
"'X'",
")",
",",
"(",
"0x10800",
",",
"'V'",
")",
",",
"(",
"0x10806",
",",
"'X'",
")",
",",
"(",
"0x10808",
",",
"'V'",
")",
",",
"(",
"0x10809",
",",
"'X'",
")",
",",
"(",
"0x1080A",
",",
"'V'",
")",
",",
"(",
"0x10836",
",",
"'X'",
")",
",",
"(",
"0x10837",
",",
"'V'",
")",
",",
"(",
"0x10839",
",",
"'X'",
")",
",",
"(",
"0x1083C",
",",
"'V'",
")",
",",
"(",
"0x1083D",
",",
"'X'",
")",
",",
"(",
"0x1083F",
",",
"'V'",
")",
",",
"(",
"0x10856",
",",
"'X'",
")",
",",
"(",
"0x10857",
",",
"'V'",
")",
",",
"(",
"0x1089F",
",",
"'X'",
")",
",",
"(",
"0x108A7",
",",
"'V'",
")",
",",
"(",
"0x108B0",
",",
"'X'",
")",
",",
"(",
"0x108E0",
",",
"'V'",
")",
",",
"(",
"0x108F3",
",",
"'X'",
")",
",",
"(",
"0x108F4",
",",
"'V'",
")",
",",
"(",
"0x108F6",
",",
"'X'",
")",
",",
"(",
"0x108FB",
",",
"'V'",
")",
",",
"(",
"0x1091C",
",",
"'X'",
")",
",",
"(",
"0x1091F",
",",
"'V'",
")",
",",
"(",
"0x1093A",
",",
"'X'",
")",
",",
"(",
"0x1093F",
",",
"'V'",
")",
",",
"(",
"0x10940",
",",
"'X'",
")",
",",
"(",
"0x10980",
",",
"'V'",
")",
",",
"(",
"0x109B8",
",",
"'X'",
")",
",",
"(",
"0x109BC",
",",
"'V'",
")",
",",
"(",
"0x109D0",
",",
"'X'",
")",
",",
"(",
"0x109D2",
",",
"'V'",
")",
",",
"(",
"0x10A04",
",",
"'X'",
")",
",",
"(",
"0x10A05",
",",
"'V'",
")",
",",
"(",
"0x10A07",
",",
"'X'",
")",
",",
"(",
"0x10A0C",
",",
"'V'",
")",
",",
"(",
"0x10A14",
",",
"'X'",
")",
",",
"(",
"0x10A15",
",",
"'V'",
")",
",",
"(",
"0x10A18",
",",
"'X'",
")",
",",
"(",
"0x10A19",
",",
"'V'",
")",
",",
"(",
"0x10A36",
",",
"'X'",
")",
",",
"(",
"0x10A38",
",",
"'V'",
")",
",",
"(",
"0x10A3B",
",",
"'X'",
")",
",",
"(",
"0x10A3F",
",",
"'V'",
")",
",",
"(",
"0x10A49",
",",
"'X'",
")",
",",
"(",
"0x10A50",
",",
"'V'",
")",
",",
"(",
"0x10A59",
",",
"'X'",
")",
",",
"(",
"0x10A60",
",",
"'V'",
")",
",",
"(",
"0x10AA0",
",",
"'X'",
")",
",",
"(",
"0x10AC0",
",",
"'V'",
")",
",",
"(",
"0x10AE7",
",",
"'X'",
")",
",",
"(",
"0x10AEB",
",",
"'V'",
")",
",",
"(",
"0x10AF7",
",",
"'X'",
")",
",",
"(",
"0x10B00",
",",
"'V'",
")",
",",
"(",
"0x10B36",
",",
"'X'",
")",
",",
"(",
"0x10B39",
",",
"'V'",
")",
",",
"(",
"0x10B56",
",",
"'X'",
")",
",",
"(",
"0x10B58",
",",
"'V'",
")",
",",
"(",
"0x10B73",
",",
"'X'",
")",
",",
"(",
"0x10B78",
",",
"'V'",
")",
",",
"(",
"0x10B92",
",",
"'X'",
")",
",",
"(",
"0x10B99",
",",
"'V'",
")",
",",
"(",
"0x10B9D",
",",
"'X'",
")",
",",
"(",
"0x10BA9",
",",
"'V'",
")",
",",
"(",
"0x10BB0",
",",
"'X'",
")",
",",
"(",
"0x10C00",
",",
"'V'",
")",
",",
"]"
] |
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-windows/x86/idna/uts46data.py#L5624-L5726
|
|||
tensorflow/lingvo
|
ce10019243d954c3c3ebe739f7589b5eebfdf907
|
lingvo/core/batch_major_attention.py
|
python
|
LocalSelfAttention._StreamStepStaticComputeKeyValue3d
|
(self, theta, indices, inputs, state0)
|
return key, value, state1
|
Computes key/value tensors in use_3d_recurrent_state mode.
This mode treats state like a circular buffer, and uses scatter_nd_update
to update that buffer. This in-place update may be cheaper than using
tf.concat.
(Don't use this method when in minimize_state_size mode, you want the
_StreamStepStaticComputeKeyValueMinimal even if you're using
use_3d_recurrent_state mode)
Args:
theta: The theta NestedMap for this layer.
indices: Locations to store new recurrent state in the circular buffer
when in 3d mode.
inputs: [B, Q, D]: The inputs for this step, note that Q>=1.
state0: The recurrent state.
Returns:
key: [B, S, N, H]: Queries projected into key space.
value: [B, S, N, H]: Queries projected into value space.
state1: Updated recurrent state.
|
Computes key/value tensors in use_3d_recurrent_state mode.
|
[
"Computes",
"key",
"/",
"value",
"tensors",
"in",
"use_3d_recurrent_state",
"mode",
"."
] |
def _StreamStepStaticComputeKeyValue3d(self, theta, indices, inputs, state0):
"""Computes key/value tensors in use_3d_recurrent_state mode.
This mode treats state like a circular buffer, and uses scatter_nd_update
to update that buffer. This in-place update may be cheaper than using
tf.concat.
(Don't use this method when in minimize_state_size mode, you want the
_StreamStepStaticComputeKeyValueMinimal even if you're using
use_3d_recurrent_state mode)
Args:
theta: The theta NestedMap for this layer.
indices: Locations to store new recurrent state in the circular buffer
when in 3d mode.
inputs: [B, Q, D]: The inputs for this step, note that Q>=1.
state0: The recurrent state.
Returns:
key: [B, S, N, H]: Queries projected into key space.
value: [B, S, N, H]: Queries projected into value space.
state1: Updated recurrent state.
"""
dims = self._StreamStepDimensions(inputs)
state0.key = py_utils.HasShape(state0.key,
[dims.b, dims.s, dims.n * dims.h])
state0.value = py_utils.HasShape(state0.value,
py_utils.GetShape(state0.key))
def get_next_state(recur_state, inputs): # pylint:disable=invalid-name
next_state = tf.tensor_scatter_nd_update(recur_state, indices, inputs)
# [B, S, N, H]
outputs = tf.reshape(next_state, [dims.b, dims.s, dims.n, dims.h])
return outputs, next_state
# [B, Q, N * H]
incr_key = tf.einsum(
'DH,BTD->BTH',
tf.reshape(theta.key.w, [self.key.params.input_dim, dims.n * dims.h]),
inputs) + tf.reshape(theta.key.b, [-1])
# [B, Q, N * H]
incr_value = tf.einsum(
'DH,BTD->BTH',
tf.reshape(theta.value.w,
[self.value.params.input_dim, dims.n * dims.h]),
inputs) + tf.reshape(theta.value.b, [-1])
# [B, S, N, H], [B, S, N * H]
key, next_key = get_next_state(state0.key, incr_key)
# [B, S, N, H], [B, S, N * H]
value, next_value = get_next_state(state0.value, incr_value)
state1 = py_utils.NestedMap(key=next_key, value=next_value)
return key, value, state1
|
[
"def",
"_StreamStepStaticComputeKeyValue3d",
"(",
"self",
",",
"theta",
",",
"indices",
",",
"inputs",
",",
"state0",
")",
":",
"dims",
"=",
"self",
".",
"_StreamStepDimensions",
"(",
"inputs",
")",
"state0",
".",
"key",
"=",
"py_utils",
".",
"HasShape",
"(",
"state0",
".",
"key",
",",
"[",
"dims",
".",
"b",
",",
"dims",
".",
"s",
",",
"dims",
".",
"n",
"*",
"dims",
".",
"h",
"]",
")",
"state0",
".",
"value",
"=",
"py_utils",
".",
"HasShape",
"(",
"state0",
".",
"value",
",",
"py_utils",
".",
"GetShape",
"(",
"state0",
".",
"key",
")",
")",
"def",
"get_next_state",
"(",
"recur_state",
",",
"inputs",
")",
":",
"# pylint:disable=invalid-name",
"next_state",
"=",
"tf",
".",
"tensor_scatter_nd_update",
"(",
"recur_state",
",",
"indices",
",",
"inputs",
")",
"# [B, S, N, H]",
"outputs",
"=",
"tf",
".",
"reshape",
"(",
"next_state",
",",
"[",
"dims",
".",
"b",
",",
"dims",
".",
"s",
",",
"dims",
".",
"n",
",",
"dims",
".",
"h",
"]",
")",
"return",
"outputs",
",",
"next_state",
"# [B, Q, N * H]",
"incr_key",
"=",
"tf",
".",
"einsum",
"(",
"'DH,BTD->BTH'",
",",
"tf",
".",
"reshape",
"(",
"theta",
".",
"key",
".",
"w",
",",
"[",
"self",
".",
"key",
".",
"params",
".",
"input_dim",
",",
"dims",
".",
"n",
"*",
"dims",
".",
"h",
"]",
")",
",",
"inputs",
")",
"+",
"tf",
".",
"reshape",
"(",
"theta",
".",
"key",
".",
"b",
",",
"[",
"-",
"1",
"]",
")",
"# [B, Q, N * H]",
"incr_value",
"=",
"tf",
".",
"einsum",
"(",
"'DH,BTD->BTH'",
",",
"tf",
".",
"reshape",
"(",
"theta",
".",
"value",
".",
"w",
",",
"[",
"self",
".",
"value",
".",
"params",
".",
"input_dim",
",",
"dims",
".",
"n",
"*",
"dims",
".",
"h",
"]",
")",
",",
"inputs",
")",
"+",
"tf",
".",
"reshape",
"(",
"theta",
".",
"value",
".",
"b",
",",
"[",
"-",
"1",
"]",
")",
"# [B, S, N, H], [B, S, N * H]",
"key",
",",
"next_key",
"=",
"get_next_state",
"(",
"state0",
".",
"key",
",",
"incr_key",
")",
"# [B, S, N, H], [B, S, N * H]",
"value",
",",
"next_value",
"=",
"get_next_state",
"(",
"state0",
".",
"value",
",",
"incr_value",
")",
"state1",
"=",
"py_utils",
".",
"NestedMap",
"(",
"key",
"=",
"next_key",
",",
"value",
"=",
"next_value",
")",
"return",
"key",
",",
"value",
",",
"state1"
] |
https://github.com/tensorflow/lingvo/blob/ce10019243d954c3c3ebe739f7589b5eebfdf907/lingvo/core/batch_major_attention.py#L2274-L2328
|
|
howie6879/Sanic-For-Pythoneer
|
1006db52f38de7a568f8e2437589c45dd4cc2235
|
examples/demo03/sample02/src/views/rss.py
|
python
|
index
|
(request)
|
return json(data)
|
[] |
async def index(request):
url = "http://blog.howie6879.cn/atom.xml"
feed = parse(url)
articles = feed['entries']
data = []
for article in articles:
data.append({"title": article["title_detail"]["value"], "link": article["link"]})
return json(data)
|
[
"async",
"def",
"index",
"(",
"request",
")",
":",
"url",
"=",
"\"http://blog.howie6879.cn/atom.xml\"",
"feed",
"=",
"parse",
"(",
"url",
")",
"articles",
"=",
"feed",
"[",
"'entries'",
"]",
"data",
"=",
"[",
"]",
"for",
"article",
"in",
"articles",
":",
"data",
".",
"append",
"(",
"{",
"\"title\"",
":",
"article",
"[",
"\"title_detail\"",
"]",
"[",
"\"value\"",
"]",
",",
"\"link\"",
":",
"article",
"[",
"\"link\"",
"]",
"}",
")",
"return",
"json",
"(",
"data",
")"
] |
https://github.com/howie6879/Sanic-For-Pythoneer/blob/1006db52f38de7a568f8e2437589c45dd4cc2235/examples/demo03/sample02/src/views/rss.py#L29-L36
|
|||
cloudera/hue
|
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
|
desktop/core/ext-py/PyYAML-5.4.1/lib/yaml/__init__.py
|
python
|
emit
|
(events, stream=None, Dumper=Dumper,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None)
|
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
|
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
|
[
"Emit",
"YAML",
"parsing",
"events",
"into",
"a",
"stream",
".",
"If",
"stream",
"is",
"None",
"return",
"the",
"produced",
"string",
"instead",
"."
] |
def emit(events, stream=None, Dumper=Dumper,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None):
"""
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
"""
getvalue = None
if stream is None:
from StringIO import StringIO
stream = StringIO()
getvalue = stream.getvalue
dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
try:
for event in events:
dumper.emit(event)
finally:
dumper.dispose()
if getvalue:
return getvalue()
|
[
"def",
"emit",
"(",
"events",
",",
"stream",
"=",
"None",
",",
"Dumper",
"=",
"Dumper",
",",
"canonical",
"=",
"None",
",",
"indent",
"=",
"None",
",",
"width",
"=",
"None",
",",
"allow_unicode",
"=",
"None",
",",
"line_break",
"=",
"None",
")",
":",
"getvalue",
"=",
"None",
"if",
"stream",
"is",
"None",
":",
"from",
"StringIO",
"import",
"StringIO",
"stream",
"=",
"StringIO",
"(",
")",
"getvalue",
"=",
"stream",
".",
"getvalue",
"dumper",
"=",
"Dumper",
"(",
"stream",
",",
"canonical",
"=",
"canonical",
",",
"indent",
"=",
"indent",
",",
"width",
"=",
"width",
",",
"allow_unicode",
"=",
"allow_unicode",
",",
"line_break",
"=",
"line_break",
")",
"try",
":",
"for",
"event",
"in",
"events",
":",
"dumper",
".",
"emit",
"(",
"event",
")",
"finally",
":",
"dumper",
".",
"dispose",
"(",
")",
"if",
"getvalue",
":",
"return",
"getvalue",
"(",
")"
] |
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/PyYAML-5.4.1/lib/yaml/__init__.py#L194-L214
|
||
lmco/laikaboss
|
eb04b4c4fe7bf3fc86c26b8a37451d99e6fc9c17
|
laikaboss/clientLib.py
|
python
|
get_scanObjectUID
|
(scanObject)
|
return scanObject.uuid
|
Get the UID for a ScanObject instance.
Arguments:
scanObject -- a ScanObject instance
Returns:
A string containing the UID of the object.
|
Get the UID for a ScanObject instance.
Arguments:
scanObject -- a ScanObject instance
|
[
"Get",
"the",
"UID",
"for",
"a",
"ScanObject",
"instance",
".",
"Arguments",
":",
"scanObject",
"--",
"a",
"ScanObject",
"instance"
] |
def get_scanObjectUID(scanObject):
'''
Get the UID for a ScanObject instance.
Arguments:
scanObject -- a ScanObject instance
Returns:
A string containing the UID of the object.
'''
return scanObject.uuid
|
[
"def",
"get_scanObjectUID",
"(",
"scanObject",
")",
":",
"return",
"scanObject",
".",
"uuid"
] |
https://github.com/lmco/laikaboss/blob/eb04b4c4fe7bf3fc86c26b8a37451d99e6fc9c17/laikaboss/clientLib.py#L103-L113
|
|
JiYou/openstack
|
8607dd488bde0905044b303eb6e52bdea6806923
|
packages/source/swift/swift/common/manager.py
|
python
|
Server.spawn
|
(self, conf_file, once=False, wait=True, daemon=True, **kwargs)
|
return proc.pid
|
Launch a subprocess for this server.
:param conf_file: path to conf_file to use as first arg
:param once: boolean, add once argument to command
:param wait: boolean, if true capture stdout with a pipe
:param daemon: boolean, if true ask server to log to console
:returns : the pid of the spawned process
|
Launch a subprocess for this server.
|
[
"Launch",
"a",
"subprocess",
"for",
"this",
"server",
"."
] |
def spawn(self, conf_file, once=False, wait=True, daemon=True, **kwargs):
"""Launch a subprocess for this server.
:param conf_file: path to conf_file to use as first arg
:param once: boolean, add once argument to command
:param wait: boolean, if true capture stdout with a pipe
:param daemon: boolean, if true ask server to log to console
:returns : the pid of the spawned process
"""
args = [self.cmd, conf_file]
if once:
args.append('once')
if not daemon:
# ask the server to log to console
args.append('verbose')
# figure out what we're going to do with stdio
if not daemon:
# do nothing, this process is open until the spawns close anyway
re_out = None
re_err = None
else:
re_err = subprocess.STDOUT
if wait:
# we're going to need to block on this...
re_out = subprocess.PIPE
else:
re_out = open(os.devnull, 'w+b')
proc = subprocess.Popen(args, stdout=re_out, stderr=re_err)
pid_file = self.get_pid_file_name(conf_file)
write_file(pid_file, proc.pid)
self.procs.append(proc)
return proc.pid
|
[
"def",
"spawn",
"(",
"self",
",",
"conf_file",
",",
"once",
"=",
"False",
",",
"wait",
"=",
"True",
",",
"daemon",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"[",
"self",
".",
"cmd",
",",
"conf_file",
"]",
"if",
"once",
":",
"args",
".",
"append",
"(",
"'once'",
")",
"if",
"not",
"daemon",
":",
"# ask the server to log to console",
"args",
".",
"append",
"(",
"'verbose'",
")",
"# figure out what we're going to do with stdio",
"if",
"not",
"daemon",
":",
"# do nothing, this process is open until the spawns close anyway",
"re_out",
"=",
"None",
"re_err",
"=",
"None",
"else",
":",
"re_err",
"=",
"subprocess",
".",
"STDOUT",
"if",
"wait",
":",
"# we're going to need to block on this...",
"re_out",
"=",
"subprocess",
".",
"PIPE",
"else",
":",
"re_out",
"=",
"open",
"(",
"os",
".",
"devnull",
",",
"'w+b'",
")",
"proc",
"=",
"subprocess",
".",
"Popen",
"(",
"args",
",",
"stdout",
"=",
"re_out",
",",
"stderr",
"=",
"re_err",
")",
"pid_file",
"=",
"self",
".",
"get_pid_file_name",
"(",
"conf_file",
")",
"write_file",
"(",
"pid_file",
",",
"proc",
".",
"pid",
")",
"self",
".",
"procs",
".",
"append",
"(",
"proc",
")",
"return",
"proc",
".",
"pid"
] |
https://github.com/JiYou/openstack/blob/8607dd488bde0905044b303eb6e52bdea6806923/packages/source/swift/swift/common/manager.py#L506-L539
|
|
edfungus/Crouton
|
ada98b3930192938a48909072b45cb84b945f875
|
clients/python_clients/cf_demo_client/cf_env/lib/python2.7/site-packages/werkzeug/wrappers.py
|
python
|
ETagResponseMixin.freeze
|
(self, no_etag=False)
|
Call this method if you want to make your response object ready for
pickeling. This buffers the generator if there is one. This also
sets the etag unless `no_etag` is set to `True`.
|
Call this method if you want to make your response object ready for
pickeling. This buffers the generator if there is one. This also
sets the etag unless `no_etag` is set to `True`.
|
[
"Call",
"this",
"method",
"if",
"you",
"want",
"to",
"make",
"your",
"response",
"object",
"ready",
"for",
"pickeling",
".",
"This",
"buffers",
"the",
"generator",
"if",
"there",
"is",
"one",
".",
"This",
"also",
"sets",
"the",
"etag",
"unless",
"no_etag",
"is",
"set",
"to",
"True",
"."
] |
def freeze(self, no_etag=False):
"""Call this method if you want to make your response object ready for
pickeling. This buffers the generator if there is one. This also
sets the etag unless `no_etag` is set to `True`.
"""
if not no_etag:
self.add_etag()
super(ETagResponseMixin, self).freeze()
|
[
"def",
"freeze",
"(",
"self",
",",
"no_etag",
"=",
"False",
")",
":",
"if",
"not",
"no_etag",
":",
"self",
".",
"add_etag",
"(",
")",
"super",
"(",
"ETagResponseMixin",
",",
"self",
")",
".",
"freeze",
"(",
")"
] |
https://github.com/edfungus/Crouton/blob/ada98b3930192938a48909072b45cb84b945f875/clients/python_clients/cf_demo_client/cf_env/lib/python2.7/site-packages/werkzeug/wrappers.py#L1460-L1467
|
||
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/pygments/formatters/img.py
|
python
|
ImageFormatter._get_text_color
|
(self, style)
|
return fill
|
Get the correct color for the token from the style.
|
Get the correct color for the token from the style.
|
[
"Get",
"the",
"correct",
"color",
"for",
"the",
"token",
"from",
"the",
"style",
"."
] |
def _get_text_color(self, style):
"""
Get the correct color for the token from the style.
"""
if style['color'] is not None:
fill = '#' + style['color']
else:
fill = '#000'
return fill
|
[
"def",
"_get_text_color",
"(",
"self",
",",
"style",
")",
":",
"if",
"style",
"[",
"'color'",
"]",
"is",
"not",
"None",
":",
"fill",
"=",
"'#'",
"+",
"style",
"[",
"'color'",
"]",
"else",
":",
"fill",
"=",
"'#000'",
"return",
"fill"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/pygments/formatters/img.py#L426-L434
|
|
JasonKessler/scattertext
|
ef33f06d4c31f9d64b551a7ab86bf157aca82644
|
scattertext/__init__.py
|
python
|
produce_pca_explorer
|
(corpus,
category,
word2vec_model=None,
projection_model=None,
embeddings=None,
projection=None,
term_acceptance_re=re.compile('[a-z]{3,}'),
x_dim=0,
y_dim=1,
scaler=scale,
show_axes=False,
show_dimensions_on_tooltip=True,
x_label='',
y_label='',
**kwargs)
|
return html
|
Parameters
----------
corpus : ParsedCorpus
It is highly recommended to use a stoplisted, unigram corpus-- `corpus.get_stoplisted_unigram_corpus()`
category : str
word2vec_model : Word2Vec
A gensim word2vec model. A default model will be used instead. See Word2VecFromParsedCorpus for the default
model.
projection_model : sklearn-style dimensionality reduction model. Ignored if 'projection' is presents
By default: umap.UMAP(min_dist=0.5, metric='cosine') unless projection is present. If so,
You could also use, e.g., sklearn.manifold.TSNE(perplexity=10, n_components=2, init='pca', n_iter=2500, random_state=23)
embeddings : array[len(corpus.get_terms()), X]
Word embeddings. If None (default), and no value is passed into projection, use word2vec_model
projection : DataFrame('x': array[len(corpus.get_terms())], 'y': array[len(corpus.get_terms())])
If None (default), produced using projection_model
term_acceptance_re : SRE_Pattern,
Regular expression to identify valid terms
x_dim : int, default 0
Dimension of transformation matrix for x-axis
y_dim : int, default 1
Dimension of transformation matrix for y-axis
scalers : function , default scattertext.Scalers.scale
Function used to scale projection
show_axes : bool, default False
Show the ticked axes on the plot. If false, show inner axes as a crosshair.
show_dimensions_on_tooltip : bool, False by default
If true, shows dimension positions on tooltip, along with term name. Otherwise, default to the
get_tooltip_content parameter.
kwargs : dict
Remaining produce_scattertext_explorer keywords get_tooltip_content
Returns
-------
str
HTML of visualization
|
Parameters
----------
corpus : ParsedCorpus
It is highly recommended to use a stoplisted, unigram corpus-- `corpus.get_stoplisted_unigram_corpus()`
category : str
word2vec_model : Word2Vec
A gensim word2vec model. A default model will be used instead. See Word2VecFromParsedCorpus for the default
model.
projection_model : sklearn-style dimensionality reduction model. Ignored if 'projection' is presents
By default: umap.UMAP(min_dist=0.5, metric='cosine') unless projection is present. If so,
You could also use, e.g., sklearn.manifold.TSNE(perplexity=10, n_components=2, init='pca', n_iter=2500, random_state=23)
embeddings : array[len(corpus.get_terms()), X]
Word embeddings. If None (default), and no value is passed into projection, use word2vec_model
projection : DataFrame('x': array[len(corpus.get_terms())], 'y': array[len(corpus.get_terms())])
If None (default), produced using projection_model
term_acceptance_re : SRE_Pattern,
Regular expression to identify valid terms
x_dim : int, default 0
Dimension of transformation matrix for x-axis
y_dim : int, default 1
Dimension of transformation matrix for y-axis
scalers : function , default scattertext.Scalers.scale
Function used to scale projection
show_axes : bool, default False
Show the ticked axes on the plot. If false, show inner axes as a crosshair.
show_dimensions_on_tooltip : bool, False by default
If true, shows dimension positions on tooltip, along with term name. Otherwise, default to the
get_tooltip_content parameter.
kwargs : dict
Remaining produce_scattertext_explorer keywords get_tooltip_content
|
[
"Parameters",
"----------",
"corpus",
":",
"ParsedCorpus",
"It",
"is",
"highly",
"recommended",
"to",
"use",
"a",
"stoplisted",
"unigram",
"corpus",
"--",
"corpus",
".",
"get_stoplisted_unigram_corpus",
"()",
"category",
":",
"str",
"word2vec_model",
":",
"Word2Vec",
"A",
"gensim",
"word2vec",
"model",
".",
"A",
"default",
"model",
"will",
"be",
"used",
"instead",
".",
"See",
"Word2VecFromParsedCorpus",
"for",
"the",
"default",
"model",
".",
"projection_model",
":",
"sklearn",
"-",
"style",
"dimensionality",
"reduction",
"model",
".",
"Ignored",
"if",
"projection",
"is",
"presents",
"By",
"default",
":",
"umap",
".",
"UMAP",
"(",
"min_dist",
"=",
"0",
".",
"5",
"metric",
"=",
"cosine",
")",
"unless",
"projection",
"is",
"present",
".",
"If",
"so",
"You",
"could",
"also",
"use",
"e",
".",
"g",
".",
"sklearn",
".",
"manifold",
".",
"TSNE",
"(",
"perplexity",
"=",
"10",
"n_components",
"=",
"2",
"init",
"=",
"pca",
"n_iter",
"=",
"2500",
"random_state",
"=",
"23",
")",
"embeddings",
":",
"array",
"[",
"len",
"(",
"corpus",
".",
"get_terms",
"()",
")",
"X",
"]",
"Word",
"embeddings",
".",
"If",
"None",
"(",
"default",
")",
"and",
"no",
"value",
"is",
"passed",
"into",
"projection",
"use",
"word2vec_model",
"projection",
":",
"DataFrame",
"(",
"x",
":",
"array",
"[",
"len",
"(",
"corpus",
".",
"get_terms",
"()",
")",
"]",
"y",
":",
"array",
"[",
"len",
"(",
"corpus",
".",
"get_terms",
"()",
")",
"]",
")",
"If",
"None",
"(",
"default",
")",
"produced",
"using",
"projection_model",
"term_acceptance_re",
":",
"SRE_Pattern",
"Regular",
"expression",
"to",
"identify",
"valid",
"terms",
"x_dim",
":",
"int",
"default",
"0",
"Dimension",
"of",
"transformation",
"matrix",
"for",
"x",
"-",
"axis",
"y_dim",
":",
"int",
"default",
"1",
"Dimension",
"of",
"transformation",
"matrix",
"for",
"y",
"-",
"axis",
"scalers",
":",
"function",
"default",
"scattertext",
".",
"Scalers",
".",
"scale",
"Function",
"used",
"to",
"scale",
"projection",
"show_axes",
":",
"bool",
"default",
"False",
"Show",
"the",
"ticked",
"axes",
"on",
"the",
"plot",
".",
"If",
"false",
"show",
"inner",
"axes",
"as",
"a",
"crosshair",
".",
"show_dimensions_on_tooltip",
":",
"bool",
"False",
"by",
"default",
"If",
"true",
"shows",
"dimension",
"positions",
"on",
"tooltip",
"along",
"with",
"term",
"name",
".",
"Otherwise",
"default",
"to",
"the",
"get_tooltip_content",
"parameter",
".",
"kwargs",
":",
"dict",
"Remaining",
"produce_scattertext_explorer",
"keywords",
"get_tooltip_content"
] |
def produce_pca_explorer(corpus,
category,
word2vec_model=None,
projection_model=None,
embeddings=None,
projection=None,
term_acceptance_re=re.compile('[a-z]{3,}'),
x_dim=0,
y_dim=1,
scaler=scale,
show_axes=False,
show_dimensions_on_tooltip=True,
x_label='',
y_label='',
**kwargs):
"""
Parameters
----------
corpus : ParsedCorpus
It is highly recommended to use a stoplisted, unigram corpus-- `corpus.get_stoplisted_unigram_corpus()`
category : str
word2vec_model : Word2Vec
A gensim word2vec model. A default model will be used instead. See Word2VecFromParsedCorpus for the default
model.
projection_model : sklearn-style dimensionality reduction model. Ignored if 'projection' is presents
By default: umap.UMAP(min_dist=0.5, metric='cosine') unless projection is present. If so,
You could also use, e.g., sklearn.manifold.TSNE(perplexity=10, n_components=2, init='pca', n_iter=2500, random_state=23)
embeddings : array[len(corpus.get_terms()), X]
Word embeddings. If None (default), and no value is passed into projection, use word2vec_model
projection : DataFrame('x': array[len(corpus.get_terms())], 'y': array[len(corpus.get_terms())])
If None (default), produced using projection_model
term_acceptance_re : SRE_Pattern,
Regular expression to identify valid terms
x_dim : int, default 0
Dimension of transformation matrix for x-axis
y_dim : int, default 1
Dimension of transformation matrix for y-axis
scalers : function , default scattertext.Scalers.scale
Function used to scale projection
show_axes : bool, default False
Show the ticked axes on the plot. If false, show inner axes as a crosshair.
show_dimensions_on_tooltip : bool, False by default
If true, shows dimension positions on tooltip, along with term name. Otherwise, default to the
get_tooltip_content parameter.
kwargs : dict
Remaining produce_scattertext_explorer keywords get_tooltip_content
Returns
-------
str
HTML of visualization
"""
if projection is None:
embeddings_resolover = EmbeddingsResolver(corpus)
if embeddings is not None:
embeddings_resolover.set_embeddings(embeddings)
else:
embeddings_resolover.set_embeddings_model(word2vec_model, term_acceptance_re)
corpus, projection = embeddings_resolover.project_embeddings(projection_model, x_dim=x_dim, y_dim=y_dim)
else:
assert type(projection) == pd.DataFrame
assert 'x' in projection and 'y' in projection
if kwargs.get('use_non_text_features', False):
assert set(projection.index) == set(corpus.get_metadata())
else:
assert set(projection.index) == set(corpus.get_terms())
if show_dimensions_on_tooltip:
kwargs['get_tooltip_content'] = '''(function(d) {
return d.term + "<br/>Dim %s: " + Math.round(d.ox*1000)/1000 + "<br/>Dim %s: " + Math.round(d.oy*1000)/1000
})''' % (x_dim, y_dim)
html = produce_scattertext_explorer(
corpus=corpus,
category=category,
minimum_term_frequency=0,
sort_by_dist=False,
original_x=projection['x'],
original_y=projection['y'],
x_coords=scaler(projection['x']),
y_coords=scaler(projection['y']),
y_label=y_label,
x_label=x_label,
show_axes=show_axes,
horizontal_line_y_position=kwargs.get('horizontal_line_y_position', 0),
vertical_line_x_position=kwargs.get('vertical_line_x_position', 0),
**kwargs
)
return html
|
[
"def",
"produce_pca_explorer",
"(",
"corpus",
",",
"category",
",",
"word2vec_model",
"=",
"None",
",",
"projection_model",
"=",
"None",
",",
"embeddings",
"=",
"None",
",",
"projection",
"=",
"None",
",",
"term_acceptance_re",
"=",
"re",
".",
"compile",
"(",
"'[a-z]{3,}'",
")",
",",
"x_dim",
"=",
"0",
",",
"y_dim",
"=",
"1",
",",
"scaler",
"=",
"scale",
",",
"show_axes",
"=",
"False",
",",
"show_dimensions_on_tooltip",
"=",
"True",
",",
"x_label",
"=",
"''",
",",
"y_label",
"=",
"''",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"projection",
"is",
"None",
":",
"embeddings_resolover",
"=",
"EmbeddingsResolver",
"(",
"corpus",
")",
"if",
"embeddings",
"is",
"not",
"None",
":",
"embeddings_resolover",
".",
"set_embeddings",
"(",
"embeddings",
")",
"else",
":",
"embeddings_resolover",
".",
"set_embeddings_model",
"(",
"word2vec_model",
",",
"term_acceptance_re",
")",
"corpus",
",",
"projection",
"=",
"embeddings_resolover",
".",
"project_embeddings",
"(",
"projection_model",
",",
"x_dim",
"=",
"x_dim",
",",
"y_dim",
"=",
"y_dim",
")",
"else",
":",
"assert",
"type",
"(",
"projection",
")",
"==",
"pd",
".",
"DataFrame",
"assert",
"'x'",
"in",
"projection",
"and",
"'y'",
"in",
"projection",
"if",
"kwargs",
".",
"get",
"(",
"'use_non_text_features'",
",",
"False",
")",
":",
"assert",
"set",
"(",
"projection",
".",
"index",
")",
"==",
"set",
"(",
"corpus",
".",
"get_metadata",
"(",
")",
")",
"else",
":",
"assert",
"set",
"(",
"projection",
".",
"index",
")",
"==",
"set",
"(",
"corpus",
".",
"get_terms",
"(",
")",
")",
"if",
"show_dimensions_on_tooltip",
":",
"kwargs",
"[",
"'get_tooltip_content'",
"]",
"=",
"'''(function(d) {\n return d.term + \"<br/>Dim %s: \" + Math.round(d.ox*1000)/1000 + \"<br/>Dim %s: \" + Math.round(d.oy*1000)/1000 \n })'''",
"%",
"(",
"x_dim",
",",
"y_dim",
")",
"html",
"=",
"produce_scattertext_explorer",
"(",
"corpus",
"=",
"corpus",
",",
"category",
"=",
"category",
",",
"minimum_term_frequency",
"=",
"0",
",",
"sort_by_dist",
"=",
"False",
",",
"original_x",
"=",
"projection",
"[",
"'x'",
"]",
",",
"original_y",
"=",
"projection",
"[",
"'y'",
"]",
",",
"x_coords",
"=",
"scaler",
"(",
"projection",
"[",
"'x'",
"]",
")",
",",
"y_coords",
"=",
"scaler",
"(",
"projection",
"[",
"'y'",
"]",
")",
",",
"y_label",
"=",
"y_label",
",",
"x_label",
"=",
"x_label",
",",
"show_axes",
"=",
"show_axes",
",",
"horizontal_line_y_position",
"=",
"kwargs",
".",
"get",
"(",
"'horizontal_line_y_position'",
",",
"0",
")",
",",
"vertical_line_x_position",
"=",
"kwargs",
".",
"get",
"(",
"'vertical_line_x_position'",
",",
"0",
")",
",",
"*",
"*",
"kwargs",
")",
"return",
"html"
] |
https://github.com/JasonKessler/scattertext/blob/ef33f06d4c31f9d64b551a7ab86bf157aca82644/scattertext/__init__.py#L1494-L1580
|
|
mbusb/multibootusb
|
fa89b28f27891a9ce8d6e2a5737baa2e6ee83dfd
|
scripts/param_rewrite.py
|
python
|
op_remove_keys
|
(keys, params)
|
return [x for x in params if all([not x.startswith(k) for k in keys])]
|
[] |
def op_remove_keys(keys, params):
return [x for x in params if all([not x.startswith(k) for k in keys])]
|
[
"def",
"op_remove_keys",
"(",
"keys",
",",
"params",
")",
":",
"return",
"[",
"x",
"for",
"x",
"in",
"params",
"if",
"all",
"(",
"[",
"not",
"x",
".",
"startswith",
"(",
"k",
")",
"for",
"k",
"in",
"keys",
"]",
")",
"]"
] |
https://github.com/mbusb/multibootusb/blob/fa89b28f27891a9ce8d6e2a5737baa2e6ee83dfd/scripts/param_rewrite.py#L57-L58
|
|||
LCAV/pyroomacoustics
|
15a86425b68969b2109860ca3614f0cbf92b1bd0
|
pyroomacoustics/transform/stft.py
|
python
|
analysis
|
(x, L, hop, win=None, zp_back=0, zp_front=0)
|
return the_stft.analysis(x)
|
Convenience function for one-shot STFT
Parameters
----------
x: array_like, (n_samples) or (n_samples, n_channels)
input signal
L: int
frame size
hop: int
shift size between frames
win: array_like
the window to apply (default None)
zp_back: int
zero padding to apply at the end of the frame
zp_front: int
zero padding to apply at the beginning of the frame
Returns
-------
X: ndarray, (n_frames, n_frequencies) or (n_frames, n_frequencies, n_channels)
The STFT of x
|
Convenience function for one-shot STFT
|
[
"Convenience",
"function",
"for",
"one",
"-",
"shot",
"STFT"
] |
def analysis(x, L, hop, win=None, zp_back=0, zp_front=0):
"""
Convenience function for one-shot STFT
Parameters
----------
x: array_like, (n_samples) or (n_samples, n_channels)
input signal
L: int
frame size
hop: int
shift size between frames
win: array_like
the window to apply (default None)
zp_back: int
zero padding to apply at the end of the frame
zp_front: int
zero padding to apply at the beginning of the frame
Returns
-------
X: ndarray, (n_frames, n_frequencies) or (n_frames, n_frequencies, n_channels)
The STFT of x
"""
if x.ndim == 2:
channels = x.shape[1]
else:
channels = 1
the_stft = STFT(
L, hop=hop, analysis_window=win, channels=channels, precision=x.dtype
)
if zp_back > 0:
the_stft.zero_pad_back(zp_back)
if zp_front > 0:
the_stft.zero_pad_front(zp_front)
# apply transform
return the_stft.analysis(x)
|
[
"def",
"analysis",
"(",
"x",
",",
"L",
",",
"hop",
",",
"win",
"=",
"None",
",",
"zp_back",
"=",
"0",
",",
"zp_front",
"=",
"0",
")",
":",
"if",
"x",
".",
"ndim",
"==",
"2",
":",
"channels",
"=",
"x",
".",
"shape",
"[",
"1",
"]",
"else",
":",
"channels",
"=",
"1",
"the_stft",
"=",
"STFT",
"(",
"L",
",",
"hop",
"=",
"hop",
",",
"analysis_window",
"=",
"win",
",",
"channels",
"=",
"channels",
",",
"precision",
"=",
"x",
".",
"dtype",
")",
"if",
"zp_back",
">",
"0",
":",
"the_stft",
".",
"zero_pad_back",
"(",
"zp_back",
")",
"if",
"zp_front",
">",
"0",
":",
"the_stft",
".",
"zero_pad_front",
"(",
"zp_front",
")",
"# apply transform",
"return",
"the_stft",
".",
"analysis",
"(",
"x",
")"
] |
https://github.com/LCAV/pyroomacoustics/blob/15a86425b68969b2109860ca3614f0cbf92b1bd0/pyroomacoustics/transform/stft.py#L773-L814
|
|
junyanz/pytorch-CycleGAN-and-pix2pix
|
003efc4c8819de47ff11b5a0af7ba09aee7f5fc1
|
models/base_model.py
|
python
|
BaseModel.forward
|
(self)
|
Run forward pass; called by both functions <optimize_parameters> and <test>.
|
Run forward pass; called by both functions <optimize_parameters> and <test>.
|
[
"Run",
"forward",
"pass",
";",
"called",
"by",
"both",
"functions",
"<optimize_parameters",
">",
"and",
"<test",
">",
"."
] |
def forward(self):
"""Run forward pass; called by both functions <optimize_parameters> and <test>."""
pass
|
[
"def",
"forward",
"(",
"self",
")",
":",
"pass"
] |
https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/003efc4c8819de47ff11b5a0af7ba09aee7f5fc1/models/base_model.py#L69-L71
|
||
oracle/graalpython
|
577e02da9755d916056184ec441c26e00b70145c
|
graalpython/lib-python/3/xml/sax/xmlreader.py
|
python
|
XMLReader.setLocale
|
(self, locale)
|
Allow an application to set the locale for errors and warnings.
SAX parsers are not required to provide localization for errors
and warnings; if they cannot support the requested locale,
however, they must raise a SAX exception. Applications may
request a locale change in the middle of a parse.
|
Allow an application to set the locale for errors and warnings.
|
[
"Allow",
"an",
"application",
"to",
"set",
"the",
"locale",
"for",
"errors",
"and",
"warnings",
"."
] |
def setLocale(self, locale):
"""Allow an application to set the locale for errors and warnings.
SAX parsers are not required to provide localization for errors
and warnings; if they cannot support the requested locale,
however, they must raise a SAX exception. Applications may
request a locale change in the middle of a parse."""
raise SAXNotSupportedException("Locale support not implemented")
|
[
"def",
"setLocale",
"(",
"self",
",",
"locale",
")",
":",
"raise",
"SAXNotSupportedException",
"(",
"\"Locale support not implemented\"",
")"
] |
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/xml/sax/xmlreader.py#L66-L73
|
||
Autodesk/molecular-design-toolkit
|
5f45a47fea21d3603899a6366cb163024f0e2ec4
|
moldesign/units/unitsystem.py
|
python
|
UnitSystem.convert
|
(self, quantity)
|
Convert a quantity into this unit system.
Args:
quantity (MdtQuantity or MdtUnit): quantity to convert
|
Convert a quantity into this unit system.
|
[
"Convert",
"a",
"quantity",
"into",
"this",
"unit",
"system",
"."
] |
def convert(self, quantity):
""" Convert a quantity into this unit system.
Args:
quantity (MdtQuantity or MdtUnit): quantity to convert
"""
baseunit = self.get_baseunit(quantity)
if baseunit == ureg.dimensionless:
return quantity * ureg.dimensionless
else:
result = quantity.to(baseunit)
return result
|
[
"def",
"convert",
"(",
"self",
",",
"quantity",
")",
":",
"baseunit",
"=",
"self",
".",
"get_baseunit",
"(",
"quantity",
")",
"if",
"baseunit",
"==",
"ureg",
".",
"dimensionless",
":",
"return",
"quantity",
"*",
"ureg",
".",
"dimensionless",
"else",
":",
"result",
"=",
"quantity",
".",
"to",
"(",
"baseunit",
")",
"return",
"result"
] |
https://github.com/Autodesk/molecular-design-toolkit/blob/5f45a47fea21d3603899a6366cb163024f0e2ec4/moldesign/units/unitsystem.py#L85-L96
|
||
biolab/orange3
|
41685e1c7b1d1babe680113685a2d44bcc9fec0b
|
Orange/widgets/data/owcsvimport.py
|
python
|
_mime_type_for_path
|
(path)
|
return mtype
|
Return the mime type of the file on a local filesystem.
In case the path is a compressed file return the mime type of its contents
Parameters
----------
path : str
Local filesystem path
Returns
-------
mimetype: QMimeType
|
Return the mime type of the file on a local filesystem.
|
[
"Return",
"the",
"mime",
"type",
"of",
"the",
"file",
"on",
"a",
"local",
"filesystem",
"."
] |
def _mime_type_for_path(path):
# type: (str) -> QMimeType
"""
Return the mime type of the file on a local filesystem.
In case the path is a compressed file return the mime type of its contents
Parameters
----------
path : str
Local filesystem path
Returns
-------
mimetype: QMimeType
"""
db = QMimeDatabase()
mtype = db.mimeTypeForFile(path, QMimeDatabase.MatchDefault)
if any(mtype.inherits(t) for t in compression_types):
# peek contents
try:
with _open(path, "rb") as f:
sample = f.read(4096)
except Exception: # pylint: disable=broad-except
sample = b''
mtype = db.mimeTypeForData(sample)
return mtype
|
[
"def",
"_mime_type_for_path",
"(",
"path",
")",
":",
"# type: (str) -> QMimeType",
"db",
"=",
"QMimeDatabase",
"(",
")",
"mtype",
"=",
"db",
".",
"mimeTypeForFile",
"(",
"path",
",",
"QMimeDatabase",
".",
"MatchDefault",
")",
"if",
"any",
"(",
"mtype",
".",
"inherits",
"(",
"t",
")",
"for",
"t",
"in",
"compression_types",
")",
":",
"# peek contents",
"try",
":",
"with",
"_open",
"(",
"path",
",",
"\"rb\"",
")",
"as",
"f",
":",
"sample",
"=",
"f",
".",
"read",
"(",
"4096",
")",
"except",
"Exception",
":",
"# pylint: disable=broad-except",
"sample",
"=",
"b''",
"mtype",
"=",
"db",
".",
"mimeTypeForData",
"(",
"sample",
")",
"return",
"mtype"
] |
https://github.com/biolab/orange3/blob/41685e1c7b1d1babe680113685a2d44bcc9fec0b/Orange/widgets/data/owcsvimport.py#L1421-L1447
|
|
tensorflow/ranking
|
94cccec8b4e71d2cc4489c61e2623522738c2924
|
tensorflow_ranking/python/metrics_impl.py
|
python
|
AlphaDCGMetric._compute_per_list_metric
|
(self, labels, predictions, weights, topn, mask)
|
return tf.compat.v1.math.divide_no_nan(alpha_dcg, per_list_weights)
|
See `_DivRankingMetric`.
|
See `_DivRankingMetric`.
|
[
"See",
"_DivRankingMetric",
"."
] |
def _compute_per_list_metric(self, labels, predictions, weights, topn, mask):
"""See `_DivRankingMetric`."""
sorted_labels, sorted_weights = utils.sort_by_scores(
predictions, [labels, weights], topn=topn, seed=self._seed, mask=mask)
alpha_dcg = _discounted_cumulative_gain(sorted_labels, sorted_weights,
self._gain_fn,
self._rank_discount_fn)
per_list_weights = self._compute_per_list_weights(weights, labels)
return tf.compat.v1.math.divide_no_nan(alpha_dcg, per_list_weights)
|
[
"def",
"_compute_per_list_metric",
"(",
"self",
",",
"labels",
",",
"predictions",
",",
"weights",
",",
"topn",
",",
"mask",
")",
":",
"sorted_labels",
",",
"sorted_weights",
"=",
"utils",
".",
"sort_by_scores",
"(",
"predictions",
",",
"[",
"labels",
",",
"weights",
"]",
",",
"topn",
"=",
"topn",
",",
"seed",
"=",
"self",
".",
"_seed",
",",
"mask",
"=",
"mask",
")",
"alpha_dcg",
"=",
"_discounted_cumulative_gain",
"(",
"sorted_labels",
",",
"sorted_weights",
",",
"self",
".",
"_gain_fn",
",",
"self",
".",
"_rank_discount_fn",
")",
"per_list_weights",
"=",
"self",
".",
"_compute_per_list_weights",
"(",
"weights",
",",
"labels",
")",
"return",
"tf",
".",
"compat",
".",
"v1",
".",
"math",
".",
"divide_no_nan",
"(",
"alpha_dcg",
",",
"per_list_weights",
")"
] |
https://github.com/tensorflow/ranking/blob/94cccec8b4e71d2cc4489c61e2623522738c2924/tensorflow_ranking/python/metrics_impl.py#L766-L774
|
|
sagemath/sage
|
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
|
src/sage/sets/finite_set_maps.py
|
python
|
FiniteSetMaps_MN._from_list_
|
(self, v)
|
return self.element_class(self, v, check=False)
|
EXAMPLES::
sage: M = FiniteSetMaps(4,3)
sage: M._from_list_([2,1,1,0])
[2, 1, 1, 0]
|
EXAMPLES::
|
[
"EXAMPLES",
"::"
] |
def _from_list_(self, v):
"""
EXAMPLES::
sage: M = FiniteSetMaps(4,3)
sage: M._from_list_([2,1,1,0])
[2, 1, 1, 0]
"""
return self.element_class(self, v, check=False)
|
[
"def",
"_from_list_",
"(",
"self",
",",
"v",
")",
":",
"return",
"self",
".",
"element_class",
"(",
"self",
",",
"v",
",",
"check",
"=",
"False",
")"
] |
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/sets/finite_set_maps.py#L346-L354
|
|
rizar/attention-lvcsr
|
1ae52cafdd8419874846f9544a299eef9c758f3b
|
libs/blocks/blocks/model.py
|
python
|
Model.get_top_bricks
|
(self)
|
return self.top_bricks
|
Get the bricks that do not have parents.
Returns
-------
bricks : list of :class:`~blocks.bricks.base.Brick`
|
Get the bricks that do not have parents.
|
[
"Get",
"the",
"bricks",
"that",
"do",
"not",
"have",
"parents",
"."
] |
def get_top_bricks(self):
"""Get the bricks that do not have parents.
Returns
-------
bricks : list of :class:`~blocks.bricks.base.Brick`
"""
return self.top_bricks
|
[
"def",
"get_top_bricks",
"(",
"self",
")",
":",
"return",
"self",
".",
"top_bricks"
] |
https://github.com/rizar/attention-lvcsr/blob/1ae52cafdd8419874846f9544a299eef9c758f3b/libs/blocks/blocks/model.py#L153-L161
|
|
CalebBell/thermo
|
572a47d1b03d49fe609b8d5f826fa6a7cde00828
|
thermo/equilibrium.py
|
python
|
EquilibriumState.G_ideal_gas
|
(self, phase=None)
|
return G_ideal_gas
|
r'''Method to calculate and return the ideal-gas Gibbs free energy of
the phase.
.. math::
G^{ig} = H^{ig} - T S^{ig}
Returns
-------
G_ideal_gas : float
Ideal gas free energy, [J/(mol)]
|
r'''Method to calculate and return the ideal-gas Gibbs free energy of
the phase.
|
[
"r",
"Method",
"to",
"calculate",
"and",
"return",
"the",
"ideal",
"-",
"gas",
"Gibbs",
"free",
"energy",
"of",
"the",
"phase",
"."
] |
def G_ideal_gas(self, phase=None):
r'''Method to calculate and return the ideal-gas Gibbs free energy of
the phase.
.. math::
G^{ig} = H^{ig} - T S^{ig}
Returns
-------
G_ideal_gas : float
Ideal gas free energy, [J/(mol)]
'''
G_ideal_gas = self.H_ideal_gas(phase) - self.T*self.S_ideal_gas(phase)
return G_ideal_gas
|
[
"def",
"G_ideal_gas",
"(",
"self",
",",
"phase",
"=",
"None",
")",
":",
"G_ideal_gas",
"=",
"self",
".",
"H_ideal_gas",
"(",
"phase",
")",
"-",
"self",
".",
"T",
"*",
"self",
".",
"S_ideal_gas",
"(",
"phase",
")",
"return",
"G_ideal_gas"
] |
https://github.com/CalebBell/thermo/blob/572a47d1b03d49fe609b8d5f826fa6a7cde00828/thermo/equilibrium.py#L1301-L1314
|
|
openbci-archive/OpenBCI_Python
|
1a8a0a1f8c9158b6c6a4714d605ba781c2630f64
|
openbci/wifi.py
|
python
|
OpenBCIWiFi.check_connection
|
(self)
|
Check connection quality in term of lag and number of packets drop.
Reinit connection if necessary.
FIXME: parameters given to the board will be lost.
|
Check connection quality in term of lag and number of packets drop.
Reinit connection if necessary.
FIXME: parameters given to the board will be lost.
|
[
"Check",
"connection",
"quality",
"in",
"term",
"of",
"lag",
"and",
"number",
"of",
"packets",
"drop",
".",
"Reinit",
"connection",
"if",
"necessary",
".",
"FIXME",
":",
"parameters",
"given",
"to",
"the",
"board",
"will",
"be",
"lost",
"."
] |
def check_connection(self):
""" Check connection quality in term of lag and number of packets drop.
Reinit connection if necessary.
FIXME: parameters given to the board will be lost.
"""
# stop checking when we're no longer streaming
if not self.streaming:
return
# check number of dropped packets and duration without new packets, deco/reco if too large
if self.packets_dropped > self.max_packets_to_skip:
self.warn("Too many packets dropped, attempt to reconnect")
self.reconnect()
elif self.timeout > 0 and timeit.default_timer() - self.time_last_packet > self.timeout:
self.warn("Too long since got new data, attempt to reconnect")
# if error, attempt to reconect
self.reconnect()
|
[
"def",
"check_connection",
"(",
"self",
")",
":",
"# stop checking when we're no longer streaming",
"if",
"not",
"self",
".",
"streaming",
":",
"return",
"# check number of dropped packets and duration without new packets, deco/reco if too large",
"if",
"self",
".",
"packets_dropped",
">",
"self",
".",
"max_packets_to_skip",
":",
"self",
".",
"warn",
"(",
"\"Too many packets dropped, attempt to reconnect\"",
")",
"self",
".",
"reconnect",
"(",
")",
"elif",
"self",
".",
"timeout",
">",
"0",
"and",
"timeit",
".",
"default_timer",
"(",
")",
"-",
"self",
".",
"time_last_packet",
">",
"self",
".",
"timeout",
":",
"self",
".",
"warn",
"(",
"\"Too long since got new data, attempt to reconnect\"",
")",
"# if error, attempt to reconect",
"self",
".",
"reconnect",
"(",
")"
] |
https://github.com/openbci-archive/OpenBCI_Python/blob/1a8a0a1f8c9158b6c6a4714d605ba781c2630f64/openbci/wifi.py#L591-L606
|
||
memray/seq2seq-keyphrase
|
9145c63ebdc4c3bc431f8091dc52547a46804012
|
emolga/models/encdec.py
|
python
|
Encoder.__init__
|
(self,
config, rng, prefix='enc',
mode='Evaluation', embed=None, use_context=False)
|
Create all elements of the Encoder's Computational graph
|
Create all elements of the Encoder's Computational graph
|
[
"Create",
"all",
"elements",
"of",
"the",
"Encoder",
"s",
"Computational",
"graph"
] |
def __init__(self,
config, rng, prefix='enc',
mode='Evaluation', embed=None, use_context=False):
super(Encoder, self).__init__()
self.config = config
self.rng = rng
self.prefix = prefix
self.mode = mode
self.name = prefix
self.use_context = use_context
self.return_embed = False
self.return_sequence = False
"""
Create all elements of the Encoder's Computational graph
"""
# create Embedding layers
logger.info("{}_create embedding layers.".format(self.prefix))
if embed:
self.Embed = embed
else:
self.Embed = Embedding(
self.config['enc_voc_size'],
self.config['enc_embedd_dim'],
name="{}_embed".format(self.prefix))
self._add(self.Embed)
if self.use_context:
self.Initializer = Dense(
config['enc_contxt_dim'],
config['enc_hidden_dim'],
activation='tanh',
name="{}_init".format(self.prefix)
)
self._add(self.Initializer)
"""
Encoder Core
"""
# create RNN cells
if not self.config['bidirectional']:
logger.info("{}_create RNN cells.".format(self.prefix))
self.RNN = RNN(
self.config['enc_embedd_dim'],
self.config['enc_hidden_dim'],
None if not use_context
else self.config['enc_contxt_dim'],
name="{}_cell".format(self.prefix)
)
self._add(self.RNN)
else:
logger.info("{}_create forward RNN cells.".format(self.prefix))
self.forwardRNN = RNN(
self.config['enc_embedd_dim'],
self.config['enc_hidden_dim'],
None if not use_context
else self.config['enc_contxt_dim'],
name="{}_fw_cell".format(self.prefix)
)
self._add(self.forwardRNN)
logger.info("{}_create backward RNN cells.".format(self.prefix))
self.backwardRNN = RNN(
self.config['enc_embedd_dim'],
self.config['enc_hidden_dim'],
None if not use_context
else self.config['enc_contxt_dim'],
name="{}_bw_cell".format(self.prefix)
)
self._add(self.backwardRNN)
logger.info("create encoder ok.")
|
[
"def",
"__init__",
"(",
"self",
",",
"config",
",",
"rng",
",",
"prefix",
"=",
"'enc'",
",",
"mode",
"=",
"'Evaluation'",
",",
"embed",
"=",
"None",
",",
"use_context",
"=",
"False",
")",
":",
"super",
"(",
"Encoder",
",",
"self",
")",
".",
"__init__",
"(",
")",
"self",
".",
"config",
"=",
"config",
"self",
".",
"rng",
"=",
"rng",
"self",
".",
"prefix",
"=",
"prefix",
"self",
".",
"mode",
"=",
"mode",
"self",
".",
"name",
"=",
"prefix",
"self",
".",
"use_context",
"=",
"use_context",
"self",
".",
"return_embed",
"=",
"False",
"self",
".",
"return_sequence",
"=",
"False",
"# create Embedding layers",
"logger",
".",
"info",
"(",
"\"{}_create embedding layers.\"",
".",
"format",
"(",
"self",
".",
"prefix",
")",
")",
"if",
"embed",
":",
"self",
".",
"Embed",
"=",
"embed",
"else",
":",
"self",
".",
"Embed",
"=",
"Embedding",
"(",
"self",
".",
"config",
"[",
"'enc_voc_size'",
"]",
",",
"self",
".",
"config",
"[",
"'enc_embedd_dim'",
"]",
",",
"name",
"=",
"\"{}_embed\"",
".",
"format",
"(",
"self",
".",
"prefix",
")",
")",
"self",
".",
"_add",
"(",
"self",
".",
"Embed",
")",
"if",
"self",
".",
"use_context",
":",
"self",
".",
"Initializer",
"=",
"Dense",
"(",
"config",
"[",
"'enc_contxt_dim'",
"]",
",",
"config",
"[",
"'enc_hidden_dim'",
"]",
",",
"activation",
"=",
"'tanh'",
",",
"name",
"=",
"\"{}_init\"",
".",
"format",
"(",
"self",
".",
"prefix",
")",
")",
"self",
".",
"_add",
"(",
"self",
".",
"Initializer",
")",
"\"\"\"\n Encoder Core\n \"\"\"",
"# create RNN cells",
"if",
"not",
"self",
".",
"config",
"[",
"'bidirectional'",
"]",
":",
"logger",
".",
"info",
"(",
"\"{}_create RNN cells.\"",
".",
"format",
"(",
"self",
".",
"prefix",
")",
")",
"self",
".",
"RNN",
"=",
"RNN",
"(",
"self",
".",
"config",
"[",
"'enc_embedd_dim'",
"]",
",",
"self",
".",
"config",
"[",
"'enc_hidden_dim'",
"]",
",",
"None",
"if",
"not",
"use_context",
"else",
"self",
".",
"config",
"[",
"'enc_contxt_dim'",
"]",
",",
"name",
"=",
"\"{}_cell\"",
".",
"format",
"(",
"self",
".",
"prefix",
")",
")",
"self",
".",
"_add",
"(",
"self",
".",
"RNN",
")",
"else",
":",
"logger",
".",
"info",
"(",
"\"{}_create forward RNN cells.\"",
".",
"format",
"(",
"self",
".",
"prefix",
")",
")",
"self",
".",
"forwardRNN",
"=",
"RNN",
"(",
"self",
".",
"config",
"[",
"'enc_embedd_dim'",
"]",
",",
"self",
".",
"config",
"[",
"'enc_hidden_dim'",
"]",
",",
"None",
"if",
"not",
"use_context",
"else",
"self",
".",
"config",
"[",
"'enc_contxt_dim'",
"]",
",",
"name",
"=",
"\"{}_fw_cell\"",
".",
"format",
"(",
"self",
".",
"prefix",
")",
")",
"self",
".",
"_add",
"(",
"self",
".",
"forwardRNN",
")",
"logger",
".",
"info",
"(",
"\"{}_create backward RNN cells.\"",
".",
"format",
"(",
"self",
".",
"prefix",
")",
")",
"self",
".",
"backwardRNN",
"=",
"RNN",
"(",
"self",
".",
"config",
"[",
"'enc_embedd_dim'",
"]",
",",
"self",
".",
"config",
"[",
"'enc_hidden_dim'",
"]",
",",
"None",
"if",
"not",
"use_context",
"else",
"self",
".",
"config",
"[",
"'enc_contxt_dim'",
"]",
",",
"name",
"=",
"\"{}_bw_cell\"",
".",
"format",
"(",
"self",
".",
"prefix",
")",
")",
"self",
".",
"_add",
"(",
"self",
".",
"backwardRNN",
")",
"logger",
".",
"info",
"(",
"\"create encoder ok.\"",
")"
] |
https://github.com/memray/seq2seq-keyphrase/blob/9145c63ebdc4c3bc431f8091dc52547a46804012/emolga/models/encdec.py#L183-L255
|
||
ReactionMechanismGenerator/RMG-Py
|
2b7baf51febf27157def58fb3f6cee03fb6a684c
|
rmgpy/reaction.py
|
python
|
Reaction.is_unimolecular
|
(self)
|
return len(self.reactants) == 1 or len(self.products) == 1
|
Return ``True`` if the reaction has a single molecule as either reactant or product (or both)
:math:`\\ce{A <=> B + C}` or :math:`\\ce{A + B <=> C}` or :math:`\\ce{A <=> B}`,
or ``False`` if not.
|
Return ``True`` if the reaction has a single molecule as either reactant or product (or both)
:math:`\\ce{A <=> B + C}` or :math:`\\ce{A + B <=> C}` or :math:`\\ce{A <=> B}`,
or ``False`` if not.
|
[
"Return",
"True",
"if",
"the",
"reaction",
"has",
"a",
"single",
"molecule",
"as",
"either",
"reactant",
"or",
"product",
"(",
"or",
"both",
")",
":",
"math",
":",
"\\\\",
"ce",
"{",
"A",
"<",
"=",
">",
"B",
"+",
"C",
"}",
"or",
":",
"math",
":",
"\\\\",
"ce",
"{",
"A",
"+",
"B",
"<",
"=",
">",
"C",
"}",
"or",
":",
"math",
":",
"\\\\",
"ce",
"{",
"A",
"<",
"=",
">",
"B",
"}",
"or",
"False",
"if",
"not",
"."
] |
def is_unimolecular(self):
"""
Return ``True`` if the reaction has a single molecule as either reactant or product (or both)
:math:`\\ce{A <=> B + C}` or :math:`\\ce{A + B <=> C}` or :math:`\\ce{A <=> B}`,
or ``False`` if not.
"""
return len(self.reactants) == 1 or len(self.products) == 1
|
[
"def",
"is_unimolecular",
"(",
"self",
")",
":",
"return",
"len",
"(",
"self",
".",
"reactants",
")",
"==",
"1",
"or",
"len",
"(",
"self",
".",
"products",
")",
"==",
"1"
] |
https://github.com/ReactionMechanismGenerator/RMG-Py/blob/2b7baf51febf27157def58fb3f6cee03fb6a684c/rmgpy/reaction.py#L380-L386
|
|
hubblestack/hubble
|
763142474edcecdec5fd25591dc29c3536e8f969
|
hubblestack/modules/iptables.py
|
python
|
__virtual__
|
()
|
return True
|
Only load the module if iptables is installed
|
Only load the module if iptables is installed
|
[
"Only",
"load",
"the",
"module",
"if",
"iptables",
"is",
"installed"
] |
def __virtual__():
"""
Only load the module if iptables is installed
"""
if not hubblestack.utils.path.which("iptables"):
return (
False,
"The iptables execution module cannot be loaded: iptables not installed.",
)
return True
|
[
"def",
"__virtual__",
"(",
")",
":",
"if",
"not",
"hubblestack",
".",
"utils",
".",
"path",
".",
"which",
"(",
"\"iptables\"",
")",
":",
"return",
"(",
"False",
",",
"\"The iptables execution module cannot be loaded: iptables not installed.\"",
",",
")",
"return",
"True"
] |
https://github.com/hubblestack/hubble/blob/763142474edcecdec5fd25591dc29c3536e8f969/hubblestack/modules/iptables.py#L113-L123
|
|
open-mmlab/OpenPCDet
|
0f4d3f1f5c1fbe551c35917220e75eb90e28035f
|
pcdet/datasets/augmentor/database_sampler.py
|
python
|
DataBaseSampler.put_boxes_on_road_planes
|
(gt_boxes, road_planes, calib)
|
return gt_boxes, mv_height
|
Only validate in KITTIDataset
Args:
gt_boxes: (N, 7 + C) [x, y, z, dx, dy, dz, heading, ...]
road_planes: [a, b, c, d]
calib:
Returns:
|
Only validate in KITTIDataset
Args:
gt_boxes: (N, 7 + C) [x, y, z, dx, dy, dz, heading, ...]
road_planes: [a, b, c, d]
calib:
|
[
"Only",
"validate",
"in",
"KITTIDataset",
"Args",
":",
"gt_boxes",
":",
"(",
"N",
"7",
"+",
"C",
")",
"[",
"x",
"y",
"z",
"dx",
"dy",
"dz",
"heading",
"...",
"]",
"road_planes",
":",
"[",
"a",
"b",
"c",
"d",
"]",
"calib",
":"
] |
def put_boxes_on_road_planes(gt_boxes, road_planes, calib):
"""
Only validate in KITTIDataset
Args:
gt_boxes: (N, 7 + C) [x, y, z, dx, dy, dz, heading, ...]
road_planes: [a, b, c, d]
calib:
Returns:
"""
a, b, c, d = road_planes
center_cam = calib.lidar_to_rect(gt_boxes[:, 0:3])
cur_height_cam = (-d - a * center_cam[:, 0] - c * center_cam[:, 2]) / b
center_cam[:, 1] = cur_height_cam
cur_lidar_height = calib.rect_to_lidar(center_cam)[:, 2]
mv_height = gt_boxes[:, 2] - gt_boxes[:, 5] / 2 - cur_lidar_height
gt_boxes[:, 2] -= mv_height # lidar view
return gt_boxes, mv_height
|
[
"def",
"put_boxes_on_road_planes",
"(",
"gt_boxes",
",",
"road_planes",
",",
"calib",
")",
":",
"a",
",",
"b",
",",
"c",
",",
"d",
"=",
"road_planes",
"center_cam",
"=",
"calib",
".",
"lidar_to_rect",
"(",
"gt_boxes",
"[",
":",
",",
"0",
":",
"3",
"]",
")",
"cur_height_cam",
"=",
"(",
"-",
"d",
"-",
"a",
"*",
"center_cam",
"[",
":",
",",
"0",
"]",
"-",
"c",
"*",
"center_cam",
"[",
":",
",",
"2",
"]",
")",
"/",
"b",
"center_cam",
"[",
":",
",",
"1",
"]",
"=",
"cur_height_cam",
"cur_lidar_height",
"=",
"calib",
".",
"rect_to_lidar",
"(",
"center_cam",
")",
"[",
":",
",",
"2",
"]",
"mv_height",
"=",
"gt_boxes",
"[",
":",
",",
"2",
"]",
"-",
"gt_boxes",
"[",
":",
",",
"5",
"]",
"/",
"2",
"-",
"cur_lidar_height",
"gt_boxes",
"[",
":",
",",
"2",
"]",
"-=",
"mv_height",
"# lidar view",
"return",
"gt_boxes",
",",
"mv_height"
] |
https://github.com/open-mmlab/OpenPCDet/blob/0f4d3f1f5c1fbe551c35917220e75eb90e28035f/pcdet/datasets/augmentor/database_sampler.py#L137-L154
|
|
linxid/Machine_Learning_Study_Path
|
558e82d13237114bbb8152483977806fc0c222af
|
Machine Learning In Action/Chapter5-LogisticRegression/venv/Lib/site-packages/setuptools/command/easy_install.py
|
python
|
chmod
|
(path, mode)
|
[] |
def chmod(path, mode):
log.debug("changing mode of %s to %o", path, mode)
try:
_chmod(path, mode)
except os.error as e:
log.debug("chmod failed: %s", e)
|
[
"def",
"chmod",
"(",
"path",
",",
"mode",
")",
":",
"log",
".",
"debug",
"(",
"\"changing mode of %s to %o\"",
",",
"path",
",",
"mode",
")",
"try",
":",
"_chmod",
"(",
"path",
",",
"mode",
")",
"except",
"os",
".",
"error",
"as",
"e",
":",
"log",
".",
"debug",
"(",
"\"chmod failed: %s\"",
",",
"e",
")"
] |
https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter5-LogisticRegression/venv/Lib/site-packages/setuptools/command/easy_install.py#L1953-L1958
|
||||
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/numpy-1.16.0-py3.7-macosx-10.9-x86_64.egg/numpy/ma/extras.py
|
python
|
mask_rows
|
(a, axis=None)
|
return mask_rowcols(a, 0)
|
Mask rows of a 2D array that contain masked values.
This function is a shortcut to ``mask_rowcols`` with `axis` equal to 0.
See Also
--------
mask_rowcols : Mask rows and/or columns of a 2D array.
masked_where : Mask where a condition is met.
Examples
--------
>>> import numpy.ma as ma
>>> a = np.zeros((3, 3), dtype=int)
>>> a[1, 1] = 1
>>> a
array([[0, 0, 0],
[0, 1, 0],
[0, 0, 0]])
>>> a = ma.masked_equal(a, 1)
>>> a
masked_array(data =
[[0 0 0]
[0 -- 0]
[0 0 0]],
mask =
[[False False False]
[False True False]
[False False False]],
fill_value=999999)
>>> ma.mask_rows(a)
masked_array(data =
[[0 0 0]
[-- -- --]
[0 0 0]],
mask =
[[False False False]
[ True True True]
[False False False]],
fill_value=999999)
|
Mask rows of a 2D array that contain masked values.
|
[
"Mask",
"rows",
"of",
"a",
"2D",
"array",
"that",
"contain",
"masked",
"values",
"."
] |
def mask_rows(a, axis=None):
"""
Mask rows of a 2D array that contain masked values.
This function is a shortcut to ``mask_rowcols`` with `axis` equal to 0.
See Also
--------
mask_rowcols : Mask rows and/or columns of a 2D array.
masked_where : Mask where a condition is met.
Examples
--------
>>> import numpy.ma as ma
>>> a = np.zeros((3, 3), dtype=int)
>>> a[1, 1] = 1
>>> a
array([[0, 0, 0],
[0, 1, 0],
[0, 0, 0]])
>>> a = ma.masked_equal(a, 1)
>>> a
masked_array(data =
[[0 0 0]
[0 -- 0]
[0 0 0]],
mask =
[[False False False]
[False True False]
[False False False]],
fill_value=999999)
>>> ma.mask_rows(a)
masked_array(data =
[[0 0 0]
[-- -- --]
[0 0 0]],
mask =
[[False False False]
[ True True True]
[False False False]],
fill_value=999999)
"""
return mask_rowcols(a, 0)
|
[
"def",
"mask_rows",
"(",
"a",
",",
"axis",
"=",
"None",
")",
":",
"return",
"mask_rowcols",
"(",
"a",
",",
"0",
")"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/numpy-1.16.0-py3.7-macosx-10.9-x86_64.egg/numpy/ma/extras.py#L918-L961
|
|
kbandla/ImmunityDebugger
|
2abc03fb15c8f3ed0914e1175c4d8933977c73e3
|
1.84/Libs/libstackanalyze.py
|
python
|
StackFunction.getVarsSize
|
(self, offsets)
|
return self.varsSize
|
Get the size of the local vars, checking the difference between the offset
of two consecutives vars.
XXX:An unused local var can make this check unreliable.
@type offsets: LIST
@param offsets: a list of stack's constants
@rtype: DICTIONARY
@return: the key is the stack's constant, value is the size
|
Get the size of the local vars, checking the difference between the offset
of two consecutives vars.
XXX:An unused local var can make this check unreliable.
|
[
"Get",
"the",
"size",
"of",
"the",
"local",
"vars",
"checking",
"the",
"difference",
"between",
"the",
"offset",
"of",
"two",
"consecutives",
"vars",
".",
"XXX",
":",
"An",
"unused",
"local",
"var",
"can",
"make",
"this",
"check",
"unreliable",
"."
] |
def getVarsSize(self, offsets):
"""
Get the size of the local vars, checking the difference between the offset
of two consecutives vars.
XXX:An unused local var can make this check unreliable.
@type offsets: LIST
@param offsets: a list of stack's constants
@rtype: DICTIONARY
@return: the key is the stack's constant, value is the size
"""
self.varsSize = {}
offsets.sort()
last = 0
for off in offsets:
size = off - last
last = off
self.varsSize[off] = size
return self.varsSize
|
[
"def",
"getVarsSize",
"(",
"self",
",",
"offsets",
")",
":",
"self",
".",
"varsSize",
"=",
"{",
"}",
"offsets",
".",
"sort",
"(",
")",
"last",
"=",
"0",
"for",
"off",
"in",
"offsets",
":",
"size",
"=",
"off",
"-",
"last",
"last",
"=",
"off",
"self",
".",
"varsSize",
"[",
"off",
"]",
"=",
"size",
"return",
"self",
".",
"varsSize"
] |
https://github.com/kbandla/ImmunityDebugger/blob/2abc03fb15c8f3ed0914e1175c4d8933977c73e3/1.84/Libs/libstackanalyze.py#L391-L412
|
|
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/doc/ext/docscrape.py
|
python
|
Reader.is_empty
|
(self)
|
return not ''.join(self._str).strip()
|
[] |
def is_empty(self):
return not ''.join(self._str).strip()
|
[
"def",
"is_empty",
"(",
"self",
")",
":",
"return",
"not",
"''",
".",
"join",
"(",
"self",
".",
"_str",
")",
".",
"strip",
"(",
")"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/doc/ext/docscrape.py#L83-L84
|
|||
openedx/edx-platform
|
68dd185a0ab45862a2a61e0f803d7e03d2be71b5
|
openedx/features/course_experience/views/course_outline.py
|
python
|
CourseOutlineFragmentView.user_enrolled_after_completion_collection
|
(self, user, course_key)
|
Checks that the user has enrolled in the course after 01/24/2018, the date that
the completion API began data collection. If the user has enrolled in the course
before this date, they may see incomplete collection data. This is a temporary
check until all active enrollments are created after the date.
|
Checks that the user has enrolled in the course after 01/24/2018, the date that
the completion API began data collection. If the user has enrolled in the course
before this date, they may see incomplete collection data. This is a temporary
check until all active enrollments are created after the date.
|
[
"Checks",
"that",
"the",
"user",
"has",
"enrolled",
"in",
"the",
"course",
"after",
"01",
"/",
"24",
"/",
"2018",
"the",
"date",
"that",
"the",
"completion",
"API",
"began",
"data",
"collection",
".",
"If",
"the",
"user",
"has",
"enrolled",
"in",
"the",
"course",
"before",
"this",
"date",
"they",
"may",
"see",
"incomplete",
"collection",
"data",
".",
"This",
"is",
"a",
"temporary",
"check",
"until",
"all",
"active",
"enrollments",
"are",
"created",
"after",
"the",
"date",
"."
] |
def user_enrolled_after_completion_collection(self, user, course_key):
"""
Checks that the user has enrolled in the course after 01/24/2018, the date that
the completion API began data collection. If the user has enrolled in the course
before this date, they may see incomplete collection data. This is a temporary
check until all active enrollments are created after the date.
"""
user = User.objects.get(username=user)
try:
user_enrollment = CourseEnrollment.objects.get(
user=user,
course_id=course_key,
is_active=True
)
return user_enrollment.created > self._completion_data_collection_start()
except CourseEnrollment.DoesNotExist:
return False
|
[
"def",
"user_enrolled_after_completion_collection",
"(",
"self",
",",
"user",
",",
"course_key",
")",
":",
"user",
"=",
"User",
".",
"objects",
".",
"get",
"(",
"username",
"=",
"user",
")",
"try",
":",
"user_enrollment",
"=",
"CourseEnrollment",
".",
"objects",
".",
"get",
"(",
"user",
"=",
"user",
",",
"course_id",
"=",
"course_key",
",",
"is_active",
"=",
"True",
")",
"return",
"user_enrollment",
".",
"created",
">",
"self",
".",
"_completion_data_collection_start",
"(",
")",
"except",
"CourseEnrollment",
".",
"DoesNotExist",
":",
"return",
"False"
] |
https://github.com/openedx/edx-platform/blob/68dd185a0ab45862a2a61e0f803d7e03d2be71b5/openedx/features/course_experience/views/course_outline.py#L134-L150
|
||
R0uter/ss.conf-for-surge
|
920985fc052b72969762cc70c9c76d9abbe2aa72
|
ssconf.py
|
python
|
white_list_check
|
()
|
[] |
def white_list_check():
dnsmasq_china_list = 'https://r0uter.github.io/gfw_domain_whitelist/whitelist.pac'
try:
content = get_list(dnsmasq_china_list)
content = content.decode('utf-8')
f = codecs.open('./list/whitelist', 'w', 'utf-8')
f.write(content)
f.close()
except:
print('Get list update failed,use cache to update instead.')
whitelist = codecs.open('./list/whitelist','r','utf-8')
whitelistTxt = codecs.open('./list/whitelist.txt','w','utf-8')
whitelistTxt.write('// updated on ' + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S" + '\n'))
# Write list
for line in whitelist.readlines():
domain = re.findall(r'(?<=")[a-z0-9|\-]+\.\w+', line)
if len(domain) > 0:
whitelistTxt.write('DOMAIN-SUFFIX,%s,DIRECT\n'%(domain[0]))
whitelist.close()
whitelistTxt.close()
|
[
"def",
"white_list_check",
"(",
")",
":",
"dnsmasq_china_list",
"=",
"'https://r0uter.github.io/gfw_domain_whitelist/whitelist.pac'",
"try",
":",
"content",
"=",
"get_list",
"(",
"dnsmasq_china_list",
")",
"content",
"=",
"content",
".",
"decode",
"(",
"'utf-8'",
")",
"f",
"=",
"codecs",
".",
"open",
"(",
"'./list/whitelist'",
",",
"'w'",
",",
"'utf-8'",
")",
"f",
".",
"write",
"(",
"content",
")",
"f",
".",
"close",
"(",
")",
"except",
":",
"print",
"(",
"'Get list update failed,use cache to update instead.'",
")",
"whitelist",
"=",
"codecs",
".",
"open",
"(",
"'./list/whitelist'",
",",
"'r'",
",",
"'utf-8'",
")",
"whitelistTxt",
"=",
"codecs",
".",
"open",
"(",
"'./list/whitelist.txt'",
",",
"'w'",
",",
"'utf-8'",
")",
"whitelistTxt",
".",
"write",
"(",
"'// updated on '",
"+",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
".",
"strftime",
"(",
"\"%Y-%m-%d %H:%M:%S\"",
"+",
"'\\n'",
")",
")",
"# Write list",
"for",
"line",
"in",
"whitelist",
".",
"readlines",
"(",
")",
":",
"domain",
"=",
"re",
".",
"findall",
"(",
"r'(?<=\")[a-z0-9|\\-]+\\.\\w+'",
",",
"line",
")",
"if",
"len",
"(",
"domain",
")",
">",
"0",
":",
"whitelistTxt",
".",
"write",
"(",
"'DOMAIN-SUFFIX,%s,DIRECT\\n'",
"%",
"(",
"domain",
"[",
"0",
"]",
")",
")",
"whitelist",
".",
"close",
"(",
")",
"whitelistTxt",
".",
"close",
"(",
")"
] |
https://github.com/R0uter/ss.conf-for-surge/blob/920985fc052b72969762cc70c9c76d9abbe2aa72/ssconf.py#L25-L48
|
||||
coderholic/pyradio
|
cd3ee2d6b369fedfd009371a59aca23ab39b020f
|
pyradio/browser.py
|
python
|
RadioBrowserSearchWindow.focus
|
(self)
|
return self._focus
|
[] |
def focus(self):
return self._focus
|
[
"def",
"focus",
"(",
"self",
")",
":",
"return",
"self",
".",
"_focus"
] |
https://github.com/coderholic/pyradio/blob/cd3ee2d6b369fedfd009371a59aca23ab39b020f/pyradio/browser.py#L1789-L1790
|
|||
OpenMDAO/OpenMDAO
|
f47eb5485a0bb5ea5d2ae5bd6da4b94dc6b296bd
|
openmdao/utils/units.py
|
python
|
PhysicalUnit.name
|
(self)
|
return num + denom
|
Compute the name of this unit.
Returns
-------
str
String representation of the unit.
|
Compute the name of this unit.
|
[
"Compute",
"the",
"name",
"of",
"this",
"unit",
"."
] |
def name(self):
"""
Compute the name of this unit.
Returns
-------
str
String representation of the unit.
"""
num = ''
denom = ''
for unit, power in self._names.items():
if power < 0:
denom = denom + '/' + unit
if power < -1:
denom = denom + '**' + str(-power)
elif power > 0:
num = num + '*' + unit
if power > 1:
num = num + '**' + str(power)
if len(num) == 0:
num = '1'
else:
num = num[1:]
return num + denom
|
[
"def",
"name",
"(",
"self",
")",
":",
"num",
"=",
"''",
"denom",
"=",
"''",
"for",
"unit",
",",
"power",
"in",
"self",
".",
"_names",
".",
"items",
"(",
")",
":",
"if",
"power",
"<",
"0",
":",
"denom",
"=",
"denom",
"+",
"'/'",
"+",
"unit",
"if",
"power",
"<",
"-",
"1",
":",
"denom",
"=",
"denom",
"+",
"'**'",
"+",
"str",
"(",
"-",
"power",
")",
"elif",
"power",
">",
"0",
":",
"num",
"=",
"num",
"+",
"'*'",
"+",
"unit",
"if",
"power",
">",
"1",
":",
"num",
"=",
"num",
"+",
"'**'",
"+",
"str",
"(",
"power",
")",
"if",
"len",
"(",
"num",
")",
"==",
"0",
":",
"num",
"=",
"'1'",
"else",
":",
"num",
"=",
"num",
"[",
"1",
":",
"]",
"return",
"num",
"+",
"denom"
] |
https://github.com/OpenMDAO/OpenMDAO/blob/f47eb5485a0bb5ea5d2ae5bd6da4b94dc6b296bd/openmdao/utils/units.py#L566-L590
|
|
mila-iqia/myia
|
56774a39579b4ec4123f44843ad4ca688acc859b
|
myia/compile/backends/__init__.py
|
python
|
Converter.convert_bool
|
(self, v, t)
|
Convert boolean values.
|
Convert boolean values.
|
[
"Convert",
"boolean",
"values",
"."
] |
def convert_bool(self, v, t):
"""Convert boolean values."""
raise NotImplementedError("convert_bool")
|
[
"def",
"convert_bool",
"(",
"self",
",",
"v",
",",
"t",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"convert_bool\"",
")"
] |
https://github.com/mila-iqia/myia/blob/56774a39579b4ec4123f44843ad4ca688acc859b/myia/compile/backends/__init__.py#L272-L274
|
||
DataDog/integrations-core
|
934674b29d94b70ccc008f76ea172d0cdae05e1e
|
datadog_checks_dev/datadog_checks/dev/tooling/utils.py
|
python
|
get_check_directory
|
(check_name)
|
return os.path.join(get_root(), check_name, 'datadog_checks', check_name)
|
[] |
def get_check_directory(check_name):
return os.path.join(get_root(), check_name, 'datadog_checks', check_name)
|
[
"def",
"get_check_directory",
"(",
"check_name",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"get_root",
"(",
")",
",",
"check_name",
",",
"'datadog_checks'",
",",
"check_name",
")"
] |
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/datadog_checks_dev/datadog_checks/dev/tooling/utils.py#L298-L299
|
|||
TengXiaoDai/DistributedCrawling
|
f5c2439e6ce68dd9b49bde084d76473ff9ed4963
|
Lib/site-packages/pkg_resources/__init__.py
|
python
|
find_distributions
|
(path_item, only=False)
|
return finder(importer, path_item, only)
|
Yield distributions accessible via `path_item`
|
Yield distributions accessible via `path_item`
|
[
"Yield",
"distributions",
"accessible",
"via",
"path_item"
] |
def find_distributions(path_item, only=False):
"""Yield distributions accessible via `path_item`"""
importer = get_importer(path_item)
finder = _find_adapter(_distribution_finders, importer)
return finder(importer, path_item, only)
|
[
"def",
"find_distributions",
"(",
"path_item",
",",
"only",
"=",
"False",
")",
":",
"importer",
"=",
"get_importer",
"(",
"path_item",
")",
"finder",
"=",
"_find_adapter",
"(",
"_distribution_finders",
",",
"importer",
")",
"return",
"finder",
"(",
"importer",
",",
"path_item",
",",
"only",
")"
] |
https://github.com/TengXiaoDai/DistributedCrawling/blob/f5c2439e6ce68dd9b49bde084d76473ff9ed4963/Lib/site-packages/pkg_resources/__init__.py#L1933-L1937
|
|
tensorflow/models
|
6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3
|
research/slim/nets/vgg.py
|
python
|
vgg_16
|
(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
reuse=None,
scope='vgg_16',
fc_conv_padding='VALID',
global_pool=False)
|
Oxford Net VGG 16-Layers version D Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes. If 0 or None, the logits layer is
omitted and the input features to the logits layer are returned instead.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
spatial_squeeze: whether or not should squeeze the spatial dimensions of the
outputs. Useful to remove unnecessary dimensions for classification.
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
scope: Optional scope for the variables.
fc_conv_padding: the type of padding to use for the fully connected layer
that is implemented as a convolutional layer. Use 'SAME' padding if you
are applying the network in a fully convolutional manner and want to
get a prediction map downsampled by a factor of 32 as an output.
Otherwise, the output prediction map will be (input / 32) - 6 in case of
'VALID' padding.
global_pool: Optional boolean flag. If True, the input to the classification
layer is avgpooled to size 1x1, for any input size. (This is not part
of the original VGG architecture.)
Returns:
net: the output of the logits layer (if num_classes is a non-zero integer),
or the input to the logits layer (if num_classes is 0 or None).
end_points: a dict of tensors with intermediate activations.
|
Oxford Net VGG 16-Layers version D Example.
|
[
"Oxford",
"Net",
"VGG",
"16",
"-",
"Layers",
"version",
"D",
"Example",
"."
] |
def vgg_16(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
reuse=None,
scope='vgg_16',
fc_conv_padding='VALID',
global_pool=False):
"""Oxford Net VGG 16-Layers version D Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes. If 0 or None, the logits layer is
omitted and the input features to the logits layer are returned instead.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
spatial_squeeze: whether or not should squeeze the spatial dimensions of the
outputs. Useful to remove unnecessary dimensions for classification.
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
scope: Optional scope for the variables.
fc_conv_padding: the type of padding to use for the fully connected layer
that is implemented as a convolutional layer. Use 'SAME' padding if you
are applying the network in a fully convolutional manner and want to
get a prediction map downsampled by a factor of 32 as an output.
Otherwise, the output prediction map will be (input / 32) - 6 in case of
'VALID' padding.
global_pool: Optional boolean flag. If True, the input to the classification
layer is avgpooled to size 1x1, for any input size. (This is not part
of the original VGG architecture.)
Returns:
net: the output of the logits layer (if num_classes is a non-zero integer),
or the input to the logits layer (if num_classes is 0 or None).
end_points: a dict of tensors with intermediate activations.
"""
with tf.variable_scope(
scope, 'vgg_16', [inputs], reuse=reuse) as sc:
end_points_collection = sc.original_name_scope + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 3, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding=fc_conv_padding, scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
# Convert end_points_collection into a end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if global_pool:
net = tf.reduce_mean(
input_tensor=net, axis=[1, 2], keepdims=True, name='global_pool')
end_points['global_pool'] = net
if num_classes:
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
|
[
"def",
"vgg_16",
"(",
"inputs",
",",
"num_classes",
"=",
"1000",
",",
"is_training",
"=",
"True",
",",
"dropout_keep_prob",
"=",
"0.5",
",",
"spatial_squeeze",
"=",
"True",
",",
"reuse",
"=",
"None",
",",
"scope",
"=",
"'vgg_16'",
",",
"fc_conv_padding",
"=",
"'VALID'",
",",
"global_pool",
"=",
"False",
")",
":",
"with",
"tf",
".",
"variable_scope",
"(",
"scope",
",",
"'vgg_16'",
",",
"[",
"inputs",
"]",
",",
"reuse",
"=",
"reuse",
")",
"as",
"sc",
":",
"end_points_collection",
"=",
"sc",
".",
"original_name_scope",
"+",
"'_end_points'",
"# Collect outputs for conv2d, fully_connected and max_pool2d.",
"with",
"slim",
".",
"arg_scope",
"(",
"[",
"slim",
".",
"conv2d",
",",
"slim",
".",
"fully_connected",
",",
"slim",
".",
"max_pool2d",
"]",
",",
"outputs_collections",
"=",
"end_points_collection",
")",
":",
"net",
"=",
"slim",
".",
"repeat",
"(",
"inputs",
",",
"2",
",",
"slim",
".",
"conv2d",
",",
"64",
",",
"[",
"3",
",",
"3",
"]",
",",
"scope",
"=",
"'conv1'",
")",
"net",
"=",
"slim",
".",
"max_pool2d",
"(",
"net",
",",
"[",
"2",
",",
"2",
"]",
",",
"scope",
"=",
"'pool1'",
")",
"net",
"=",
"slim",
".",
"repeat",
"(",
"net",
",",
"2",
",",
"slim",
".",
"conv2d",
",",
"128",
",",
"[",
"3",
",",
"3",
"]",
",",
"scope",
"=",
"'conv2'",
")",
"net",
"=",
"slim",
".",
"max_pool2d",
"(",
"net",
",",
"[",
"2",
",",
"2",
"]",
",",
"scope",
"=",
"'pool2'",
")",
"net",
"=",
"slim",
".",
"repeat",
"(",
"net",
",",
"3",
",",
"slim",
".",
"conv2d",
",",
"256",
",",
"[",
"3",
",",
"3",
"]",
",",
"scope",
"=",
"'conv3'",
")",
"net",
"=",
"slim",
".",
"max_pool2d",
"(",
"net",
",",
"[",
"2",
",",
"2",
"]",
",",
"scope",
"=",
"'pool3'",
")",
"net",
"=",
"slim",
".",
"repeat",
"(",
"net",
",",
"3",
",",
"slim",
".",
"conv2d",
",",
"512",
",",
"[",
"3",
",",
"3",
"]",
",",
"scope",
"=",
"'conv4'",
")",
"net",
"=",
"slim",
".",
"max_pool2d",
"(",
"net",
",",
"[",
"2",
",",
"2",
"]",
",",
"scope",
"=",
"'pool4'",
")",
"net",
"=",
"slim",
".",
"repeat",
"(",
"net",
",",
"3",
",",
"slim",
".",
"conv2d",
",",
"512",
",",
"[",
"3",
",",
"3",
"]",
",",
"scope",
"=",
"'conv5'",
")",
"net",
"=",
"slim",
".",
"max_pool2d",
"(",
"net",
",",
"[",
"2",
",",
"2",
"]",
",",
"scope",
"=",
"'pool5'",
")",
"# Use conv2d instead of fully_connected layers.",
"net",
"=",
"slim",
".",
"conv2d",
"(",
"net",
",",
"4096",
",",
"[",
"7",
",",
"7",
"]",
",",
"padding",
"=",
"fc_conv_padding",
",",
"scope",
"=",
"'fc6'",
")",
"net",
"=",
"slim",
".",
"dropout",
"(",
"net",
",",
"dropout_keep_prob",
",",
"is_training",
"=",
"is_training",
",",
"scope",
"=",
"'dropout6'",
")",
"net",
"=",
"slim",
".",
"conv2d",
"(",
"net",
",",
"4096",
",",
"[",
"1",
",",
"1",
"]",
",",
"scope",
"=",
"'fc7'",
")",
"# Convert end_points_collection into a end_point dict.",
"end_points",
"=",
"slim",
".",
"utils",
".",
"convert_collection_to_dict",
"(",
"end_points_collection",
")",
"if",
"global_pool",
":",
"net",
"=",
"tf",
".",
"reduce_mean",
"(",
"input_tensor",
"=",
"net",
",",
"axis",
"=",
"[",
"1",
",",
"2",
"]",
",",
"keepdims",
"=",
"True",
",",
"name",
"=",
"'global_pool'",
")",
"end_points",
"[",
"'global_pool'",
"]",
"=",
"net",
"if",
"num_classes",
":",
"net",
"=",
"slim",
".",
"dropout",
"(",
"net",
",",
"dropout_keep_prob",
",",
"is_training",
"=",
"is_training",
",",
"scope",
"=",
"'dropout7'",
")",
"net",
"=",
"slim",
".",
"conv2d",
"(",
"net",
",",
"num_classes",
",",
"[",
"1",
",",
"1",
"]",
",",
"activation_fn",
"=",
"None",
",",
"normalizer_fn",
"=",
"None",
",",
"scope",
"=",
"'fc8'",
")",
"if",
"spatial_squeeze",
":",
"net",
"=",
"tf",
".",
"squeeze",
"(",
"net",
",",
"[",
"1",
",",
"2",
"]",
",",
"name",
"=",
"'fc8/squeezed'",
")",
"end_points",
"[",
"sc",
".",
"name",
"+",
"'/fc8'",
"]",
"=",
"net",
"return",
"net",
",",
"end_points"
] |
https://github.com/tensorflow/models/blob/6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3/research/slim/nets/vgg.py#L147-L226
|
||
limodou/ulipad
|
4c7d590234f39cac80bb1d36dca095b646e287fb
|
modules/wxctrl/FlatNotebook.py
|
python
|
TabNavigatorWindow.OnKeyUp
|
(self, event)
|
Handles the wx.EVT_KEY_UP for the L{TabNavigatorWindow}.
|
Handles the wx.EVT_KEY_UP for the L{TabNavigatorWindow}.
|
[
"Handles",
"the",
"wx",
".",
"EVT_KEY_UP",
"for",
"the",
"L",
"{",
"TabNavigatorWindow",
"}",
"."
] |
def OnKeyUp(self, event):
"""Handles the wx.EVT_KEY_UP for the L{TabNavigatorWindow}."""
if event.GetKeyCode() == wx.WXK_CONTROL:
self.CloseDialog()
|
[
"def",
"OnKeyUp",
"(",
"self",
",",
"event",
")",
":",
"if",
"event",
".",
"GetKeyCode",
"(",
")",
"==",
"wx",
".",
"WXK_CONTROL",
":",
"self",
".",
"CloseDialog",
"(",
")"
] |
https://github.com/limodou/ulipad/blob/4c7d590234f39cac80bb1d36dca095b646e287fb/modules/wxctrl/FlatNotebook.py#L1148-L1152
|
||
kupferlauncher/kupfer
|
1c1e9bcbce05a82f503f68f8b3955c20b02639b3
|
kupfer/core/plugins.py
|
python
|
get_plugin_desc
|
()
|
return "\n".join(desc)
|
Return a formatted list of plugins suitable for printing to terminal
|
Return a formatted list of plugins suitable for printing to terminal
|
[
"Return",
"a",
"formatted",
"list",
"of",
"plugins",
"suitable",
"for",
"printing",
"to",
"terminal"
] |
def get_plugin_desc():
"""Return a formatted list of plugins suitable for printing to terminal"""
import textwrap
infos = list(get_plugin_info())
verlen = max(len(r["version"]) for r in infos)
idlen = max(len(r["name"]) for r in infos)
maxlen = 78
left_margin = 2 + idlen + 1 + verlen + 1
desc = []
for rec in infos:
# Wrap the description and align continued lines
wrapped = textwrap.wrap(rec["description"], maxlen - left_margin)
description = ("\n" + " "*left_margin).join(wrapped)
desc.append(" %s %s %s" %
(
rec["name"].ljust(idlen),
rec["version"].ljust(verlen),
description,
))
return "\n".join(desc)
|
[
"def",
"get_plugin_desc",
"(",
")",
":",
"import",
"textwrap",
"infos",
"=",
"list",
"(",
"get_plugin_info",
"(",
")",
")",
"verlen",
"=",
"max",
"(",
"len",
"(",
"r",
"[",
"\"version\"",
"]",
")",
"for",
"r",
"in",
"infos",
")",
"idlen",
"=",
"max",
"(",
"len",
"(",
"r",
"[",
"\"name\"",
"]",
")",
"for",
"r",
"in",
"infos",
")",
"maxlen",
"=",
"78",
"left_margin",
"=",
"2",
"+",
"idlen",
"+",
"1",
"+",
"verlen",
"+",
"1",
"desc",
"=",
"[",
"]",
"for",
"rec",
"in",
"infos",
":",
"# Wrap the description and align continued lines",
"wrapped",
"=",
"textwrap",
".",
"wrap",
"(",
"rec",
"[",
"\"description\"",
"]",
",",
"maxlen",
"-",
"left_margin",
")",
"description",
"=",
"(",
"\"\\n\"",
"+",
"\" \"",
"*",
"left_margin",
")",
".",
"join",
"(",
"wrapped",
")",
"desc",
".",
"append",
"(",
"\" %s %s %s\"",
"%",
"(",
"rec",
"[",
"\"name\"",
"]",
".",
"ljust",
"(",
"idlen",
")",
",",
"rec",
"[",
"\"version\"",
"]",
".",
"ljust",
"(",
"verlen",
")",
",",
"description",
",",
")",
")",
"return",
"\"\\n\"",
".",
"join",
"(",
"desc",
")"
] |
https://github.com/kupferlauncher/kupfer/blob/1c1e9bcbce05a82f503f68f8b3955c20b02639b3/kupfer/core/plugins.py#L90-L109
|
|
bitcraze/crazyflie-clients-python
|
65d433a945b097333e5681a937354045dd4b66f4
|
src/cfclient/ui/dialogs/logconfigdialogue.py
|
python
|
LogConfigDialogue._item_selected
|
(self)
|
Opens the log configuration of the pressed
item in the category-tree.
|
Opens the log configuration of the pressed
item in the category-tree.
|
[
"Opens",
"the",
"log",
"configuration",
"of",
"the",
"pressed",
"item",
"in",
"the",
"category",
"-",
"tree",
"."
] |
def _item_selected(self):
""" Opens the log configuration of the pressed
item in the category-tree. """
items = self.categoryTree.selectedItems()
if items:
config = items[0]
category = config.parent()
if category:
self._loadConfig(category.text(NAME_FIELD),
config.text(NAME_FIELD))
else:
# if category is None, it's the category that's clicked
self._clear_trees_and_progressbar()
|
[
"def",
"_item_selected",
"(",
"self",
")",
":",
"items",
"=",
"self",
".",
"categoryTree",
".",
"selectedItems",
"(",
")",
"if",
"items",
":",
"config",
"=",
"items",
"[",
"0",
"]",
"category",
"=",
"config",
".",
"parent",
"(",
")",
"if",
"category",
":",
"self",
".",
"_loadConfig",
"(",
"category",
".",
"text",
"(",
"NAME_FIELD",
")",
",",
"config",
".",
"text",
"(",
"NAME_FIELD",
")",
")",
"else",
":",
"# if category is None, it's the category that's clicked",
"self",
".",
"_clear_trees_and_progressbar",
"(",
")"
] |
https://github.com/bitcraze/crazyflie-clients-python/blob/65d433a945b097333e5681a937354045dd4b66f4/src/cfclient/ui/dialogs/logconfigdialogue.py#L284-L297
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.