nwo
stringlengths 5
106
| sha
stringlengths 40
40
| path
stringlengths 4
174
| language
stringclasses 1
value | identifier
stringlengths 1
140
| parameters
stringlengths 0
87.7k
| argument_list
stringclasses 1
value | return_statement
stringlengths 0
426k
| docstring
stringlengths 0
64.3k
| docstring_summary
stringlengths 0
26.3k
| docstring_tokens
list | function
stringlengths 18
4.83M
| function_tokens
list | url
stringlengths 83
304
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cclib/cclib
|
81cd4a81cc4a3bbed7016b3e417ca9bff8ad3a92
|
cclib/io/wfxwriter.py
|
python
|
WFXWriter._mo_energies
|
(self)
|
return mo_energies
|
Section: Molecular Orbital Energies.
|
Section: Molecular Orbital Energies.
|
[
"Section",
":",
"Molecular",
"Orbital",
"Energies",
"."
] |
def _mo_energies(self):
    """Section: Molecular Orbital Energies.

    Build the list of occupied MO energies — alpha orbitals first and,
    for open-shell systems (mult > 1), beta orbitals after — each
    converted from eV to hartree and formatted with WFX_FIELD_FMT.
    """
    mo_energies = []
    # Fix: local variable was misspelled "alpha_elctrons" (rename only,
    # no behavior change).
    alpha_electrons = self._no_alpha_electrons()
    beta_electrons = self._no_beta_electrons()
    for mo_energy in self.ccdata.moenergies[0][:alpha_electrons]:
        mo_energies.append(WFX_FIELD_FMT % (
            utils.convertor(mo_energy, 'eV', 'hartree')))
    if self.ccdata.mult > 1:
        for mo_energy in self.ccdata.moenergies[1][:beta_electrons]:
            mo_energies.append(WFX_FIELD_FMT % (
                utils.convertor(mo_energy, 'eV', 'hartree')))
    return mo_energies
|
[
"def",
"_mo_energies",
"(",
"self",
")",
":",
"mo_energies",
"=",
"[",
"]",
"alpha_elctrons",
"=",
"self",
".",
"_no_alpha_electrons",
"(",
")",
"beta_electrons",
"=",
"self",
".",
"_no_beta_electrons",
"(",
")",
"for",
"mo_energy",
"in",
"self",
".",
"ccdata",
".",
"moenergies",
"[",
"0",
"]",
"[",
":",
"alpha_elctrons",
"]",
":",
"mo_energies",
".",
"append",
"(",
"WFX_FIELD_FMT",
"%",
"(",
"utils",
".",
"convertor",
"(",
"mo_energy",
",",
"'eV'",
",",
"'hartree'",
")",
")",
")",
"if",
"self",
".",
"ccdata",
".",
"mult",
">",
"1",
":",
"for",
"mo_energy",
"in",
"self",
".",
"ccdata",
".",
"moenergies",
"[",
"1",
"]",
"[",
":",
"beta_electrons",
"]",
":",
"mo_energies",
".",
"append",
"(",
"WFX_FIELD_FMT",
"%",
"(",
"utils",
".",
"convertor",
"(",
"mo_energy",
",",
"'eV'",
",",
"'hartree'",
")",
")",
")",
"return",
"mo_energies"
] |
https://github.com/cclib/cclib/blob/81cd4a81cc4a3bbed7016b3e417ca9bff8ad3a92/cclib/io/wfxwriter.py#L292-L304
|
|
openhatch/oh-mainline
|
ce29352a034e1223141dcc2f317030bbc3359a51
|
vendor/packages/celery/celery/result.py
|
python
|
BaseAsyncResult.successful
|
(self)
|
return self.status == states.SUCCESS
|
Returns :const:`True` if the task executed successfully.
|
Returns :const:`True` if the task executed successfully.
|
[
"Returns",
":",
"const",
":",
"True",
"if",
"the",
"task",
"executed",
"successfully",
"."
] |
def successful(self):
    """Return :const:`True` when the task finished in the SUCCESS state."""
    return states.SUCCESS == self.status
|
[
"def",
"successful",
"(",
"self",
")",
":",
"return",
"self",
".",
"status",
"==",
"states",
".",
"SUCCESS"
] |
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/celery/celery/result.py#L110-L112
|
|
mardix/assembly
|
4c993d19bc9d33c1641323e03231e9ecad711b38
|
assembly/utils.py
|
python
|
list_replace
|
(subject_list, replacement, string)
|
return string
|
To replace a list of items by a single replacement
:param subject_list: list
:param replacement: string
:param string: string
:return: string
|
To replace a list of items by a single replacement
:param subject_list: list
:param replacement: string
:param string: string
:return: string
|
[
"To",
"replace",
"a",
"list",
"of",
"items",
"by",
"a",
"single",
"replacement",
":",
"param",
"subject_list",
":",
"list",
":",
"param",
"replacement",
":",
"string",
":",
"param",
"string",
":",
"string",
":",
"return",
":",
"string"
] |
def list_replace(subject_list, replacement, string):
    """
    Replace every occurrence of each item of ``subject_list`` found in
    ``string`` with the single ``replacement`` value.
    :param subject_list: list of substrings to replace
    :param replacement: string substituted for each match
    :param string: string to operate on
    :return: the resulting string
    """
    result = string
    for needle in subject_list:
        result = result.replace(needle, replacement)
    return result
|
[
"def",
"list_replace",
"(",
"subject_list",
",",
"replacement",
",",
"string",
")",
":",
"for",
"s",
"in",
"subject_list",
":",
"string",
"=",
"string",
".",
"replace",
"(",
"s",
",",
"replacement",
")",
"return",
"string"
] |
https://github.com/mardix/assembly/blob/4c993d19bc9d33c1641323e03231e9ecad711b38/assembly/utils.py#L191-L201
|
|
behave/behave
|
e6364fe3d62c2befe34bc56471cfb317a218cd01
|
behave4cmd0/command_shell.py
|
python
|
Command.run
|
(cls, command, cwd=".", **kwargs)
|
return command_result
|
Make a subprocess call, collect its output and returncode.
Returns CommandResult instance as ValueObject.
|
Make a subprocess call, collect its output and returncode.
Returns CommandResult instance as ValueObject.
|
[
"Make",
"a",
"subprocess",
"call",
"collect",
"its",
"output",
"and",
"returncode",
".",
"Returns",
"CommandResult",
"instance",
"as",
"ValueObject",
"."
] |
def run(cls, command, cwd=".", **kwargs):
    """
    Make a subprocess call, collect its output and returncode.
    Returns CommandResult instance as ValueObject.

    :param command: Command line to run, as a single string (split with shlex).
    :param cwd: Working directory for the subprocess (default: ".").
    :param kwargs: Extra keyword args forwarded to subprocess.Popen;
        a "shell" key, if present, overrides cls.USE_SHELL.
    :return: CommandResult with command, stdout, stderr and returncode set.
    """
    assert isinstance(command, six.string_types)
    command_result = CommandResult()
    command_result.command = command
    use_shell = cls.USE_SHELL
    # Pop "shell" so it is not passed to Popen twice below.
    if "shell" in kwargs:
        use_shell = kwargs.pop("shell")

    # -- BUILD COMMAND ARGS:
    if six.PY2 and isinstance(command, six.text_type):
        # -- PREPARE-FOR: shlex.split()
        # In PY2, shlex.split() requires bytes string (non-unicode).
        # In PY3, shlex.split() accepts unicode string.
        command = codecs.encode(command, "utf-8")
    cmdargs = shlex.split(command)

    # -- TRANSFORM COMMAND (optional)
    # command0 (the program name) keys both the COMMAND_MAP rewrite here
    # and the POSTPROCESSOR_MAP lookup at the end.
    command0 = cmdargs[0]
    real_command = cls.COMMAND_MAP.get(command0, None)
    if real_command:
        cmdargs0 = real_command.split()
        cmdargs = cmdargs0 + cmdargs[1:]
    preprocessors = cls.PREPROCESSOR_MAP.get(command0)
    if preprocessors:
        cmdargs = cls.preprocess_command(preprocessors, cmdargs, command, cwd)

    # -- RUN COMMAND:
    try:
        process = subprocess.Popen(cmdargs,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   universal_newlines=True,
                                   shell=use_shell,
                                   cwd=cwd, **kwargs)
        out, err = process.communicate()
        if six.PY2: # py3: we get unicode strings, py2 not
            # default_encoding = "UTF-8"
            out = _text(out, process.stdout.encoding)
            err = _text(err, process.stderr.encoding)
        # communicate() already waited; poll() just refreshes returncode.
        process.poll()
        assert process.returncode is not None
        command_result.stdout = out
        command_result.stderr = err
        command_result.returncode = process.returncode
        if cls.DEBUG:
            print("shell.cwd={0}".format(kwargs.get("cwd", None)))
            print("shell.command: {0}".format(" ".join(cmdargs)))
            print("shell.command.output:\n{0};".format(command_result.output))
    except OSError as e:
        # Command could not be started (e.g. executable not found);
        # report the OS error through the result object instead of raising.
        command_result.stderr = u"OSError: %s" % e
        command_result.returncode = e.errno
        assert e.errno != 0

    postprocessors = cls.POSTPROCESSOR_MAP.get(command0)
    if postprocessors:
        command_result = cls.postprocess_command(postprocessors, command_result)
    return command_result
|
[
"def",
"run",
"(",
"cls",
",",
"command",
",",
"cwd",
"=",
"\".\"",
",",
"*",
"*",
"kwargs",
")",
":",
"assert",
"isinstance",
"(",
"command",
",",
"six",
".",
"string_types",
")",
"command_result",
"=",
"CommandResult",
"(",
")",
"command_result",
".",
"command",
"=",
"command",
"use_shell",
"=",
"cls",
".",
"USE_SHELL",
"if",
"\"shell\"",
"in",
"kwargs",
":",
"use_shell",
"=",
"kwargs",
".",
"pop",
"(",
"\"shell\"",
")",
"# -- BUILD COMMAND ARGS:",
"if",
"six",
".",
"PY2",
"and",
"isinstance",
"(",
"command",
",",
"six",
".",
"text_type",
")",
":",
"# -- PREPARE-FOR: shlex.split()",
"# In PY2, shlex.split() requires bytes string (non-unicode).",
"# In PY3, shlex.split() accepts unicode string.",
"command",
"=",
"codecs",
".",
"encode",
"(",
"command",
",",
"\"utf-8\"",
")",
"cmdargs",
"=",
"shlex",
".",
"split",
"(",
"command",
")",
"# -- TRANSFORM COMMAND (optional)",
"command0",
"=",
"cmdargs",
"[",
"0",
"]",
"real_command",
"=",
"cls",
".",
"COMMAND_MAP",
".",
"get",
"(",
"command0",
",",
"None",
")",
"if",
"real_command",
":",
"cmdargs0",
"=",
"real_command",
".",
"split",
"(",
")",
"cmdargs",
"=",
"cmdargs0",
"+",
"cmdargs",
"[",
"1",
":",
"]",
"preprocessors",
"=",
"cls",
".",
"PREPROCESSOR_MAP",
".",
"get",
"(",
"command0",
")",
"if",
"preprocessors",
":",
"cmdargs",
"=",
"cls",
".",
"preprocess_command",
"(",
"preprocessors",
",",
"cmdargs",
",",
"command",
",",
"cwd",
")",
"# -- RUN COMMAND:",
"try",
":",
"process",
"=",
"subprocess",
".",
"Popen",
"(",
"cmdargs",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
",",
"universal_newlines",
"=",
"True",
",",
"shell",
"=",
"use_shell",
",",
"cwd",
"=",
"cwd",
",",
"*",
"*",
"kwargs",
")",
"out",
",",
"err",
"=",
"process",
".",
"communicate",
"(",
")",
"if",
"six",
".",
"PY2",
":",
"# py3: we get unicode strings, py2 not",
"# default_encoding = \"UTF-8\"",
"out",
"=",
"_text",
"(",
"out",
",",
"process",
".",
"stdout",
".",
"encoding",
")",
"err",
"=",
"_text",
"(",
"err",
",",
"process",
".",
"stderr",
".",
"encoding",
")",
"process",
".",
"poll",
"(",
")",
"assert",
"process",
".",
"returncode",
"is",
"not",
"None",
"command_result",
".",
"stdout",
"=",
"out",
"command_result",
".",
"stderr",
"=",
"err",
"command_result",
".",
"returncode",
"=",
"process",
".",
"returncode",
"if",
"cls",
".",
"DEBUG",
":",
"print",
"(",
"\"shell.cwd={0}\"",
".",
"format",
"(",
"kwargs",
".",
"get",
"(",
"\"cwd\"",
",",
"None",
")",
")",
")",
"print",
"(",
"\"shell.command: {0}\"",
".",
"format",
"(",
"\" \"",
".",
"join",
"(",
"cmdargs",
")",
")",
")",
"print",
"(",
"\"shell.command.output:\\n{0};\"",
".",
"format",
"(",
"command_result",
".",
"output",
")",
")",
"except",
"OSError",
"as",
"e",
":",
"command_result",
".",
"stderr",
"=",
"u\"OSError: %s\"",
"%",
"e",
"command_result",
".",
"returncode",
"=",
"e",
".",
"errno",
"assert",
"e",
".",
"errno",
"!=",
"0",
"postprocessors",
"=",
"cls",
".",
"POSTPROCESSOR_MAP",
".",
"get",
"(",
"command0",
")",
"if",
"postprocessors",
":",
"command_result",
"=",
"cls",
".",
"postprocess_command",
"(",
"postprocessors",
",",
"command_result",
")",
"return",
"command_result"
] |
https://github.com/behave/behave/blob/e6364fe3d62c2befe34bc56471cfb317a218cd01/behave4cmd0/command_shell.py#L101-L162
|
|
tanghaibao/goatools
|
647e9dd833695f688cd16c2f9ea18f1692e5c6bc
|
goatools/randseed.py
|
python
|
RandomSeed32.prt_seed
|
(self, seed, prt=sys.stdout)
|
Print given random seed.
|
Print given random seed.
|
[
"Print",
"given",
"random",
"seed",
"."
] |
def prt_seed(self, seed, prt=sys.stdout):
    """Write the given random seed to the *prt* stream (stdout by default)."""
    seed_txt = self.get_seed_str(seed)
    prt.write("  RANDOM SEED = {SEED}\n".format(SEED=seed_txt))
|
[
"def",
"prt_seed",
"(",
"self",
",",
"seed",
",",
"prt",
"=",
"sys",
".",
"stdout",
")",
":",
"prt",
".",
"write",
"(",
"\" RANDOM SEED = {SEED}\\n\"",
".",
"format",
"(",
"SEED",
"=",
"self",
".",
"get_seed_str",
"(",
"seed",
")",
")",
")"
] |
https://github.com/tanghaibao/goatools/blob/647e9dd833695f688cd16c2f9ea18f1692e5c6bc/goatools/randseed.py#L35-L37
|
||
roryk/ipython-cluster-helper
|
20bca58255608937fdb24a791db3372047053e8e
|
cluster_helper/utils.py
|
python
|
safe_makedir
|
(dname)
|
return dname
|
Make a directory if it doesn't exist, handling concurrent race conditions.
|
Make a directory if it doesn't exist, handling concurrent race conditions.
|
[
"Make",
"a",
"directory",
"if",
"it",
"doesn",
"t",
"exist",
"handling",
"concurrent",
"race",
"conditions",
"."
] |
def safe_makedir(dname):
    """Create directory *dname* if it is missing, tolerating races.

    Another process may create the same directory between our existence
    check and makedirs(); on OSError we retry a few times (sleeping in
    between) before re-raising. A falsy *dname* is returned unchanged.
    """
    if not dname:
        return dname
    attempts = 0
    retry_limit = 5
    while not os.path.exists(dname):
        # Concurrent creation by another process raises OSError here.
        try:
            os.makedirs(dname)
        except OSError:
            if attempts > retry_limit:
                raise
            attempts += 1
            time.sleep(2)
    return dname
|
[
"def",
"safe_makedir",
"(",
"dname",
")",
":",
"if",
"not",
"dname",
":",
"return",
"dname",
"num_tries",
"=",
"0",
"max_tries",
"=",
"5",
"while",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"dname",
")",
":",
"# we could get an error here if multiple processes are creating",
"# the directory at the same time. Grr, concurrency.",
"try",
":",
"os",
".",
"makedirs",
"(",
"dname",
")",
"except",
"OSError",
":",
"if",
"num_tries",
">",
"max_tries",
":",
"raise",
"num_tries",
"+=",
"1",
"time",
".",
"sleep",
"(",
"2",
")",
"return",
"dname"
] |
https://github.com/roryk/ipython-cluster-helper/blob/20bca58255608937fdb24a791db3372047053e8e/cluster_helper/utils.py#L15-L32
|
|
timonwong/OmniMarkupPreviewer
|
21921ac7a99d2b5924a2219b33679a5b53621392
|
OmniMarkupLib/libs/bottle.py
|
python
|
BaseResponse.copy
|
(self)
|
return copy
|
Returns a copy of self.
|
Returns a copy of self.
|
[
"Returns",
"a",
"copy",
"of",
"self",
"."
] |
def copy(self):
    ''' Returns a copy of self. '''
    # Duplicate status and headers; header value lists are sliced so the
    # clone does not share mutable state with the original response.
    clone = Response()
    clone.status = self.status
    clone._headers = {name: values[:] for (name, values) in self._headers.items()}
    return clone
|
[
"def",
"copy",
"(",
"self",
")",
":",
"copy",
"=",
"Response",
"(",
")",
"copy",
".",
"status",
"=",
"self",
".",
"status",
"copy",
".",
"_headers",
"=",
"dict",
"(",
"(",
"k",
",",
"v",
"[",
":",
"]",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"self",
".",
"_headers",
".",
"items",
"(",
")",
")",
"return",
"copy"
] |
https://github.com/timonwong/OmniMarkupPreviewer/blob/21921ac7a99d2b5924a2219b33679a5b53621392/OmniMarkupLib/libs/bottle.py#L1296-L1301
|
|
wxWidgets/Phoenix
|
b2199e299a6ca6d866aa6f3d0888499136ead9d6
|
wx/lib/agw/aui/framemanager.py
|
python
|
AuiManager.OnTabPageClose
|
(self, event)
|
Handles the ``EVT_AUINOTEBOOK_PAGE_CLOSE`` event.
:param `event`: a :class:`~wx.lib.agw.aui.auibook.AuiNotebookEvent` event to be processed.
|
Handles the ``EVT_AUINOTEBOOK_PAGE_CLOSE`` event.
|
[
"Handles",
"the",
"EVT_AUINOTEBOOK_PAGE_CLOSE",
"event",
"."
] |
def OnTabPageClose(self, event):
    """
    Handles the ``EVT_AUINOTEBOOK_PAGE_CLOSE`` event.

    Vetoes the notebook's own page removal, fires a vetoable
    ``wxEVT_AUI_PANE_CLOSE`` event, and (if not vetoed) closes the pane
    asynchronously via :func:`wx.CallAfter`.

    :param `event`: a :class:`~wx.lib.agw.aui.auibook.AuiNotebookEvent` event to be processed.
    """
    if self._masterManager:
        # Delegate to the master manager when this manager is nested.
        self._masterManager.OnTabPageClose(event)
    else:
        p = self.PaneFromTabEvent(event)
        if p.IsOk():
            # veto it because we will call "RemovePage" ourselves
            event.Veto()
            # Now ask the app if they really want to close...
            # fire pane close event
            e = AuiManagerEvent(wxEVT_AUI_PANE_CLOSE)
            e.SetPane(p)
            e.SetCanVeto(True)
            self.ProcessMgrEvent(e)
            if e.GetVeto():
                return
            # Close/update asynchronously, because
            # the notebook which generated the event
            # (and triggered this method call) will
            # be deleted.
            def close():
                self.ClosePane(p)
                self.Update()
            wx.CallAfter(close)
        else:
            # Event did not map to a managed pane; let others handle it.
            event.Skip()
|
[
"def",
"OnTabPageClose",
"(",
"self",
",",
"event",
")",
":",
"if",
"self",
".",
"_masterManager",
":",
"self",
".",
"_masterManager",
".",
"OnTabPageClose",
"(",
"event",
")",
"else",
":",
"p",
"=",
"self",
".",
"PaneFromTabEvent",
"(",
"event",
")",
"if",
"p",
".",
"IsOk",
"(",
")",
":",
"# veto it because we will call \"RemovePage\" ourselves",
"event",
".",
"Veto",
"(",
")",
"# Now ask the app if they really want to close...",
"# fire pane close event",
"e",
"=",
"AuiManagerEvent",
"(",
"wxEVT_AUI_PANE_CLOSE",
")",
"e",
".",
"SetPane",
"(",
"p",
")",
"e",
".",
"SetCanVeto",
"(",
"True",
")",
"self",
".",
"ProcessMgrEvent",
"(",
"e",
")",
"if",
"e",
".",
"GetVeto",
"(",
")",
":",
"return",
"# Close/update asynchronously, because",
"# the notebook which generated the event",
"# (and triggered this method call) will",
"# be deleted.",
"def",
"close",
"(",
")",
":",
"self",
".",
"ClosePane",
"(",
"p",
")",
"self",
".",
"Update",
"(",
")",
"wx",
".",
"CallAfter",
"(",
"close",
")",
"else",
":",
"event",
".",
"Skip",
"(",
")"
] |
https://github.com/wxWidgets/Phoenix/blob/b2199e299a6ca6d866aa6f3d0888499136ead9d6/wx/lib/agw/aui/framemanager.py#L7176-L7214
|
||
hxdengBerkeley/PointCNN.Pytorch
|
6ec6c291cf97923a84fb6ed8c82e98bf01e7e96d
|
sem_seg/indoor3d_util.py
|
python
|
collect_bounding_box
|
(anno_path, out_filename)
|
Compute bounding boxes from each instance in original dataset files on
one room. **We assume the bbox is aligned with XYZ coordinate.**
Args:
anno_path: path to annotations. e.g. Area_1/office_2/Annotations/
out_filename: path to save instance bounding boxes for that room.
each line is x1 y1 z1 x2 y2 z2 label,
where (x1,y1,z1) is the point on the diagonal closer to origin
Returns:
None
Note:
room points are shifted, the most negative point is now at origin.
|
Compute bounding boxes from each instance in original dataset files on
one room. **We assume the bbox is aligned with XYZ coordinate.**
Args:
anno_path: path to annotations. e.g. Area_1/office_2/Annotations/
out_filename: path to save instance bounding boxes for that room.
each line is x1 y1 z1 x2 y2 z2 label,
where (x1,y1,z1) is the point on the diagonal closer to origin
Returns:
None
Note:
room points are shifted, the most negative point is now at origin.
|
[
"Compute",
"bounding",
"boxes",
"from",
"each",
"instance",
"in",
"original",
"dataset",
"files",
"on",
"one",
"room",
".",
"**",
"We",
"assume",
"the",
"bbox",
"is",
"aligned",
"with",
"XYZ",
"coordinate",
".",
"**",
"Args",
":",
"anno_path",
":",
"path",
"to",
"annotations",
".",
"e",
".",
"g",
".",
"Area_1",
"/",
"office_2",
"/",
"Annotations",
"/",
"out_filename",
":",
"path",
"to",
"save",
"instance",
"bounding",
"boxes",
"for",
"that",
"room",
".",
"each",
"line",
"is",
"x1",
"y1",
"z1",
"x2",
"y2",
"z2",
"label",
"where",
"(",
"x1",
"y1",
"z1",
")",
"is",
"the",
"point",
"on",
"the",
"diagonal",
"closer",
"to",
"origin",
"Returns",
":",
"None",
"Note",
":",
"room",
"points",
"are",
"shifted",
"the",
"most",
"negative",
"point",
"is",
"now",
"at",
"origin",
"."
] |
def collect_bounding_box(anno_path, out_filename):
    """ Compute bounding boxes from each instance in original dataset files on
    one room. **We assume the bbox is aligned with XYZ coordinate.**
    Args:
        anno_path: path to annotations. e.g. Area_1/office_2/Annotations/
        out_filename: path to save instance bounding boxes for that room.
            each line is x1 y1 z1 x2 y2 z2 label,
            where (x1,y1,z1) is the point on the diagonal closer to origin
    Returns:
        None
    Note:
        room points are shifted, the most negative point is now at origin.
    """
    bbox_label_list = []
    for f in glob.glob(os.path.join(anno_path, '*.txt')):
        # Class name is the leading token of the file name, e.g. "chair_1.txt".
        cls = os.path.basename(f).split('_')[0]
        if cls not in g_classes: # note: in some room there is 'staris' class..
            cls = 'clutter'
        points = np.loadtxt(f)
        label = g_class2label[cls]
        # Compute tightest axis aligned bounding box
        xyz_min = np.amin(points[:, 0:3], axis=0)
        xyz_max = np.amax(points[:, 0:3], axis=0)
        ins_bbox_label = np.expand_dims(
            np.concatenate([xyz_min, xyz_max, np.array([label])], 0), 0)
        bbox_label_list.append(ins_bbox_label)

    bbox_label = np.concatenate(bbox_label_list, 0)
    # Shift the room so its most negative corner sits at the origin.
    room_xyz_min = np.amin(bbox_label[:, 0:3], axis=0)
    bbox_label[:, 0:3] -= room_xyz_min
    bbox_label[:, 3:6] -= room_xyz_min

    # Fix: use a context manager so the file handle is closed even if a
    # write raises (the original leaked the handle on error).
    with open(out_filename, 'w') as fout:
        for i in range(bbox_label.shape[0]):
            fout.write('%f %f %f %f %f %f %d\n' % \
                (bbox_label[i,0], bbox_label[i,1], bbox_label[i,2],
                 bbox_label[i,3], bbox_label[i,4], bbox_label[i,5],
                 bbox_label[i,6]))
|
[
"def",
"collect_bounding_box",
"(",
"anno_path",
",",
"out_filename",
")",
":",
"bbox_label_list",
"=",
"[",
"]",
"for",
"f",
"in",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"anno_path",
",",
"'*.txt'",
")",
")",
":",
"cls",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"f",
")",
".",
"split",
"(",
"'_'",
")",
"[",
"0",
"]",
"if",
"cls",
"not",
"in",
"g_classes",
":",
"# note: in some room there is 'staris' class..",
"cls",
"=",
"'clutter'",
"points",
"=",
"np",
".",
"loadtxt",
"(",
"f",
")",
"label",
"=",
"g_class2label",
"[",
"cls",
"]",
"# Compute tightest axis aligned bounding box",
"xyz_min",
"=",
"np",
".",
"amin",
"(",
"points",
"[",
":",
",",
"0",
":",
"3",
"]",
",",
"axis",
"=",
"0",
")",
"xyz_max",
"=",
"np",
".",
"amax",
"(",
"points",
"[",
":",
",",
"0",
":",
"3",
"]",
",",
"axis",
"=",
"0",
")",
"ins_bbox_label",
"=",
"np",
".",
"expand_dims",
"(",
"np",
".",
"concatenate",
"(",
"[",
"xyz_min",
",",
"xyz_max",
",",
"np",
".",
"array",
"(",
"[",
"label",
"]",
")",
"]",
",",
"0",
")",
",",
"0",
")",
"bbox_label_list",
".",
"append",
"(",
"ins_bbox_label",
")",
"bbox_label",
"=",
"np",
".",
"concatenate",
"(",
"bbox_label_list",
",",
"0",
")",
"room_xyz_min",
"=",
"np",
".",
"amin",
"(",
"bbox_label",
"[",
":",
",",
"0",
":",
"3",
"]",
",",
"axis",
"=",
"0",
")",
"bbox_label",
"[",
":",
",",
"0",
":",
"3",
"]",
"-=",
"room_xyz_min",
"bbox_label",
"[",
":",
",",
"3",
":",
"6",
"]",
"-=",
"room_xyz_min",
"fout",
"=",
"open",
"(",
"out_filename",
",",
"'w'",
")",
"for",
"i",
"in",
"range",
"(",
"bbox_label",
".",
"shape",
"[",
"0",
"]",
")",
":",
"fout",
".",
"write",
"(",
"'%f %f %f %f %f %f %d\\n'",
"%",
"(",
"bbox_label",
"[",
"i",
",",
"0",
"]",
",",
"bbox_label",
"[",
"i",
",",
"1",
"]",
",",
"bbox_label",
"[",
"i",
",",
"2",
"]",
",",
"bbox_label",
"[",
"i",
",",
"3",
"]",
",",
"bbox_label",
"[",
"i",
",",
"4",
"]",
",",
"bbox_label",
"[",
"i",
",",
"5",
"]",
",",
"bbox_label",
"[",
"i",
",",
"6",
"]",
")",
")",
"fout",
".",
"close",
"(",
")"
] |
https://github.com/hxdengBerkeley/PointCNN.Pytorch/blob/6ec6c291cf97923a84fb6ed8c82e98bf01e7e96d/sem_seg/indoor3d_util.py#L343-L383
|
||
algorhythms/LeetCode
|
3fb14aeea62a960442e47dfde9f964c7ffce32be
|
253 Meeting Rooms II.py
|
python
|
Solution.minMeetingRooms
|
(self, intervals)
|
return maxa
|
:type intervals: list[Interval]
:rtype: int
|
[] |
def minMeetingRooms(self, intervals):
    """
    :type intervals: list[Interval]
    :rtype: int

    Sweep meetings in start order, keeping a min-heap of end times for
    meetings still in progress; the peak heap size is the answer.
    """
    intervals.sort(key=operator.attrgetter("start"))
    active_ends = []
    busiest = 0
    for meeting in intervals:
        heapq.heappush(active_ends, meeting.end)
        # Free every room whose meeting has ended by this start time.
        while active_ends and active_ends[0] <= meeting.start:
            heapq.heappop(active_ends)
        busiest = max(busiest, len(active_ends))
    return busiest
|
[
"def",
"minMeetingRooms",
"(",
"self",
",",
"intervals",
")",
":",
"maxa",
"=",
"0",
"intervals",
".",
"sort",
"(",
"key",
"=",
"operator",
".",
"attrgetter",
"(",
"\"start\"",
")",
")",
"h_end",
"=",
"[",
"]",
"for",
"itvl",
"in",
"intervals",
":",
"heapq",
".",
"heappush",
"(",
"h_end",
",",
"itvl",
".",
"end",
")",
"while",
"h_end",
"and",
"h_end",
"[",
"0",
"]",
"<=",
"itvl",
".",
"start",
":",
"heapq",
".",
"heappop",
"(",
"h_end",
")",
"maxa",
"=",
"max",
"(",
"maxa",
",",
"len",
"(",
"h_end",
")",
")",
"return",
"maxa"
] |
https://github.com/algorhythms/LeetCode/blob/3fb14aeea62a960442e47dfde9f964c7ffce32be/253 Meeting Rooms II.py#L19-L36
|
||
dimagi/commcare-hq
|
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
|
corehq/apps/domain/views/accounting.py
|
python
|
SelectedEnterprisePlanView.steps
|
(self)
|
return last_steps
|
[] |
def steps(self):
    """Extend the parent wizard steps with the "Contact Dimagi" step."""
    parent_steps = super(SelectedEnterprisePlanView, self).steps
    contact_step = {
        'title': _("2. Contact Dimagi"),
        'url': reverse(SelectedEnterprisePlanView.urlname, args=[self.domain]),
    }
    parent_steps.append(contact_step)
    return parent_steps
|
[
"def",
"steps",
"(",
"self",
")",
":",
"last_steps",
"=",
"super",
"(",
"SelectedEnterprisePlanView",
",",
"self",
")",
".",
"steps",
"last_steps",
".",
"append",
"(",
"{",
"'title'",
":",
"_",
"(",
"\"2. Contact Dimagi\"",
")",
",",
"'url'",
":",
"reverse",
"(",
"SelectedEnterprisePlanView",
".",
"urlname",
",",
"args",
"=",
"[",
"self",
".",
"domain",
"]",
")",
",",
"}",
")",
"return",
"last_steps"
] |
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/apps/domain/views/accounting.py#L1113-L1119
|
|||
gcollazo/BrowserRefresh-Sublime
|
daee0eda6480c07f8636ed24e5c555d24e088886
|
win/pywinauto/controls/common_controls.py
|
python
|
_toolbar_button.Rectangle
|
(self)
|
return rect
|
Get the rectangle of a button on the toolbar
|
Get the rectangle of a button on the toolbar
|
[
"Get",
"the",
"rectangle",
"of",
"a",
"button",
"on",
"the",
"toolbar"
] |
def Rectangle(self):
    "Get the rectangle of a button on the toolbar"
    # The toolbar belongs to another process, so TB_GETRECT must write its
    # result into that process's address space: stage a remote buffer first.
    remote_mem = _RemoteMemoryBlock(self.toolbar_ctrl)

    rect = win32structures.RECT()

    # Seed the remote buffer with an (empty) RECT for the control to fill in.
    remote_mem.Write(rect)

    # Ask the toolbar for the bounding rectangle of this button's command id.
    self.toolbar_ctrl.SendMessage(
        win32defines.TB_GETRECT,
        self.info.idCommand,
        remote_mem)

    # Copy the filled-in RECT back out of the remote process.
    rect = remote_mem.Read(rect)

    # Release the remote memory block explicitly.
    del remote_mem

    return rect
|
[
"def",
"Rectangle",
"(",
"self",
")",
":",
"remote_mem",
"=",
"_RemoteMemoryBlock",
"(",
"self",
".",
"toolbar_ctrl",
")",
"rect",
"=",
"win32structures",
".",
"RECT",
"(",
")",
"remote_mem",
".",
"Write",
"(",
"rect",
")",
"self",
".",
"toolbar_ctrl",
".",
"SendMessage",
"(",
"win32defines",
".",
"TB_GETRECT",
",",
"self",
".",
"info",
".",
"idCommand",
",",
"remote_mem",
")",
"rect",
"=",
"remote_mem",
".",
"Read",
"(",
"rect",
")",
"del",
"remote_mem",
"return",
"rect"
] |
https://github.com/gcollazo/BrowserRefresh-Sublime/blob/daee0eda6480c07f8636ed24e5c555d24e088886/win/pywinauto/controls/common_controls.py#L1669-L1687
|
|
jina-ai/jina
|
c77a492fcd5adba0fc3de5347bea83dd4e7d8087
|
jina/proto/jina_pb2_grpc.py
|
python
|
JinaRPC.Call
|
(
request_iterator,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
)
|
return grpc.experimental.stream_stream(
request_iterator,
target,
'/jina.JinaRPC/Call',
jina__pb2.DataRequestProto.SerializeToString,
jina__pb2.DataRequestProto.FromString,
options,
channel_credentials,
insecure,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
|
[] |
def Call(
    request_iterator,
    target,
    options=(),
    channel_credentials=None,
    call_credentials=None,
    insecure=False,
    compression=None,
    wait_for_ready=None,
    timeout=None,
    metadata=None,
):
    """Invoke the bidirectional-streaming ``/jina.JinaRPC/Call`` RPC.

    Auto-generated gRPC convenience stub: opens a stream-stream call to
    *target*, serializing DataRequestProto messages from
    *request_iterator* and deserializing DataRequestProto responses.
    All remaining parameters are forwarded unchanged to
    ``grpc.experimental.stream_stream``.
    """
    return grpc.experimental.stream_stream(
        request_iterator,
        target,
        '/jina.JinaRPC/Call',
        jina__pb2.DataRequestProto.SerializeToString,
        jina__pb2.DataRequestProto.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata,
    )
|
[
"def",
"Call",
"(",
"request_iterator",
",",
"target",
",",
"options",
"=",
"(",
")",
",",
"channel_credentials",
"=",
"None",
",",
"call_credentials",
"=",
"None",
",",
"insecure",
"=",
"False",
",",
"compression",
"=",
"None",
",",
"wait_for_ready",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
")",
":",
"return",
"grpc",
".",
"experimental",
".",
"stream_stream",
"(",
"request_iterator",
",",
"target",
",",
"'/jina.JinaRPC/Call'",
",",
"jina__pb2",
".",
"DataRequestProto",
".",
"SerializeToString",
",",
"jina__pb2",
".",
"DataRequestProto",
".",
"FromString",
",",
"options",
",",
"channel_credentials",
",",
"insecure",
",",
"call_credentials",
",",
"compression",
",",
"wait_for_ready",
",",
"timeout",
",",
"metadata",
",",
")"
] |
https://github.com/jina-ai/jina/blob/c77a492fcd5adba0fc3de5347bea83dd4e7d8087/jina/proto/jina_pb2_grpc.py#L302-L328
|
|||
Cloud-CV/EvalAI
|
1884811e7759e0d095f7afb68188a7f010fa65dc
|
scripts/monitoring/monitor_containers.py
|
python
|
notify
|
(container_names)
|
return response
|
Send slack notification for workers which are failing
Arguments:
container_names {List} -- List of container names
|
Send slack notification for workers which are failing
|
[
"Send",
"slack",
"notification",
"for",
"workers",
"which",
"are",
"failing"
] |
def notify(container_names):
    """
    Send slack notification for workers which are failing
    Arguments:
        container_names {List} -- List of container names
    Returns:
        the response from send_slack_notification
    """
    message = "{} environment:\n\n Following workers are down:\n\n {}".format(
        get_environment(), " \n ".join(container_names)
    )
    return send_slack_notification(message)
|
[
"def",
"notify",
"(",
"container_names",
")",
":",
"environment",
"=",
"get_environment",
"(",
")",
"message",
"=",
"\"{} environment:\\n\\n Following workers are down:\\n\\n {}\"",
".",
"format",
"(",
"environment",
",",
"\" \\n \"",
".",
"join",
"(",
"container_names",
")",
")",
"response",
"=",
"send_slack_notification",
"(",
"message",
")",
"return",
"response"
] |
https://github.com/Cloud-CV/EvalAI/blob/1884811e7759e0d095f7afb68188a7f010fa65dc/scripts/monitoring/monitor_containers.py#L89-L101
|
|
yuxiaokui/Intranet-Penetration
|
f57678a204840c83cbf3308e3470ae56c5ff514b
|
proxy/XX-Net/code/default/gae_proxy/server/lib/google/appengine/tools/bulkloader.py
|
python
|
Exporter.initialize
|
(self, filename, exporter_opts)
|
Performs initialization and validation of the output file.
This implementation checks that the input file exists and can be
opened for writing.
Args:
filename: The string given as the --filename flag argument.
exporter_opts: The string given as the --exporter_opts flag argument.
|
Performs initialization and validation of the output file.
|
[
"Performs",
"initialization",
"and",
"validation",
"of",
"the",
"output",
"file",
"."
] |
def initialize(self, filename, exporter_opts):
    """Performs initialization and validation of the output file.

    This implementation checks that the output file can be opened for
    writing (CheckOutputFile raises on failure) and records its name.

    Args:
      filename: The string given as the --filename flag argument.
      exporter_opts: The string given as the --exporter_opts flag argument.
        Unused by this implementation.
    """
    CheckOutputFile(filename)
    self.output_filename = filename
|
[
"def",
"initialize",
"(",
"self",
",",
"filename",
",",
"exporter_opts",
")",
":",
"CheckOutputFile",
"(",
"filename",
")",
"self",
".",
"output_filename",
"=",
"filename"
] |
https://github.com/yuxiaokui/Intranet-Penetration/blob/f57678a204840c83cbf3308e3470ae56c5ff514b/proxy/XX-Net/code/default/gae_proxy/server/lib/google/appengine/tools/bulkloader.py#L3090-L3101
|
||
Khan/gae_mini_profiler
|
275e6e67c751b621f1e65c24c9a8a15631799fa4
|
sampling_profiler.py
|
python
|
InspectingThread.stop
|
(self)
|
Signal the thread to stop and block until it is finished.
|
Signal the thread to stop and block until it is finished.
|
[
"Signal",
"the",
"thread",
"to",
"stop",
"and",
"block",
"until",
"it",
"is",
"finished",
"."
] |
def stop(self):
    """Request a cooperative shutdown of the thread and wait for it.

    # http://stackoverflow.com/questions/323972/is-there-any-way-to-kill-a-thread-in-python
    """
    # Raise the stop flag (presumably polled by the thread's run loop —
    # confirm against the run() implementation), then block until it exits.
    self._stop_event.set()
    self.join()
|
[
"def",
"stop",
"(",
"self",
")",
":",
"# http://stackoverflow.com/questions/323972/is-there-any-way-to-kill-a-thread-in-python",
"self",
".",
"_stop_event",
".",
"set",
"(",
")",
"self",
".",
"join",
"(",
")"
] |
https://github.com/Khan/gae_mini_profiler/blob/275e6e67c751b621f1e65c24c9a8a15631799fa4/sampling_profiler.py#L69-L73
|
||
lsbardel/python-stdnet
|
78db5320bdedc3f28c5e4f38cda13a4469e35db7
|
stdnet/odm/base.py
|
python
|
ModelState.__init__
|
(self, instance, iid=None, action=None)
|
[] |
def __init__(self, instance, iid=None, action=None):
    """Record the persistence state (action and id) for *instance*.

    :param instance: model instance whose state is being captured
    :param iid: optional explicit instance id overriding the pk attribute
    :param action: optional action name; defaults to 'add'
    """
    self._action = action or 'add'
    self.deleted = False
    self.score = 0
    dbdata = instance.dbdata
    pk_field = instance._meta.pkname()
    pk = iid or getattr(instance, pk_field, None)
    if pk and pk_field in dbdata:
        # A known pk already stored in dbdata: ask the instance what the
        # real action is (unless the caller forced a non-default one).
        if self._action == 'add':
            self._action = instance.get_state_action()
    elif not pk:
        # No pk at all: this is a new object; synthesize a temporary id.
        self._action = 'add'
        pk = 'new.{0}'.format(id(instance))
    self._iid = pk
|
[
"def",
"__init__",
"(",
"self",
",",
"instance",
",",
"iid",
"=",
"None",
",",
"action",
"=",
"None",
")",
":",
"self",
".",
"_action",
"=",
"action",
"or",
"'add'",
"self",
".",
"deleted",
"=",
"False",
"self",
".",
"score",
"=",
"0",
"dbdata",
"=",
"instance",
".",
"dbdata",
"pkname",
"=",
"instance",
".",
"_meta",
".",
"pkname",
"(",
")",
"pkvalue",
"=",
"iid",
"or",
"getattr",
"(",
"instance",
",",
"pkname",
",",
"None",
")",
"if",
"pkvalue",
"and",
"pkname",
"in",
"dbdata",
":",
"if",
"self",
".",
"_action",
"==",
"'add'",
":",
"self",
".",
"_action",
"=",
"instance",
".",
"get_state_action",
"(",
")",
"elif",
"not",
"pkvalue",
":",
"self",
".",
"_action",
"=",
"'add'",
"pkvalue",
"=",
"'new.{0}'",
".",
"format",
"(",
"id",
"(",
"instance",
")",
")",
"self",
".",
"_iid",
"=",
"pkvalue"
] |
https://github.com/lsbardel/python-stdnet/blob/78db5320bdedc3f28c5e4f38cda13a4469e35db7/stdnet/odm/base.py#L402-L415
|
||||
Nuitka/Nuitka
|
39262276993757fa4e299f497654065600453fc9
|
nuitka/build/inline_copy/lib/scons-4.3.0/SCons/Subst.py
|
python
|
scons_subst
|
(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None)
|
return result
|
Expand a string or list containing construction variable
substitutions.
This is the work-horse function for substitutions in file names
and the like. The companion scons_subst_list() function (below)
handles separating command lines into lists of arguments, so see
that function if that's what you're looking for.
|
Expand a string or list containing construction variable
substitutions.
|
[
"Expand",
"a",
"string",
"or",
"list",
"containing",
"construction",
"variable",
"substitutions",
"."
] |
def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
"""Expand a string or list containing construction variable
substitutions.
This is the work-horse function for substitutions in file names
and the like. The companion scons_subst_list() function (below)
handles separating command lines into lists of arguments, so see
that function if that's what you're looking for.
"""
if (isinstance(strSubst, str) and '$' not in strSubst) or isinstance(strSubst, CmdStringHolder):
return strSubst
if conv is None:
conv = _strconv[mode]
# Doing this every time is a bit of a waste, since the Executor
# has typically already populated the OverrideEnvironment with
# $TARGET/$SOURCE variables. We're keeping this (for now), though,
# because it supports existing behavior that allows us to call
# an Action directly with an arbitrary target+source pair, which
# we use in Tool/tex.py to handle calling $BIBTEX when necessary.
# If we dropped that behavior (or found another way to cover it),
# we could get rid of this call completely and just rely on the
# Executor setting the variables.
if 'TARGET' not in lvars:
d = subst_dict(target, source)
if d:
lvars = lvars.copy()
lvars.update(d)
# We're (most likely) going to eval() things. If Python doesn't
# find a __builtins__ value in the global dictionary used for eval(),
# it copies the current global values for you. Avoid this by
# setting it explicitly and then deleting, so we don't pollute the
# construction environment Dictionary(ies) that are typically used
# for expansion.
gvars['__builtins__'] = __builtins__
ss = StringSubber(env, mode, conv, gvars)
result = ss.substitute(strSubst, lvars)
try:
del gvars['__builtins__']
except KeyError:
pass
res = result
if is_String(result):
# Remove $(-$) pairs and any stuff in between,
# if that's appropriate.
remove = _regex_remove[mode]
if remove:
if mode == SUBST_SIG:
result = _list_remove[mode](remove.split(result))
if result is None:
raise SCons.Errors.UserError("Unbalanced $(/$) in: " + res)
result = ' '.join(result)
else:
result = remove.sub('', result)
if mode != SUBST_RAW:
# Compress strings of white space characters into
# a single space.
result = _space_sep.sub(' ', result).strip()
# Now replace escaped $'s currently "$$"
# This is needed because we now retain $$ instead of
# replacing them during substition to avoid
# improperly trying to escape "$$(" as being "$("
result = result.replace('$$','$')
elif is_Sequence(result):
remove = _list_remove[mode]
if remove:
result = remove(result)
if result is None:
raise SCons.Errors.UserError("Unbalanced $(/$) in: " + str(res))
return result
|
[
"def",
"scons_subst",
"(",
"strSubst",
",",
"env",
",",
"mode",
"=",
"SUBST_RAW",
",",
"target",
"=",
"None",
",",
"source",
"=",
"None",
",",
"gvars",
"=",
"{",
"}",
",",
"lvars",
"=",
"{",
"}",
",",
"conv",
"=",
"None",
")",
":",
"if",
"(",
"isinstance",
"(",
"strSubst",
",",
"str",
")",
"and",
"'$'",
"not",
"in",
"strSubst",
")",
"or",
"isinstance",
"(",
"strSubst",
",",
"CmdStringHolder",
")",
":",
"return",
"strSubst",
"if",
"conv",
"is",
"None",
":",
"conv",
"=",
"_strconv",
"[",
"mode",
"]",
"# Doing this every time is a bit of a waste, since the Executor",
"# has typically already populated the OverrideEnvironment with",
"# $TARGET/$SOURCE variables. We're keeping this (for now), though,",
"# because it supports existing behavior that allows us to call",
"# an Action directly with an arbitrary target+source pair, which",
"# we use in Tool/tex.py to handle calling $BIBTEX when necessary.",
"# If we dropped that behavior (or found another way to cover it),",
"# we could get rid of this call completely and just rely on the",
"# Executor setting the variables.",
"if",
"'TARGET'",
"not",
"in",
"lvars",
":",
"d",
"=",
"subst_dict",
"(",
"target",
",",
"source",
")",
"if",
"d",
":",
"lvars",
"=",
"lvars",
".",
"copy",
"(",
")",
"lvars",
".",
"update",
"(",
"d",
")",
"# We're (most likely) going to eval() things. If Python doesn't",
"# find a __builtins__ value in the global dictionary used for eval(),",
"# it copies the current global values for you. Avoid this by",
"# setting it explicitly and then deleting, so we don't pollute the",
"# construction environment Dictionary(ies) that are typically used",
"# for expansion.",
"gvars",
"[",
"'__builtins__'",
"]",
"=",
"__builtins__",
"ss",
"=",
"StringSubber",
"(",
"env",
",",
"mode",
",",
"conv",
",",
"gvars",
")",
"result",
"=",
"ss",
".",
"substitute",
"(",
"strSubst",
",",
"lvars",
")",
"try",
":",
"del",
"gvars",
"[",
"'__builtins__'",
"]",
"except",
"KeyError",
":",
"pass",
"res",
"=",
"result",
"if",
"is_String",
"(",
"result",
")",
":",
"# Remove $(-$) pairs and any stuff in between,",
"# if that's appropriate.",
"remove",
"=",
"_regex_remove",
"[",
"mode",
"]",
"if",
"remove",
":",
"if",
"mode",
"==",
"SUBST_SIG",
":",
"result",
"=",
"_list_remove",
"[",
"mode",
"]",
"(",
"remove",
".",
"split",
"(",
"result",
")",
")",
"if",
"result",
"is",
"None",
":",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"\"Unbalanced $(/$) in: \"",
"+",
"res",
")",
"result",
"=",
"' '",
".",
"join",
"(",
"result",
")",
"else",
":",
"result",
"=",
"remove",
".",
"sub",
"(",
"''",
",",
"result",
")",
"if",
"mode",
"!=",
"SUBST_RAW",
":",
"# Compress strings of white space characters into",
"# a single space.",
"result",
"=",
"_space_sep",
".",
"sub",
"(",
"' '",
",",
"result",
")",
".",
"strip",
"(",
")",
"# Now replace escaped $'s currently \"$$\"",
"# This is needed because we now retain $$ instead of",
"# replacing them during substition to avoid",
"# improperly trying to escape \"$$(\" as being \"$(\"",
"result",
"=",
"result",
".",
"replace",
"(",
"'$$'",
",",
"'$'",
")",
"elif",
"is_Sequence",
"(",
"result",
")",
":",
"remove",
"=",
"_list_remove",
"[",
"mode",
"]",
"if",
"remove",
":",
"result",
"=",
"remove",
"(",
"result",
")",
"if",
"result",
"is",
"None",
":",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"\"Unbalanced $(/$) in: \"",
"+",
"str",
"(",
"res",
")",
")",
"return",
"result"
] |
https://github.com/Nuitka/Nuitka/blob/39262276993757fa4e299f497654065600453fc9/nuitka/build/inline_copy/lib/scons-4.3.0/SCons/Subst.py#L807-L883
|
|
lovelylain/pyctp
|
fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d
|
example/pyctp2/trader/environ.py
|
python
|
Environ.matcher
|
(self)
|
return self._matcher
|
[] |
def matcher(self):
return self._matcher
|
[
"def",
"matcher",
"(",
"self",
")",
":",
"return",
"self",
".",
"_matcher"
] |
https://github.com/lovelylain/pyctp/blob/fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d/example/pyctp2/trader/environ.py#L84-L85
|
|||
GoogleCloudPlatform/cloudml-samples
|
efddc4a9898127e55edc0946557aca4bfaf59705
|
census/tftransformestimator/trainer/model.py
|
python
|
build_estimator
|
(config, tft_working_dir, embedding_size, hidden_units)
|
return tf.estimator.DNNLinearCombinedClassifier(
config=config,
linear_feature_columns=categorical_valued_columns.values(),
dnn_feature_columns=real_valued_columns.values(),
dnn_hidden_units=hidden_units or [100, 70, 50, 25])
|
Build a estimator.
Args:
config: tensorflow.python.estimator.run_config.RunConfig defining the
runtime environment for the estimator (including model_dir).
tft_working_dir : Directory pointed from the tf transform pipeline
embedding_size : Dimension of the embedding for the feature columns Channel
hidden_units : [int], the layer sizes of the DNN (input layer first)
Returns:
A DNNCombinedLinearClassifier
Raises:
LookupError is the transformed_features are not consistent with
input_metadata.py
|
Build a estimator.
|
[
"Build",
"a",
"estimator",
"."
] |
def build_estimator(config, tft_working_dir, embedding_size, hidden_units):
"""Build a estimator.
Args:
config: tensorflow.python.estimator.run_config.RunConfig defining the
runtime environment for the estimator (including model_dir).
tft_working_dir : Directory pointed from the tf transform pipeline
embedding_size : Dimension of the embedding for the feature columns Channel
hidden_units : [int], the layer sizes of the DNN (input layer first)
Returns:
A DNNCombinedLinearClassifier
Raises:
LookupError is the transformed_features are not consistent with
input_metadata.py
"""
transformed_metadata = metadata_io.read_metadata(
os.path.join(tft_working_dir, transform_fn_io.TRANSFORMED_METADATA_DIR))
transformed_features = transformed_metadata.schema.as_feature_spec()
transformed_features.pop(LABEL_KEY)
real_valued_columns = {}
categorical_valued_columns = {}
for key, tensor in transformed_features.items():
# Separate features by numeric or categorical
if key in STRING_TO_INT_FEATURE_KEYS:
if not key in CATEGORICAL_FEATURE_KEYS_TO_BE_REMOVED:
categorical_valued_columns[
key] = tf.feature_column.categorical_column_with_identity(
key=key,
num_buckets=vocabulary_size_by_name(tft_working_dir, key))
elif key in HASH_STRING_FEATURE_KEYS:
if not key in CATEGORICAL_FEATURE_KEYS_TO_BE_REMOVED:
categorical_valued_columns[
key] = tf.feature_column.categorical_column_with_identity(
key=key, num_buckets=HASH_STRING_FEATURE_KEYS[key])
elif key in NUMERIC_FEATURE_KEYS:
if not key in NUMERIC_FEATURE_KEYS_TO_BE_REMOVED:
real_valued_columns[key] = tf.feature_column.numeric_column(
key, shape=())
elif (key.endswith('_bucketized') and
key.replace('_bucketized', '') in TO_BE_BUCKETIZED_FEATURE):
categorical_valued_columns[
key] = tf.feature_column.categorical_column_with_identity(
key=key,
num_buckets=TO_BE_BUCKETIZED_FEATURE[key.replace(
'_bucketized', '')])
else:
raise LookupError(('The couple (%s,%s) is not consistent with ',
'input_metadata.py' % (key, tensor)))
# creating a new categorical features by crossing
categorical_valued_columns.update({
'education_x_occupation':
tf.feature_column.crossed_column(['education', 'occupation'],
hash_bucket_size=int(1e4)),
'age_bucketized_x_race_x_occupation':
tf.feature_column.crossed_column(
['age_bucketized', 'race', 'occupation'],
hash_bucket_size=int(1e6)),
'native_country_x_occupation':
tf.feature_column.crossed_column(['native_country', 'occupation'],
hash_bucket_size=int(1e4))
})
# creating new numeric features from categorical features
real_valued_columns.update({
# Use indicator columns for low dimensional vocabularies
'workclass_indicator':
tf.feature_column.indicator_column(
categorical_valued_columns['workclass']),
'education_indicator':
tf.feature_column.indicator_column(
categorical_valued_columns['education']),
'marital_status_indicator':
tf.feature_column.indicator_column(
categorical_valued_columns['marital_status']),
'gender_indicator':
tf.feature_column.indicator_column(
categorical_valued_columns['gender']),
'relationship_indicator':
tf.feature_column.indicator_column(
categorical_valued_columns['relationship']),
'race_indicator':
tf.feature_column.indicator_column(
categorical_valued_columns['race']),
# Use embedding columns for high dimensional vocabularies
'native_country_embedding':
tf.feature_column.embedding_column(
categorical_valued_columns['native_country'],
dimension=embedding_size),
'occupation_embedding':
tf.feature_column.embedding_column(
categorical_valued_columns['occupation'],
dimension=embedding_size),
})
return tf.estimator.DNNLinearCombinedClassifier(
config=config,
linear_feature_columns=categorical_valued_columns.values(),
dnn_feature_columns=real_valued_columns.values(),
dnn_hidden_units=hidden_units or [100, 70, 50, 25])
|
[
"def",
"build_estimator",
"(",
"config",
",",
"tft_working_dir",
",",
"embedding_size",
",",
"hidden_units",
")",
":",
"transformed_metadata",
"=",
"metadata_io",
".",
"read_metadata",
"(",
"os",
".",
"path",
".",
"join",
"(",
"tft_working_dir",
",",
"transform_fn_io",
".",
"TRANSFORMED_METADATA_DIR",
")",
")",
"transformed_features",
"=",
"transformed_metadata",
".",
"schema",
".",
"as_feature_spec",
"(",
")",
"transformed_features",
".",
"pop",
"(",
"LABEL_KEY",
")",
"real_valued_columns",
"=",
"{",
"}",
"categorical_valued_columns",
"=",
"{",
"}",
"for",
"key",
",",
"tensor",
"in",
"transformed_features",
".",
"items",
"(",
")",
":",
"# Separate features by numeric or categorical",
"if",
"key",
"in",
"STRING_TO_INT_FEATURE_KEYS",
":",
"if",
"not",
"key",
"in",
"CATEGORICAL_FEATURE_KEYS_TO_BE_REMOVED",
":",
"categorical_valued_columns",
"[",
"key",
"]",
"=",
"tf",
".",
"feature_column",
".",
"categorical_column_with_identity",
"(",
"key",
"=",
"key",
",",
"num_buckets",
"=",
"vocabulary_size_by_name",
"(",
"tft_working_dir",
",",
"key",
")",
")",
"elif",
"key",
"in",
"HASH_STRING_FEATURE_KEYS",
":",
"if",
"not",
"key",
"in",
"CATEGORICAL_FEATURE_KEYS_TO_BE_REMOVED",
":",
"categorical_valued_columns",
"[",
"key",
"]",
"=",
"tf",
".",
"feature_column",
".",
"categorical_column_with_identity",
"(",
"key",
"=",
"key",
",",
"num_buckets",
"=",
"HASH_STRING_FEATURE_KEYS",
"[",
"key",
"]",
")",
"elif",
"key",
"in",
"NUMERIC_FEATURE_KEYS",
":",
"if",
"not",
"key",
"in",
"NUMERIC_FEATURE_KEYS_TO_BE_REMOVED",
":",
"real_valued_columns",
"[",
"key",
"]",
"=",
"tf",
".",
"feature_column",
".",
"numeric_column",
"(",
"key",
",",
"shape",
"=",
"(",
")",
")",
"elif",
"(",
"key",
".",
"endswith",
"(",
"'_bucketized'",
")",
"and",
"key",
".",
"replace",
"(",
"'_bucketized'",
",",
"''",
")",
"in",
"TO_BE_BUCKETIZED_FEATURE",
")",
":",
"categorical_valued_columns",
"[",
"key",
"]",
"=",
"tf",
".",
"feature_column",
".",
"categorical_column_with_identity",
"(",
"key",
"=",
"key",
",",
"num_buckets",
"=",
"TO_BE_BUCKETIZED_FEATURE",
"[",
"key",
".",
"replace",
"(",
"'_bucketized'",
",",
"''",
")",
"]",
")",
"else",
":",
"raise",
"LookupError",
"(",
"(",
"'The couple (%s,%s) is not consistent with '",
",",
"'input_metadata.py'",
"%",
"(",
"key",
",",
"tensor",
")",
")",
")",
"# creating a new categorical features by crossing",
"categorical_valued_columns",
".",
"update",
"(",
"{",
"'education_x_occupation'",
":",
"tf",
".",
"feature_column",
".",
"crossed_column",
"(",
"[",
"'education'",
",",
"'occupation'",
"]",
",",
"hash_bucket_size",
"=",
"int",
"(",
"1e4",
")",
")",
",",
"'age_bucketized_x_race_x_occupation'",
":",
"tf",
".",
"feature_column",
".",
"crossed_column",
"(",
"[",
"'age_bucketized'",
",",
"'race'",
",",
"'occupation'",
"]",
",",
"hash_bucket_size",
"=",
"int",
"(",
"1e6",
")",
")",
",",
"'native_country_x_occupation'",
":",
"tf",
".",
"feature_column",
".",
"crossed_column",
"(",
"[",
"'native_country'",
",",
"'occupation'",
"]",
",",
"hash_bucket_size",
"=",
"int",
"(",
"1e4",
")",
")",
"}",
")",
"# creating new numeric features from categorical features",
"real_valued_columns",
".",
"update",
"(",
"{",
"# Use indicator columns for low dimensional vocabularies",
"'workclass_indicator'",
":",
"tf",
".",
"feature_column",
".",
"indicator_column",
"(",
"categorical_valued_columns",
"[",
"'workclass'",
"]",
")",
",",
"'education_indicator'",
":",
"tf",
".",
"feature_column",
".",
"indicator_column",
"(",
"categorical_valued_columns",
"[",
"'education'",
"]",
")",
",",
"'marital_status_indicator'",
":",
"tf",
".",
"feature_column",
".",
"indicator_column",
"(",
"categorical_valued_columns",
"[",
"'marital_status'",
"]",
")",
",",
"'gender_indicator'",
":",
"tf",
".",
"feature_column",
".",
"indicator_column",
"(",
"categorical_valued_columns",
"[",
"'gender'",
"]",
")",
",",
"'relationship_indicator'",
":",
"tf",
".",
"feature_column",
".",
"indicator_column",
"(",
"categorical_valued_columns",
"[",
"'relationship'",
"]",
")",
",",
"'race_indicator'",
":",
"tf",
".",
"feature_column",
".",
"indicator_column",
"(",
"categorical_valued_columns",
"[",
"'race'",
"]",
")",
",",
"# Use embedding columns for high dimensional vocabularies",
"'native_country_embedding'",
":",
"tf",
".",
"feature_column",
".",
"embedding_column",
"(",
"categorical_valued_columns",
"[",
"'native_country'",
"]",
",",
"dimension",
"=",
"embedding_size",
")",
",",
"'occupation_embedding'",
":",
"tf",
".",
"feature_column",
".",
"embedding_column",
"(",
"categorical_valued_columns",
"[",
"'occupation'",
"]",
",",
"dimension",
"=",
"embedding_size",
")",
",",
"}",
")",
"return",
"tf",
".",
"estimator",
".",
"DNNLinearCombinedClassifier",
"(",
"config",
"=",
"config",
",",
"linear_feature_columns",
"=",
"categorical_valued_columns",
".",
"values",
"(",
")",
",",
"dnn_feature_columns",
"=",
"real_valued_columns",
".",
"values",
"(",
")",
",",
"dnn_hidden_units",
"=",
"hidden_units",
"or",
"[",
"100",
",",
"70",
",",
"50",
",",
"25",
"]",
")"
] |
https://github.com/GoogleCloudPlatform/cloudml-samples/blob/efddc4a9898127e55edc0946557aca4bfaf59705/census/tftransformestimator/trainer/model.py#L166-L272
|
|
mrlesmithjr/Ansible
|
d44f0dc0d942bdf3bf7334b307e6048f0ee16e36
|
roles/ansible-vsphere-management/scripts/pdns/lib/python2.7/site-packages/setuptools/msvc.py
|
python
|
SystemInfo._use_last_dir_name
|
(self, path, prefix='')
|
return next(matching_dirs, None) or ''
|
Return name of the last dir in path or '' if no dir found.
Parameters
----------
path: str
Use dirs in this path
prefix: str
Use only dirs startings by this prefix
|
Return name of the last dir in path or '' if no dir found.
|
[
"Return",
"name",
"of",
"the",
"last",
"dir",
"in",
"path",
"or",
"if",
"no",
"dir",
"found",
"."
] |
def _use_last_dir_name(self, path, prefix=''):
"""
Return name of the last dir in path or '' if no dir found.
Parameters
----------
path: str
Use dirs in this path
prefix: str
Use only dirs startings by this prefix
"""
matching_dirs = (
dir_name
for dir_name in reversed(os.listdir(path))
if os.path.isdir(os.path.join(path, dir_name)) and
dir_name.startswith(prefix)
)
return next(matching_dirs, None) or ''
|
[
"def",
"_use_last_dir_name",
"(",
"self",
",",
"path",
",",
"prefix",
"=",
"''",
")",
":",
"matching_dirs",
"=",
"(",
"dir_name",
"for",
"dir_name",
"in",
"reversed",
"(",
"os",
".",
"listdir",
"(",
"path",
")",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"dir_name",
")",
")",
"and",
"dir_name",
".",
"startswith",
"(",
"prefix",
")",
")",
"return",
"next",
"(",
"matching_dirs",
",",
"None",
")",
"or",
"''"
] |
https://github.com/mrlesmithjr/Ansible/blob/d44f0dc0d942bdf3bf7334b307e6048f0ee16e36/roles/ansible-vsphere-management/scripts/pdns/lib/python2.7/site-packages/setuptools/msvc.py#L797-L814
|
|
frePPLe/frepple
|
57aa612030b4fcd03cb9c613f83a7dac4f0e8d6d
|
freppledb/output/widget.py
|
python
|
ShortOrdersWidget.args
|
(self)
|
return "?%s" % urlencode({"limit": self.limit})
|
[] |
def args(self):
return "?%s" % urlencode({"limit": self.limit})
|
[
"def",
"args",
"(",
"self",
")",
":",
"return",
"\"?%s\"",
"%",
"urlencode",
"(",
"{",
"\"limit\"",
":",
"self",
".",
"limit",
"}",
")"
] |
https://github.com/frePPLe/frepple/blob/57aa612030b4fcd03cb9c613f83a7dac4f0e8d6d/freppledb/output/widget.py#L142-L143
|
|||
django-getpaid/django-plans
|
a2bdf743a03e6b49232914ef30fc85e4dd9bbf76
|
plans/contrib.py
|
python
|
get_user_language
|
(user)
|
return return_value.get('language')
|
Simple helper that will fire django signal in order
to get User language possibly given by other part of application.
:param user:
:return: string or None
|
Simple helper that will fire django signal in order
to get User language possibly given by other part of application.
:param user:
:return: string or None
|
[
"Simple",
"helper",
"that",
"will",
"fire",
"django",
"signal",
"in",
"order",
"to",
"get",
"User",
"language",
"possibly",
"given",
"by",
"other",
"part",
"of",
"application",
".",
":",
"param",
"user",
":",
":",
"return",
":",
"string",
"or",
"None"
] |
def get_user_language(user):
""" Simple helper that will fire django signal in order
to get User language possibly given by other part of application.
:param user:
:return: string or None
"""
return_value = {}
user_language.send(sender=user, user=user, return_value=return_value)
return return_value.get('language')
|
[
"def",
"get_user_language",
"(",
"user",
")",
":",
"return_value",
"=",
"{",
"}",
"user_language",
".",
"send",
"(",
"sender",
"=",
"user",
",",
"user",
"=",
"user",
",",
"return_value",
"=",
"return_value",
")",
"return",
"return_value",
".",
"get",
"(",
"'language'",
")"
] |
https://github.com/django-getpaid/django-plans/blob/a2bdf743a03e6b49232914ef30fc85e4dd9bbf76/plans/contrib.py#L62-L70
|
|
fengyang95/pyCFTrackers
|
5559b9c84e2331e09e2d7c2d71ce3c6dc7f7075a
|
lib/eco/fourier_tools.py
|
python
|
cubic_spline_fourier
|
(f, a)
|
return bf
|
The continuous fourier transform of a cubic spline kernel
|
The continuous fourier transform of a cubic spline kernel
|
[
"The",
"continuous",
"fourier",
"transform",
"of",
"a",
"cubic",
"spline",
"kernel"
] |
def cubic_spline_fourier(f, a):
"""
The continuous fourier transform of a cubic spline kernel
"""
bf = - ( - 12 * a + 12 * np.exp( - np.pi * f * 2j) + 12 * np.exp(np.pi * f * 2j) + 6 * a * np.exp(-np.pi * f * 4j) + \
6 * a * np.exp(np.pi * f * 4j) + f * (np.pi * np.exp(-np.pi*f*2j)*12j) - f * (np.pi * np.exp(np.pi * f * 2j) * 12j) + \
a*f*(np.pi*np.exp(-np.pi*f*2j)*16j) - a * f * (np.pi*np.exp(np.pi*f*2j)*16j) + \
a*f*(np.pi*np.exp(-np.pi*f*4j)*4j) - a * f * (np.pi*np.exp(np.pi*f*4j)*4j)-24)
bf /= (16 * f ** 4 * np.pi ** 4)
# bf[f != 0] /= (16 * f**4 * np.pi**4)[f != 0]
bf[f == 0] = 1
return bf
|
[
"def",
"cubic_spline_fourier",
"(",
"f",
",",
"a",
")",
":",
"bf",
"=",
"-",
"(",
"-",
"12",
"*",
"a",
"+",
"12",
"*",
"np",
".",
"exp",
"(",
"-",
"np",
".",
"pi",
"*",
"f",
"*",
"2j",
")",
"+",
"12",
"*",
"np",
".",
"exp",
"(",
"np",
".",
"pi",
"*",
"f",
"*",
"2j",
")",
"+",
"6",
"*",
"a",
"*",
"np",
".",
"exp",
"(",
"-",
"np",
".",
"pi",
"*",
"f",
"*",
"4j",
")",
"+",
"6",
"*",
"a",
"*",
"np",
".",
"exp",
"(",
"np",
".",
"pi",
"*",
"f",
"*",
"4j",
")",
"+",
"f",
"*",
"(",
"np",
".",
"pi",
"*",
"np",
".",
"exp",
"(",
"-",
"np",
".",
"pi",
"*",
"f",
"*",
"2j",
")",
"*",
"12j",
")",
"-",
"f",
"*",
"(",
"np",
".",
"pi",
"*",
"np",
".",
"exp",
"(",
"np",
".",
"pi",
"*",
"f",
"*",
"2j",
")",
"*",
"12j",
")",
"+",
"a",
"*",
"f",
"*",
"(",
"np",
".",
"pi",
"*",
"np",
".",
"exp",
"(",
"-",
"np",
".",
"pi",
"*",
"f",
"*",
"2j",
")",
"*",
"16j",
")",
"-",
"a",
"*",
"f",
"*",
"(",
"np",
".",
"pi",
"*",
"np",
".",
"exp",
"(",
"np",
".",
"pi",
"*",
"f",
"*",
"2j",
")",
"*",
"16j",
")",
"+",
"a",
"*",
"f",
"*",
"(",
"np",
".",
"pi",
"*",
"np",
".",
"exp",
"(",
"-",
"np",
".",
"pi",
"*",
"f",
"*",
"4j",
")",
"*",
"4j",
")",
"-",
"a",
"*",
"f",
"*",
"(",
"np",
".",
"pi",
"*",
"np",
".",
"exp",
"(",
"np",
".",
"pi",
"*",
"f",
"*",
"4j",
")",
"*",
"4j",
")",
"-",
"24",
")",
"bf",
"/=",
"(",
"16",
"*",
"f",
"**",
"4",
"*",
"np",
".",
"pi",
"**",
"4",
")",
"# bf[f != 0] /= (16 * f**4 * np.pi**4)[f != 0]",
"bf",
"[",
"f",
"==",
"0",
"]",
"=",
"1",
"return",
"bf"
] |
https://github.com/fengyang95/pyCFTrackers/blob/5559b9c84e2331e09e2d7c2d71ce3c6dc7f7075a/lib/eco/fourier_tools.py#L65-L76
|
|
openshift/openshift-tools
|
1188778e728a6e4781acf728123e5b356380fe6f
|
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_clusterrole.py
|
python
|
Yedit.load
|
(self, content_type='yaml')
|
return self.yaml_dict
|
return yaml file
|
return yaml file
|
[
"return",
"yaml",
"file"
] |
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripLoader if supported.
try:
self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
except AttributeError:
self.yaml_dict = yaml.safe_load(contents)
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. {}'.format(err))
return self.yaml_dict
|
[
"def",
"load",
"(",
"self",
",",
"content_type",
"=",
"'yaml'",
")",
":",
"contents",
"=",
"self",
".",
"read",
"(",
")",
"if",
"not",
"contents",
"and",
"not",
"self",
".",
"content",
":",
"return",
"None",
"if",
"self",
".",
"content",
":",
"if",
"isinstance",
"(",
"self",
".",
"content",
",",
"dict",
")",
":",
"self",
".",
"yaml_dict",
"=",
"self",
".",
"content",
"return",
"self",
".",
"yaml_dict",
"elif",
"isinstance",
"(",
"self",
".",
"content",
",",
"str",
")",
":",
"contents",
"=",
"self",
".",
"content",
"# check if it is yaml",
"try",
":",
"if",
"content_type",
"==",
"'yaml'",
"and",
"contents",
":",
"# Try to set format attributes if supported",
"try",
":",
"self",
".",
"yaml_dict",
".",
"fa",
".",
"set_block_style",
"(",
")",
"except",
"AttributeError",
":",
"pass",
"# Try to use RoundTripLoader if supported.",
"try",
":",
"self",
".",
"yaml_dict",
"=",
"yaml",
".",
"load",
"(",
"contents",
",",
"yaml",
".",
"RoundTripLoader",
")",
"except",
"AttributeError",
":",
"self",
".",
"yaml_dict",
"=",
"yaml",
".",
"safe_load",
"(",
"contents",
")",
"# Try to set format attributes if supported",
"try",
":",
"self",
".",
"yaml_dict",
".",
"fa",
".",
"set_block_style",
"(",
")",
"except",
"AttributeError",
":",
"pass",
"elif",
"content_type",
"==",
"'json'",
"and",
"contents",
":",
"self",
".",
"yaml_dict",
"=",
"json",
".",
"loads",
"(",
"contents",
")",
"except",
"yaml",
".",
"YAMLError",
"as",
"err",
":",
"# Error loading yaml or json",
"raise",
"YeditException",
"(",
"'Problem with loading yaml file. {}'",
".",
"format",
"(",
"err",
")",
")",
"return",
"self",
".",
"yaml_dict"
] |
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_clusterrole.py#L385-L426
|
|
Wangler2333/tcp_udp_web_tools-pyqt5
|
791df73791e3e6f61643f10613c84810cdf2ffc2
|
main.py
|
python
|
MainWindow.click_clear
|
(self)
|
pushbutton_clear控件点击触发的槽
:return: None
|
pushbutton_clear控件点击触发的槽
:return: None
|
[
"pushbutton_clear控件点击触发的槽",
":",
"return",
":",
"None"
] |
def click_clear(self):
"""
pushbutton_clear控件点击触发的槽
:return: None
"""
# 清空接收区屏幕
self.textBrowser_recv.clear()
|
[
"def",
"click_clear",
"(",
"self",
")",
":",
"# 清空接收区屏幕",
"self",
".",
"textBrowser_recv",
".",
"clear",
"(",
")"
] |
https://github.com/Wangler2333/tcp_udp_web_tools-pyqt5/blob/791df73791e3e6f61643f10613c84810cdf2ffc2/main.py#L100-L106
|
||
zhaoolee/StarsAndClown
|
b2d4039cad2f9232b691e5976f787b49a0a2c113
|
node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
|
python
|
ExpandMacros
|
(string, expansions)
|
return string
|
Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict.
|
Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict.
|
[
"Expand",
"$",
"(",
"Variable",
")",
"per",
"expansions",
"dict",
".",
"See",
"MsvsSettings",
".",
"GetVSMacroEnv",
"for",
"the",
"canonical",
"way",
"to",
"retrieve",
"a",
"suitable",
"dict",
"."
] |
def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict."""
if '$' in string:
for old, new in expansions.iteritems():
assert '$(' not in new, new
string = string.replace(old, new)
return string
|
[
"def",
"ExpandMacros",
"(",
"string",
",",
"expansions",
")",
":",
"if",
"'$'",
"in",
"string",
":",
"for",
"old",
",",
"new",
"in",
"expansions",
".",
"iteritems",
"(",
")",
":",
"assert",
"'$('",
"not",
"in",
"new",
",",
"new",
"string",
"=",
"string",
".",
"replace",
"(",
"old",
",",
"new",
")",
"return",
"string"
] |
https://github.com/zhaoolee/StarsAndClown/blob/b2d4039cad2f9232b691e5976f787b49a0a2c113/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py#L940-L947
|
|
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/template/light.py
|
python
|
LightTemplate._update_state
|
(self, result)
|
Update the state from the template.
|
Update the state from the template.
|
[
"Update",
"the",
"state",
"from",
"the",
"template",
"."
] |
def _update_state(self, result):
"""Update the state from the template."""
if isinstance(result, TemplateError):
# This behavior is legacy
self._state = False
if not self._availability_template:
self._attr_available = True
return
if isinstance(result, bool):
self._state = result
return
state = str(result).lower()
if state in _VALID_STATES:
self._state = state in ("true", STATE_ON)
return
_LOGGER.error(
"Received invalid light is_on state: %s. Expected: %s",
state,
", ".join(_VALID_STATES),
)
self._state = None
|
[
"def",
"_update_state",
"(",
"self",
",",
"result",
")",
":",
"if",
"isinstance",
"(",
"result",
",",
"TemplateError",
")",
":",
"# This behavior is legacy",
"self",
".",
"_state",
"=",
"False",
"if",
"not",
"self",
".",
"_availability_template",
":",
"self",
".",
"_attr_available",
"=",
"True",
"return",
"if",
"isinstance",
"(",
"result",
",",
"bool",
")",
":",
"self",
".",
"_state",
"=",
"result",
"return",
"state",
"=",
"str",
"(",
"result",
")",
".",
"lower",
"(",
")",
"if",
"state",
"in",
"_VALID_STATES",
":",
"self",
".",
"_state",
"=",
"state",
"in",
"(",
"\"true\"",
",",
"STATE_ON",
")",
"return",
"_LOGGER",
".",
"error",
"(",
"\"Received invalid light is_on state: %s. Expected: %s\"",
",",
"state",
",",
"\", \"",
".",
"join",
"(",
"_VALID_STATES",
")",
",",
")",
"self",
".",
"_state",
"=",
"None"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/template/light.py#L526-L549
|
||
deepjyoti30/ytmdl
|
0227541f303739a01e27a6d74499229d9bf44f84
|
ytmdl/yt.py
|
python
|
is_yt_url
|
(url)
|
return match(yt_url, url)
|
Check if the passed URL is a valid youtube URL.
|
Check if the passed URL is a valid youtube URL.
|
[
"Check",
"if",
"the",
"passed",
"URL",
"is",
"a",
"valid",
"youtube",
"URL",
"."
] |
def is_yt_url(url):
"""
Check if the passed URL is a valid youtube URL.
"""
yt_url = r"https?://(www\.|music\.)?youtube\.com/watch\?v=.*?$"
return match(yt_url, url)
|
[
"def",
"is_yt_url",
"(",
"url",
")",
":",
"yt_url",
"=",
"r\"https?://(www\\.|music\\.)?youtube\\.com/watch\\?v=.*?$\"",
"return",
"match",
"(",
"yt_url",
",",
"url",
")"
] |
https://github.com/deepjyoti30/ytmdl/blob/0227541f303739a01e27a6d74499229d9bf44f84/ytmdl/yt.py#L262-L267
|
|
JulianEberius/SublimePythonIDE
|
d70e40abc0c9f347af3204c7b910e0d6bfd6e459
|
server/lib/python2/rope/refactor/similarfinder.py
|
python
|
_ASTMatcher.__init__
|
(self, body, pattern, does_match)
|
Searches the given pattern in the body AST.
body is an AST node and pattern can be either an AST node or
a list of ASTs nodes
|
Searches the given pattern in the body AST.
|
[
"Searches",
"the",
"given",
"pattern",
"in",
"the",
"body",
"AST",
"."
] |
def __init__(self, body, pattern, does_match):
"""Searches the given pattern in the body AST.
body is an AST node and pattern can be either an AST node or
a list of ASTs nodes
"""
self.body = body
self.pattern = pattern
self.matches = None
self.ropevar = _RopeVariable()
self.matches_callback = does_match
|
[
"def",
"__init__",
"(",
"self",
",",
"body",
",",
"pattern",
",",
"does_match",
")",
":",
"self",
".",
"body",
"=",
"body",
"self",
".",
"pattern",
"=",
"pattern",
"self",
".",
"matches",
"=",
"None",
"self",
".",
"ropevar",
"=",
"_RopeVariable",
"(",
")",
"self",
".",
"matches_callback",
"=",
"does_match"
] |
https://github.com/JulianEberius/SublimePythonIDE/blob/d70e40abc0c9f347af3204c7b910e0d6bfd6e459/server/lib/python2/rope/refactor/similarfinder.py#L134-L144
|
||
matthiask/pdfdocument
|
fddcd03d5561b9249000413d3df667fbf8dbb0d2
|
pdfdocument/document.py
|
python
|
PDFDocument.spacer
|
(self, height=0.6 * cm)
|
[] |
def spacer(self, height=0.6 * cm):
self.story.append(Spacer(1, height))
|
[
"def",
"spacer",
"(",
"self",
",",
"height",
"=",
"0.6",
"*",
"cm",
")",
":",
"self",
".",
"story",
".",
"append",
"(",
"Spacer",
"(",
"1",
",",
"height",
")",
")"
] |
https://github.com/matthiask/pdfdocument/blob/fddcd03d5561b9249000413d3df667fbf8dbb0d2/pdfdocument/document.py#L482-L483
|
||||
zhanghe06/python
|
a678ce38a3770c91ad12e617810bf9f5ccf7898b
|
tools/redis_pub_sub.py
|
python
|
RedisPubSub.__init__
|
(self, name, namespace='queue', **redis_kwargs)
|
The default connection parameters are: host='localhost', port=6379, db=0
|
The default connection parameters are: host='localhost', port=6379, db=0
|
[
"The",
"default",
"connection",
"parameters",
"are",
":",
"host",
"=",
"localhost",
"port",
"=",
"6379",
"db",
"=",
"0"
] |
def __init__(self, name, namespace='queue', **redis_kwargs):
"""The default connection parameters are: host='localhost', port=6379, db=0"""
self.__db = redis.Redis(**redis_kwargs)
self.key = '%s:%s' % (namespace, name)
|
[
"def",
"__init__",
"(",
"self",
",",
"name",
",",
"namespace",
"=",
"'queue'",
",",
"*",
"*",
"redis_kwargs",
")",
":",
"self",
".",
"__db",
"=",
"redis",
".",
"Redis",
"(",
"*",
"*",
"redis_kwargs",
")",
"self",
".",
"key",
"=",
"'%s:%s'",
"%",
"(",
"namespace",
",",
"name",
")"
] |
https://github.com/zhanghe06/python/blob/a678ce38a3770c91ad12e617810bf9f5ccf7898b/tools/redis_pub_sub.py#L21-L24
|
||
khanhnamle1994/natural-language-processing
|
01d450d5ac002b0156ef4cf93a07cb508c1bcdc5
|
assignment1/.env/lib/python2.7/site-packages/pip/_vendor/ipaddress.py
|
python
|
_IPAddressBase._ip_int_from_prefix
|
(cls, prefixlen)
|
return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
|
Turn the prefix length into a bitwise netmask
Args:
prefixlen: An integer, the prefix length.
Returns:
An integer.
|
Turn the prefix length into a bitwise netmask
|
[
"Turn",
"the",
"prefix",
"length",
"into",
"a",
"bitwise",
"netmask"
] |
def _ip_int_from_prefix(cls, prefixlen):
"""Turn the prefix length into a bitwise netmask
Args:
prefixlen: An integer, the prefix length.
Returns:
An integer.
"""
return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
|
[
"def",
"_ip_int_from_prefix",
"(",
"cls",
",",
"prefixlen",
")",
":",
"return",
"cls",
".",
"_ALL_ONES",
"^",
"(",
"cls",
".",
"_ALL_ONES",
">>",
"prefixlen",
")"
] |
https://github.com/khanhnamle1994/natural-language-processing/blob/01d450d5ac002b0156ef4cf93a07cb508c1bcdc5/assignment1/.env/lib/python2.7/site-packages/pip/_vendor/ipaddress.py#L556-L566
|
|
bhoov/exbert
|
d27b6236aa51b185f7d3fed904f25cabe3baeb1a
|
server/utils/f.py
|
python
|
pick
|
(keys:Union[List, Set], obj:Dict)
|
return {k: obj[k] for k in keys}
|
Return a NEW object containing `keys` from the original `obj`
|
Return a NEW object containing `keys` from the original `obj`
|
[
"Return",
"a",
"NEW",
"object",
"containing",
"keys",
"from",
"the",
"original",
"obj"
] |
def pick(keys:Union[List, Set], obj:Dict) -> Dict:
""" Return a NEW object containing `keys` from the original `obj` """
return {k: obj[k] for k in keys}
|
[
"def",
"pick",
"(",
"keys",
":",
"Union",
"[",
"List",
",",
"Set",
"]",
",",
"obj",
":",
"Dict",
")",
"->",
"Dict",
":",
"return",
"{",
"k",
":",
"obj",
"[",
"k",
"]",
"for",
"k",
"in",
"keys",
"}"
] |
https://github.com/bhoov/exbert/blob/d27b6236aa51b185f7d3fed904f25cabe3baeb1a/server/utils/f.py#L62-L64
|
|
edfungus/Crouton
|
ada98b3930192938a48909072b45cb84b945f875
|
clients/python_clients/cf_demo_client/cf_env/lib/python2.7/site-packages/jinja2/filters.py
|
python
|
do_join
|
(eval_ctx, value, d=u'', attribute=None)
|
return soft_unicode(d).join(imap(soft_unicode, value))
|
Return a string which is the concatenation of the strings in the
sequence. The separator between elements is an empty string per
default, you can define it with the optional parameter:
.. sourcecode:: jinja
{{ [1, 2, 3]|join('|') }}
-> 1|2|3
{{ [1, 2, 3]|join }}
-> 123
It is also possible to join certain attributes of an object:
.. sourcecode:: jinja
{{ users|join(', ', attribute='username') }}
.. versionadded:: 2.6
The `attribute` parameter was added.
|
Return a string which is the concatenation of the strings in the
sequence. The separator between elements is an empty string per
default, you can define it with the optional parameter:
|
[
"Return",
"a",
"string",
"which",
"is",
"the",
"concatenation",
"of",
"the",
"strings",
"in",
"the",
"sequence",
".",
"The",
"separator",
"between",
"elements",
"is",
"an",
"empty",
"string",
"per",
"default",
"you",
"can",
"define",
"it",
"with",
"the",
"optional",
"parameter",
":"
] |
def do_join(eval_ctx, value, d=u'', attribute=None):
"""Return a string which is the concatenation of the strings in the
sequence. The separator between elements is an empty string per
default, you can define it with the optional parameter:
.. sourcecode:: jinja
{{ [1, 2, 3]|join('|') }}
-> 1|2|3
{{ [1, 2, 3]|join }}
-> 123
It is also possible to join certain attributes of an object:
.. sourcecode:: jinja
{{ users|join(', ', attribute='username') }}
.. versionadded:: 2.6
The `attribute` parameter was added.
"""
if attribute is not None:
value = imap(make_attrgetter(eval_ctx.environment, attribute), value)
# no automatic escaping? joining is a lot eaiser then
if not eval_ctx.autoescape:
return text_type(d).join(imap(text_type, value))
# if the delimiter doesn't have an html representation we check
# if any of the items has. If yes we do a coercion to Markup
if not hasattr(d, '__html__'):
value = list(value)
do_escape = False
for idx, item in enumerate(value):
if hasattr(item, '__html__'):
do_escape = True
else:
value[idx] = text_type(item)
if do_escape:
d = escape(d)
else:
d = text_type(d)
return d.join(value)
# no html involved, to normal joining
return soft_unicode(d).join(imap(soft_unicode, value))
|
[
"def",
"do_join",
"(",
"eval_ctx",
",",
"value",
",",
"d",
"=",
"u''",
",",
"attribute",
"=",
"None",
")",
":",
"if",
"attribute",
"is",
"not",
"None",
":",
"value",
"=",
"imap",
"(",
"make_attrgetter",
"(",
"eval_ctx",
".",
"environment",
",",
"attribute",
")",
",",
"value",
")",
"# no automatic escaping? joining is a lot eaiser then",
"if",
"not",
"eval_ctx",
".",
"autoescape",
":",
"return",
"text_type",
"(",
"d",
")",
".",
"join",
"(",
"imap",
"(",
"text_type",
",",
"value",
")",
")",
"# if the delimiter doesn't have an html representation we check",
"# if any of the items has. If yes we do a coercion to Markup",
"if",
"not",
"hasattr",
"(",
"d",
",",
"'__html__'",
")",
":",
"value",
"=",
"list",
"(",
"value",
")",
"do_escape",
"=",
"False",
"for",
"idx",
",",
"item",
"in",
"enumerate",
"(",
"value",
")",
":",
"if",
"hasattr",
"(",
"item",
",",
"'__html__'",
")",
":",
"do_escape",
"=",
"True",
"else",
":",
"value",
"[",
"idx",
"]",
"=",
"text_type",
"(",
"item",
")",
"if",
"do_escape",
":",
"d",
"=",
"escape",
"(",
"d",
")",
"else",
":",
"d",
"=",
"text_type",
"(",
"d",
")",
"return",
"d",
".",
"join",
"(",
"value",
")",
"# no html involved, to normal joining",
"return",
"soft_unicode",
"(",
"d",
")",
".",
"join",
"(",
"imap",
"(",
"soft_unicode",
",",
"value",
")",
")"
] |
https://github.com/edfungus/Crouton/blob/ada98b3930192938a48909072b45cb84b945f875/clients/python_clients/cf_demo_client/cf_env/lib/python2.7/site-packages/jinja2/filters.py#L291-L337
|
|
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/sympy/physics/quantum/state.py
|
python
|
Wavefunction.normalize
|
(self)
|
Return a normalized version of the Wavefunction
Examples
========
>>> from sympy import symbols, pi
>>> from sympy.functions import sqrt, sin
>>> from sympy.physics.quantum.state import Wavefunction
>>> x = symbols('x', real=True)
>>> L = symbols('L', positive=True)
>>> n = symbols('n', integer=True, positive=True)
>>> g = sin(n*pi*x/L)
>>> f = Wavefunction(g, (x, 0, L))
>>> f.normalize()
Wavefunction(sqrt(2)*sin(pi*n*x/L)/sqrt(L), (x, 0, L))
|
Return a normalized version of the Wavefunction
|
[
"Return",
"a",
"normalized",
"version",
"of",
"the",
"Wavefunction"
] |
def normalize(self):
"""
Return a normalized version of the Wavefunction
Examples
========
>>> from sympy import symbols, pi
>>> from sympy.functions import sqrt, sin
>>> from sympy.physics.quantum.state import Wavefunction
>>> x = symbols('x', real=True)
>>> L = symbols('L', positive=True)
>>> n = symbols('n', integer=True, positive=True)
>>> g = sin(n*pi*x/L)
>>> f = Wavefunction(g, (x, 0, L))
>>> f.normalize()
Wavefunction(sqrt(2)*sin(pi*n*x/L)/sqrt(L), (x, 0, L))
"""
const = self.norm
if const == oo:
raise NotImplementedError("The function is not normalizable!")
else:
return Wavefunction((const)**(-1)*self.expr, *self.args[1:])
|
[
"def",
"normalize",
"(",
"self",
")",
":",
"const",
"=",
"self",
".",
"norm",
"if",
"const",
"==",
"oo",
":",
"raise",
"NotImplementedError",
"(",
"\"The function is not normalizable!\"",
")",
"else",
":",
"return",
"Wavefunction",
"(",
"(",
"const",
")",
"**",
"(",
"-",
"1",
")",
"*",
"self",
".",
"expr",
",",
"*",
"self",
".",
"args",
"[",
"1",
":",
"]",
")"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/physics/quantum/state.py#L908-L932
|
||
pyppeteer/pyppeteer
|
d55b6f46a0d1315ca176a74a1d358bbbbec7e825
|
pyppeteer/network_manager.py
|
python
|
Request.failure
|
(self)
|
return {'errorText': self._failureText}
|
Return error text.
Return ``None`` unless this request was failed, as reported by
``requestfailed`` event.
When request failed, this method return dictionary which has a
``errorText`` field, which contains human-readable error message, e.g.
``'net::ERR_RAILED'``.
|
Return error text.
|
[
"Return",
"error",
"text",
"."
] |
def failure(self) -> Optional[Dict]:
"""Return error text.
Return ``None`` unless this request was failed, as reported by
``requestfailed`` event.
When request failed, this method return dictionary which has a
``errorText`` field, which contains human-readable error message, e.g.
``'net::ERR_RAILED'``.
"""
if not self._failureText:
return None
return {'errorText': self._failureText}
|
[
"def",
"failure",
"(",
"self",
")",
"->",
"Optional",
"[",
"Dict",
"]",
":",
"if",
"not",
"self",
".",
"_failureText",
":",
"return",
"None",
"return",
"{",
"'errorText'",
":",
"self",
".",
"_failureText",
"}"
] |
https://github.com/pyppeteer/pyppeteer/blob/d55b6f46a0d1315ca176a74a1d358bbbbec7e825/pyppeteer/network_manager.py#L416-L428
|
|
Chia-Network/chia-blockchain
|
34d44c1324ae634a0896f7b02eaa2802af9526cd
|
chia/daemon/server.py
|
python
|
daemon_launch_lock_path
|
(root_path: Path)
|
return root_path / "run" / "start-daemon.launching"
|
A path to a file that is lock when a daemon is launching but not yet started.
This prevents multiple instances from launching.
|
A path to a file that is lock when a daemon is launching but not yet started.
This prevents multiple instances from launching.
|
[
"A",
"path",
"to",
"a",
"file",
"that",
"is",
"lock",
"when",
"a",
"daemon",
"is",
"launching",
"but",
"not",
"yet",
"started",
".",
"This",
"prevents",
"multiple",
"instances",
"from",
"launching",
"."
] |
def daemon_launch_lock_path(root_path: Path) -> Path:
"""
A path to a file that is lock when a daemon is launching but not yet started.
This prevents multiple instances from launching.
"""
return root_path / "run" / "start-daemon.launching"
|
[
"def",
"daemon_launch_lock_path",
"(",
"root_path",
":",
"Path",
")",
"->",
"Path",
":",
"return",
"root_path",
"/",
"\"run\"",
"/",
"\"start-daemon.launching\""
] |
https://github.com/Chia-Network/chia-blockchain/blob/34d44c1324ae634a0896f7b02eaa2802af9526cd/chia/daemon/server.py#L1157-L1162
|
|
JasperSnoek/spearmint
|
b37a541be1ea035f82c7c82bbd93f5b4320e7d91
|
spearmint/spearmint/helpers.py
|
python
|
job_output_file
|
(job)
|
return os.path.join(job.expt_dir, 'output', '%08d.out' % (job.id))
|
[] |
def job_output_file(job):
return os.path.join(job.expt_dir, 'output', '%08d.out' % (job.id))
|
[
"def",
"job_output_file",
"(",
"job",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"job",
".",
"expt_dir",
",",
"'output'",
",",
"'%08d.out'",
"%",
"(",
"job",
".",
"id",
")",
")"
] |
https://github.com/JasperSnoek/spearmint/blob/b37a541be1ea035f82c7c82bbd93f5b4320e7d91/spearmint/spearmint/helpers.py#L61-L62
|
|||
bear/python-twitter
|
1a148ead5029d06bec58c1cbc879764aa4b2bc74
|
twitter/models.py
|
python
|
Status.__repr__
|
(self)
|
A string representation of this twitter.Status instance.
The return value is the ID of status, username and datetime.
Returns:
string: A string representation of this twitter.Status instance with
the ID of status, username and datetime.
|
A string representation of this twitter.Status instance.
The return value is the ID of status, username and datetime.
|
[
"A",
"string",
"representation",
"of",
"this",
"twitter",
".",
"Status",
"instance",
".",
"The",
"return",
"value",
"is",
"the",
"ID",
"of",
"status",
"username",
"and",
"datetime",
"."
] |
def __repr__(self):
""" A string representation of this twitter.Status instance.
The return value is the ID of status, username and datetime.
Returns:
string: A string representation of this twitter.Status instance with
the ID of status, username and datetime.
"""
if self.tweet_mode == 'extended':
text = self.full_text
else:
text = self.text
if self.user:
return "Status(ID={0}, ScreenName={1}, Created={2}, Text={3!r})".format(
self.id,
self.user.screen_name,
self.created_at,
text)
else:
return u"Status(ID={0}, Created={1}, Text={2!r})".format(
self.id,
self.created_at,
text)
|
[
"def",
"__repr__",
"(",
"self",
")",
":",
"if",
"self",
".",
"tweet_mode",
"==",
"'extended'",
":",
"text",
"=",
"self",
".",
"full_text",
"else",
":",
"text",
"=",
"self",
".",
"text",
"if",
"self",
".",
"user",
":",
"return",
"\"Status(ID={0}, ScreenName={1}, Created={2}, Text={3!r})\"",
".",
"format",
"(",
"self",
".",
"id",
",",
"self",
".",
"user",
".",
"screen_name",
",",
"self",
".",
"created_at",
",",
"text",
")",
"else",
":",
"return",
"u\"Status(ID={0}, Created={1}, Text={2!r})\"",
".",
"format",
"(",
"self",
".",
"id",
",",
"self",
".",
"created_at",
",",
"text",
")"
] |
https://github.com/bear/python-twitter/blob/1a148ead5029d06bec58c1cbc879764aa4b2bc74/twitter/models.py#L451-L473
|
||
i3visio/osrframework
|
e02a6e9b1346ab5a01244c0d19bcec8232bf1a37
|
osrframework/utils/platforms.py
|
python
|
Platform.setCredentials
|
(self, cred)
|
Getting the credentials and appending it to self.creds
|
Getting the credentials and appending it to self.creds
|
[
"Getting",
"the",
"credentials",
"and",
"appending",
"it",
"to",
"self",
".",
"creds"
] |
def setCredentials(self, cred):
"""Getting the credentials and appending it to self.creds"""
try:
self.creds.append(cred)
except:
pass
|
[
"def",
"setCredentials",
"(",
"self",
",",
"cred",
")",
":",
"try",
":",
"self",
".",
"creds",
".",
"append",
"(",
"cred",
")",
"except",
":",
"pass"
] |
https://github.com/i3visio/osrframework/blob/e02a6e9b1346ab5a01244c0d19bcec8232bf1a37/osrframework/utils/platforms.py#L748-L753
|
||
wobine/blackboard101
|
e991ea0b98fd26059bf3806e5749b5e5f737e791
|
connection.py
|
python
|
BitcoinConnection.getbalance
|
(self, account=None, minconf=None)
|
return self.proxy.getbalance(*args)
|
Get the current balance, either for an account or the total server balance.
Arguments:
- *account* -- If this parameter is specified, returns the balance in the account.
- *minconf* -- Minimum number of confirmations required for transferred balance.
|
Get the current balance, either for an account or the total server balance.
|
[
"Get",
"the",
"current",
"balance",
"either",
"for",
"an",
"account",
"or",
"the",
"total",
"server",
"balance",
"."
] |
def getbalance(self, account=None, minconf=None):
"""
Get the current balance, either for an account or the total server balance.
Arguments:
- *account* -- If this parameter is specified, returns the balance in the account.
- *minconf* -- Minimum number of confirmations required for transferred balance.
"""
args = []
if account is not None:
args.append(account)
if minconf is not None:
args.append(minconf)
return self.proxy.getbalance(*args)
|
[
"def",
"getbalance",
"(",
"self",
",",
"account",
"=",
"None",
",",
"minconf",
"=",
"None",
")",
":",
"args",
"=",
"[",
"]",
"if",
"account",
"is",
"not",
"None",
":",
"args",
".",
"append",
"(",
"account",
")",
"if",
"minconf",
"is",
"not",
"None",
":",
"args",
".",
"append",
"(",
"minconf",
")",
"return",
"self",
".",
"proxy",
".",
"getbalance",
"(",
"*",
"args",
")"
] |
https://github.com/wobine/blackboard101/blob/e991ea0b98fd26059bf3806e5749b5e5f737e791/connection.py#L465-L479
|
|
EtienneCmb/visbrain
|
b599038e095919dc193b12d5e502d127de7d03c9
|
visbrain/objects/visbrain_obj.py
|
python
|
CombineObjects.visible_obj
|
(self)
|
return self._visible_obj
|
Get the visible_obj value.
|
Get the visible_obj value.
|
[
"Get",
"the",
"visible_obj",
"value",
"."
] |
def visible_obj(self):
"""Get the visible_obj value."""
return self._visible_obj
|
[
"def",
"visible_obj",
"(",
"self",
")",
":",
"return",
"self",
".",
"_visible_obj"
] |
https://github.com/EtienneCmb/visbrain/blob/b599038e095919dc193b12d5e502d127de7d03c9/visbrain/objects/visbrain_obj.py#L553-L555
|
|
kevinzakka/spatial-transformer-network
|
375f99046383316b18edfb5c575dc390c4ee3193
|
utils.py
|
python
|
deg2rad
|
(x)
|
return (x * np.pi) / 180
|
Converts an angle in degrees to radians.
|
Converts an angle in degrees to radians.
|
[
"Converts",
"an",
"angle",
"in",
"degrees",
"to",
"radians",
"."
] |
def deg2rad(x):
"""Converts an angle in degrees to radians."""
return (x * np.pi) / 180
|
[
"def",
"deg2rad",
"(",
"x",
")",
":",
"return",
"(",
"x",
"*",
"np",
".",
"pi",
")",
"/",
"180"
] |
https://github.com/kevinzakka/spatial-transformer-network/blob/375f99046383316b18edfb5c575dc390c4ee3193/utils.py#L32-L34
|
|
golismero/golismero
|
7d605b937e241f51c1ca4f47b20f755eeefb9d76
|
tools/sqlmap/thirdparty/bottle/bottle.py
|
python
|
Bottle.close
|
(self)
|
Close the application and all installed plugins.
|
Close the application and all installed plugins.
|
[
"Close",
"the",
"application",
"and",
"all",
"installed",
"plugins",
"."
] |
def close(self):
''' Close the application and all installed plugins. '''
for plugin in self.plugins:
if hasattr(plugin, 'close'): plugin.close()
self.stopped = True
|
[
"def",
"close",
"(",
"self",
")",
":",
"for",
"plugin",
"in",
"self",
".",
"plugins",
":",
"if",
"hasattr",
"(",
"plugin",
",",
"'close'",
")",
":",
"plugin",
".",
"close",
"(",
")",
"self",
".",
"stopped",
"=",
"True"
] |
https://github.com/golismero/golismero/blob/7d605b937e241f51c1ca4f47b20f755eeefb9d76/tools/sqlmap/thirdparty/bottle/bottle.py#L637-L641
|
||
DataDog/integrations-core
|
934674b29d94b70ccc008f76ea172d0cdae05e1e
|
network/datadog_checks/network/config_models/defaults.py
|
python
|
instance_service
|
(field, value)
|
return get_default_field_value(field, value)
|
[] |
def instance_service(field, value):
return get_default_field_value(field, value)
|
[
"def",
"instance_service",
"(",
"field",
",",
"value",
")",
":",
"return",
"get_default_field_value",
"(",
"field",
",",
"value",
")"
] |
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/network/datadog_checks/network/config_models/defaults.py#L69-L70
|
|||
WeBankFinTech/eggroll
|
a7a68dba78b7739c771c4a6c59c343c13752e763
|
python/eggroll/core/datastructure/queue.py
|
python
|
_PySimpleQueue.put_nowait
|
(self, item)
|
return self.put(item, block=False)
|
Put an item into the queue without blocking.
This is exactly equivalent to `put(item)` and is only provided
for compatibility with the Queue class.
|
Put an item into the queue without blocking.
|
[
"Put",
"an",
"item",
"into",
"the",
"queue",
"without",
"blocking",
"."
] |
def put_nowait(self, item):
'''Put an item into the queue without blocking.
This is exactly equivalent to `put(item)` and is only provided
for compatibility with the Queue class.
'''
return self.put(item, block=False)
|
[
"def",
"put_nowait",
"(",
"self",
",",
"item",
")",
":",
"return",
"self",
".",
"put",
"(",
"item",
",",
"block",
"=",
"False",
")"
] |
https://github.com/WeBankFinTech/eggroll/blob/a7a68dba78b7739c771c4a6c59c343c13752e763/python/eggroll/core/datastructure/queue.py#L63-L69
|
|
almarklein/visvis
|
766ed97767b44a55a6ff72c742d7385e074d3d55
|
core/cameras.py
|
python
|
FlyCamera.rotation
|
()
|
return locals()
|
Get/set the current rotation quaternion.
|
Get/set the current rotation quaternion.
|
[
"Get",
"/",
"set",
"the",
"current",
"rotation",
"quaternion",
"."
] |
def rotation():
""" Get/set the current rotation quaternion.
"""
def fget(self):
return self._rotation1.copy()
def fset(self, value):
# Set
self._rotation1 = value.normalize()
# Draw
for ax in self.axeses:
ax.Draw()
return locals()
|
[
"def",
"rotation",
"(",
")",
":",
"def",
"fget",
"(",
"self",
")",
":",
"return",
"self",
".",
"_rotation1",
".",
"copy",
"(",
")",
"def",
"fset",
"(",
"self",
",",
"value",
")",
":",
"# Set",
"self",
".",
"_rotation1",
"=",
"value",
".",
"normalize",
"(",
")",
"# Draw",
"for",
"ax",
"in",
"self",
".",
"axeses",
":",
"ax",
".",
"Draw",
"(",
")",
"return",
"locals",
"(",
")"
] |
https://github.com/almarklein/visvis/blob/766ed97767b44a55a6ff72c742d7385e074d3d55/core/cameras.py#L1301-L1312
|
|
socialpoint-labs/sheetfu
|
fa9f1fc468e840923ca8e85f3a4e30f4e90a9512
|
sheetfu/model.py
|
python
|
Range.set_font_colors
|
(self, colors, batch_to=None)
|
return self.make_set_request(
field='userEnteredFormat.textFormat.foregroundColor',
data=colors,
set_parser=CellParsers.set_font_color,
batch_to=batch_to
)
|
Set font colors for the Range.
:param colors: 2D array of font colors (size must match range coordinates).
|
Set font colors for the Range.
:param colors: 2D array of font colors (size must match range coordinates).
|
[
"Set",
"font",
"colors",
"for",
"the",
"Range",
".",
":",
"param",
"colors",
":",
"2D",
"array",
"of",
"font",
"colors",
"(",
"size",
"must",
"match",
"range",
"coordinates",
")",
"."
] |
def set_font_colors(self, colors, batch_to=None):
"""
Set font colors for the Range.
:param colors: 2D array of font colors (size must match range coordinates).
"""
return self.make_set_request(
field='userEnteredFormat.textFormat.foregroundColor',
data=colors,
set_parser=CellParsers.set_font_color,
batch_to=batch_to
)
|
[
"def",
"set_font_colors",
"(",
"self",
",",
"colors",
",",
"batch_to",
"=",
"None",
")",
":",
"return",
"self",
".",
"make_set_request",
"(",
"field",
"=",
"'userEnteredFormat.textFormat.foregroundColor'",
",",
"data",
"=",
"colors",
",",
"set_parser",
"=",
"CellParsers",
".",
"set_font_color",
",",
"batch_to",
"=",
"batch_to",
")"
] |
https://github.com/socialpoint-labs/sheetfu/blob/fa9f1fc468e840923ca8e85f3a4e30f4e90a9512/sheetfu/model.py#L582-L592
|
|
natasha/natasha
|
990886ff355a7c98071a7c7fdb850f217962b076
|
natasha/doc.py
|
python
|
DocToken.lemmatize
|
(self, vocab)
|
[] |
def lemmatize(self, vocab):
self.lemma = vocab.lemmatize(self.text, self.pos, self.feats)
|
[
"def",
"lemmatize",
"(",
"self",
",",
"vocab",
")",
":",
"self",
".",
"lemma",
"=",
"vocab",
".",
"lemmatize",
"(",
"self",
".",
"text",
",",
"self",
".",
"pos",
",",
"self",
".",
"feats",
")"
] |
https://github.com/natasha/natasha/blob/990886ff355a7c98071a7c7fdb850f217962b076/natasha/doc.py#L37-L38
|
||||
taomujian/linbing
|
fe772a58f41e3b046b51a866bdb7e4655abaf51a
|
python/app/thirdparty/oneforall/modules/datasets/robtex.py
|
python
|
run
|
(domain)
|
类统一调用入口
:param str domain: 域名
|
类统一调用入口
|
[
"类统一调用入口"
] |
def run(domain):
"""
类统一调用入口
:param str domain: 域名
"""
query = Robtex(domain)
query.run()
|
[
"def",
"run",
"(",
"domain",
")",
":",
"query",
"=",
"Robtex",
"(",
"domain",
")",
"query",
".",
"run",
"(",
")"
] |
https://github.com/taomujian/linbing/blob/fe772a58f41e3b046b51a866bdb7e4655abaf51a/python/app/thirdparty/oneforall/modules/datasets/robtex.py#L47-L54
|
||
ahmetcemturan/SFACT
|
7576e29ba72b33e5058049b77b7b558875542747
|
skeinforge_application/skeinforge_plugins/analyze_plugins/vectorwrite.py
|
python
|
VectorwriteSkein.parseInitialization
|
(self)
|
Parse gcode initialization and store the parameters.
|
Parse gcode initialization and store the parameters.
|
[
"Parse",
"gcode",
"initialization",
"and",
"store",
"the",
"parameters",
"."
] |
def parseInitialization(self):
'Parse gcode initialization and store the parameters.'
for self.lineIndex in xrange(len(self.lines)):
line = self.lines[self.lineIndex]
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
firstWord = gcodec.getFirstWord(splitLine)
if firstWord == '(<decimalPlacesCarried>':
self.decimalPlacesCarried = int(splitLine[1])
elif firstWord == '(<layerHeight>':
self.layerHeight = float(splitLine[1])
elif firstWord == '(<crafting>)':
return
elif firstWord == '(<edgeWidth>':
self.edgeWidth = float(splitLine[1])
|
[
"def",
"parseInitialization",
"(",
"self",
")",
":",
"for",
"self",
".",
"lineIndex",
"in",
"xrange",
"(",
"len",
"(",
"self",
".",
"lines",
")",
")",
":",
"line",
"=",
"self",
".",
"lines",
"[",
"self",
".",
"lineIndex",
"]",
"splitLine",
"=",
"gcodec",
".",
"getSplitLineBeforeBracketSemicolon",
"(",
"line",
")",
"firstWord",
"=",
"gcodec",
".",
"getFirstWord",
"(",
"splitLine",
")",
"if",
"firstWord",
"==",
"'(<decimalPlacesCarried>'",
":",
"self",
".",
"decimalPlacesCarried",
"=",
"int",
"(",
"splitLine",
"[",
"1",
"]",
")",
"elif",
"firstWord",
"==",
"'(<layerHeight>'",
":",
"self",
".",
"layerHeight",
"=",
"float",
"(",
"splitLine",
"[",
"1",
"]",
")",
"elif",
"firstWord",
"==",
"'(<crafting>)'",
":",
"return",
"elif",
"firstWord",
"==",
"'(<edgeWidth>'",
":",
"self",
".",
"edgeWidth",
"=",
"float",
"(",
"splitLine",
"[",
"1",
"]",
")"
] |
https://github.com/ahmetcemturan/SFACT/blob/7576e29ba72b33e5058049b77b7b558875542747/skeinforge_application/skeinforge_plugins/analyze_plugins/vectorwrite.py#L306-L319
|
||
projecthamster/hamster
|
19d160090de30e756bdc3122ff935bdaa86e2843
|
waflib/extras/xcode6.py
|
python
|
PBXProject.create_target_dependency
|
(self, target, name)
|
return dependency
|
: param target : PXBNativeTarget
|
: param target : PXBNativeTarget
|
[
":",
"param",
"target",
":",
"PXBNativeTarget"
] |
def create_target_dependency(self, target, name):
""" : param target : PXBNativeTarget """
proxy = PBXContainerItemProxy(self, target, name)
dependency = PBXTargetDependency(target, proxy)
return dependency
|
[
"def",
"create_target_dependency",
"(",
"self",
",",
"target",
",",
"name",
")",
":",
"proxy",
"=",
"PBXContainerItemProxy",
"(",
"self",
",",
"target",
",",
"name",
")",
"dependency",
"=",
"PBXTargetDependency",
"(",
"target",
",",
"proxy",
")",
"return",
"dependency"
] |
https://github.com/projecthamster/hamster/blob/19d160090de30e756bdc3122ff935bdaa86e2843/waflib/extras/xcode6.py#L435-L439
|
|
VirtueSecurity/aws-extender
|
d123b7e1a845847709ba3a481f11996bddc68a1c
|
BappModules/docutils/utils/math/math2html.py
|
python
|
NumberGenerator.isroman
|
(self, type)
|
return self.deasterisk(type).lower() in self.romanlayouts
|
Find out if the layout type should have roman numeration.
|
Find out if the layout type should have roman numeration.
|
[
"Find",
"out",
"if",
"the",
"layout",
"type",
"should",
"have",
"roman",
"numeration",
"."
] |
def isroman(self, type):
"Find out if the layout type should have roman numeration."
return self.deasterisk(type).lower() in self.romanlayouts
|
[
"def",
"isroman",
"(",
"self",
",",
"type",
")",
":",
"return",
"self",
".",
"deasterisk",
"(",
"type",
")",
".",
"lower",
"(",
")",
"in",
"self",
".",
"romanlayouts"
] |
https://github.com/VirtueSecurity/aws-extender/blob/d123b7e1a845847709ba3a481f11996bddc68a1c/BappModules/docutils/utils/math/math2html.py#L3307-L3309
|
|
omz/PythonistaAppTemplate
|
f560f93f8876d82a21d108977f90583df08d55af
|
PythonistaAppTemplate/PythonistaKit.framework/pylib/site-packages/sqlalchemy/sql/schema.py
|
python
|
MetaData.append_ddl_listener
|
(self, event_name, listener)
|
Append a DDL event listener to this ``MetaData``.
.. deprecated:: 0.7
See :class:`.DDLEvents`.
|
Append a DDL event listener to this ``MetaData``.
|
[
"Append",
"a",
"DDL",
"event",
"listener",
"to",
"this",
"MetaData",
"."
] |
def append_ddl_listener(self, event_name, listener):
"""Append a DDL event listener to this ``MetaData``.
.. deprecated:: 0.7
See :class:`.DDLEvents`.
"""
def adapt_listener(target, connection, **kw):
tables = kw['tables']
listener(event, target, connection, tables=tables)
event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
|
[
"def",
"append_ddl_listener",
"(",
"self",
",",
"event_name",
",",
"listener",
")",
":",
"def",
"adapt_listener",
"(",
"target",
",",
"connection",
",",
"*",
"*",
"kw",
")",
":",
"tables",
"=",
"kw",
"[",
"'tables'",
"]",
"listener",
"(",
"event",
",",
"target",
",",
"connection",
",",
"tables",
"=",
"tables",
")",
"event",
".",
"listen",
"(",
"self",
",",
"\"\"",
"+",
"event_name",
".",
"replace",
"(",
"'-'",
",",
"'_'",
")",
",",
"adapt_listener",
")"
] |
https://github.com/omz/PythonistaAppTemplate/blob/f560f93f8876d82a21d108977f90583df08d55af/PythonistaAppTemplate/PythonistaKit.framework/pylib/site-packages/sqlalchemy/sql/schema.py#L3314-L3325
|
||
yinghdb/EmbedMask
|
e9465cb72840113ef3f2ce73a3fe475acfeb0f8e
|
fcos_core/data/datasets/evaluation/voc/voc_eval.py
|
python
|
calc_detection_voc_ap
|
(prec, rec, use_07_metric=False)
|
return ap
|
Calculate average precisions based on evaluation code of PASCAL VOC.
This function calculates average precisions
from given precisions and recalls.
The code is based on the evaluation code used in PASCAL VOC Challenge.
Args:
prec (list of numpy.array): A list of arrays.
:obj:`prec[l]` indicates precision for class :math:`l`.
If :obj:`prec[l]` is :obj:`None`, this function returns
:obj:`numpy.nan` for class :math:`l`.
rec (list of numpy.array): A list of arrays.
:obj:`rec[l]` indicates recall for class :math:`l`.
If :obj:`rec[l]` is :obj:`None`, this function returns
:obj:`numpy.nan` for class :math:`l`.
use_07_metric (bool): Whether to use PASCAL VOC 2007 evaluation metric
for calculating average precision. The default value is
:obj:`False`.
Returns:
~numpy.ndarray:
This function returns an array of average precisions.
The :math:`l`-th value corresponds to the average precision
for class :math:`l`. If :obj:`prec[l]` or :obj:`rec[l]` is
:obj:`None`, the corresponding value is set to :obj:`numpy.nan`.
|
Calculate average precisions based on evaluation code of PASCAL VOC.
This function calculates average precisions
from given precisions and recalls.
The code is based on the evaluation code used in PASCAL VOC Challenge.
Args:
prec (list of numpy.array): A list of arrays.
:obj:`prec[l]` indicates precision for class :math:`l`.
If :obj:`prec[l]` is :obj:`None`, this function returns
:obj:`numpy.nan` for class :math:`l`.
rec (list of numpy.array): A list of arrays.
:obj:`rec[l]` indicates recall for class :math:`l`.
If :obj:`rec[l]` is :obj:`None`, this function returns
:obj:`numpy.nan` for class :math:`l`.
use_07_metric (bool): Whether to use PASCAL VOC 2007 evaluation metric
for calculating average precision. The default value is
:obj:`False`.
Returns:
~numpy.ndarray:
This function returns an array of average precisions.
The :math:`l`-th value corresponds to the average precision
for class :math:`l`. If :obj:`prec[l]` or :obj:`rec[l]` is
:obj:`None`, the corresponding value is set to :obj:`numpy.nan`.
|
[
"Calculate",
"average",
"precisions",
"based",
"on",
"evaluation",
"code",
"of",
"PASCAL",
"VOC",
".",
"This",
"function",
"calculates",
"average",
"precisions",
"from",
"given",
"precisions",
"and",
"recalls",
".",
"The",
"code",
"is",
"based",
"on",
"the",
"evaluation",
"code",
"used",
"in",
"PASCAL",
"VOC",
"Challenge",
".",
"Args",
":",
"prec",
"(",
"list",
"of",
"numpy",
".",
"array",
")",
":",
"A",
"list",
"of",
"arrays",
".",
":",
"obj",
":",
"prec",
"[",
"l",
"]",
"indicates",
"precision",
"for",
"class",
":",
"math",
":",
"l",
".",
"If",
":",
"obj",
":",
"prec",
"[",
"l",
"]",
"is",
":",
"obj",
":",
"None",
"this",
"function",
"returns",
":",
"obj",
":",
"numpy",
".",
"nan",
"for",
"class",
":",
"math",
":",
"l",
".",
"rec",
"(",
"list",
"of",
"numpy",
".",
"array",
")",
":",
"A",
"list",
"of",
"arrays",
".",
":",
"obj",
":",
"rec",
"[",
"l",
"]",
"indicates",
"recall",
"for",
"class",
":",
"math",
":",
"l",
".",
"If",
":",
"obj",
":",
"rec",
"[",
"l",
"]",
"is",
":",
"obj",
":",
"None",
"this",
"function",
"returns",
":",
"obj",
":",
"numpy",
".",
"nan",
"for",
"class",
":",
"math",
":",
"l",
".",
"use_07_metric",
"(",
"bool",
")",
":",
"Whether",
"to",
"use",
"PASCAL",
"VOC",
"2007",
"evaluation",
"metric",
"for",
"calculating",
"average",
"precision",
".",
"The",
"default",
"value",
"is",
":",
"obj",
":",
"False",
".",
"Returns",
":",
"~numpy",
".",
"ndarray",
":",
"This",
"function",
"returns",
"an",
"array",
"of",
"average",
"precisions",
".",
"The",
":",
"math",
":",
"l",
"-",
"th",
"value",
"corresponds",
"to",
"the",
"average",
"precision",
"for",
"class",
":",
"math",
":",
"l",
".",
"If",
":",
"obj",
":",
"prec",
"[",
"l",
"]",
"or",
":",
"obj",
":",
"rec",
"[",
"l",
"]",
"is",
":",
"obj",
":",
"None",
"the",
"corresponding",
"value",
"is",
"set",
"to",
":",
"obj",
":",
"numpy",
".",
"nan",
"."
] |
def calc_detection_voc_ap(prec, rec, use_07_metric=False):
"""Calculate average precisions based on evaluation code of PASCAL VOC.
This function calculates average precisions
from given precisions and recalls.
The code is based on the evaluation code used in PASCAL VOC Challenge.
Args:
prec (list of numpy.array): A list of arrays.
:obj:`prec[l]` indicates precision for class :math:`l`.
If :obj:`prec[l]` is :obj:`None`, this function returns
:obj:`numpy.nan` for class :math:`l`.
rec (list of numpy.array): A list of arrays.
:obj:`rec[l]` indicates recall for class :math:`l`.
If :obj:`rec[l]` is :obj:`None`, this function returns
:obj:`numpy.nan` for class :math:`l`.
use_07_metric (bool): Whether to use PASCAL VOC 2007 evaluation metric
for calculating average precision. The default value is
:obj:`False`.
Returns:
~numpy.ndarray:
This function returns an array of average precisions.
The :math:`l`-th value corresponds to the average precision
for class :math:`l`. If :obj:`prec[l]` or :obj:`rec[l]` is
:obj:`None`, the corresponding value is set to :obj:`numpy.nan`.
"""
n_fg_class = len(prec)
ap = np.empty(n_fg_class)
for l in range(n_fg_class):
if prec[l] is None or rec[l] is None:
ap[l] = np.nan
continue
if use_07_metric:
# 11 point metric
ap[l] = 0
for t in np.arange(0.0, 1.1, 0.1):
if np.sum(rec[l] >= t) == 0:
p = 0
else:
p = np.max(np.nan_to_num(prec[l])[rec[l] >= t])
ap[l] += p / 11
else:
# correct AP calculation
# first append sentinel values at the end
mpre = np.concatenate(([0], np.nan_to_num(prec[l]), [0]))
mrec = np.concatenate(([0], rec[l], [1]))
mpre = np.maximum.accumulate(mpre[::-1])[::-1]
# to calculate area under PR curve, look for points
# where X axis (recall) changes value
i = np.where(mrec[1:] != mrec[:-1])[0]
# and sum (\Delta recall) * prec
ap[l] = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
return ap
|
[
"def",
"calc_detection_voc_ap",
"(",
"prec",
",",
"rec",
",",
"use_07_metric",
"=",
"False",
")",
":",
"n_fg_class",
"=",
"len",
"(",
"prec",
")",
"ap",
"=",
"np",
".",
"empty",
"(",
"n_fg_class",
")",
"for",
"l",
"in",
"range",
"(",
"n_fg_class",
")",
":",
"if",
"prec",
"[",
"l",
"]",
"is",
"None",
"or",
"rec",
"[",
"l",
"]",
"is",
"None",
":",
"ap",
"[",
"l",
"]",
"=",
"np",
".",
"nan",
"continue",
"if",
"use_07_metric",
":",
"# 11 point metric",
"ap",
"[",
"l",
"]",
"=",
"0",
"for",
"t",
"in",
"np",
".",
"arange",
"(",
"0.0",
",",
"1.1",
",",
"0.1",
")",
":",
"if",
"np",
".",
"sum",
"(",
"rec",
"[",
"l",
"]",
">=",
"t",
")",
"==",
"0",
":",
"p",
"=",
"0",
"else",
":",
"p",
"=",
"np",
".",
"max",
"(",
"np",
".",
"nan_to_num",
"(",
"prec",
"[",
"l",
"]",
")",
"[",
"rec",
"[",
"l",
"]",
">=",
"t",
"]",
")",
"ap",
"[",
"l",
"]",
"+=",
"p",
"/",
"11",
"else",
":",
"# correct AP calculation",
"# first append sentinel values at the end",
"mpre",
"=",
"np",
".",
"concatenate",
"(",
"(",
"[",
"0",
"]",
",",
"np",
".",
"nan_to_num",
"(",
"prec",
"[",
"l",
"]",
")",
",",
"[",
"0",
"]",
")",
")",
"mrec",
"=",
"np",
".",
"concatenate",
"(",
"(",
"[",
"0",
"]",
",",
"rec",
"[",
"l",
"]",
",",
"[",
"1",
"]",
")",
")",
"mpre",
"=",
"np",
".",
"maximum",
".",
"accumulate",
"(",
"mpre",
"[",
":",
":",
"-",
"1",
"]",
")",
"[",
":",
":",
"-",
"1",
"]",
"# to calculate area under PR curve, look for points",
"# where X axis (recall) changes value",
"i",
"=",
"np",
".",
"where",
"(",
"mrec",
"[",
"1",
":",
"]",
"!=",
"mrec",
"[",
":",
"-",
"1",
"]",
")",
"[",
"0",
"]",
"# and sum (\\Delta recall) * prec",
"ap",
"[",
"l",
"]",
"=",
"np",
".",
"sum",
"(",
"(",
"mrec",
"[",
"i",
"+",
"1",
"]",
"-",
"mrec",
"[",
"i",
"]",
")",
"*",
"mpre",
"[",
"i",
"+",
"1",
"]",
")",
"return",
"ap"
] |
https://github.com/yinghdb/EmbedMask/blob/e9465cb72840113ef3f2ce73a3fe475acfeb0f8e/fcos_core/data/datasets/evaluation/voc/voc_eval.py#L158-L214
|
|
gsig/PyVideoResearch
|
46307b1a03ce670696297e2154ddee6f4e6b0b8a
|
datasets/dataset.py
|
python
|
Dataset.__getitem__
|
(self, index)
|
Args:
index (int): Index
Returns:
if self.split == 'val video'
list of tuples: (image, target)
else:
tuple: (image, target) target is label tensor
|
Args:
index (int): Index
Returns:
if self.split == 'val video'
list of tuples: (image, target)
else:
tuple: (image, target) target is label tensor
|
[
"Args",
":",
"index",
"(",
"int",
")",
":",
"Index",
"Returns",
":",
"if",
"self",
".",
"split",
"==",
"val",
"video",
"list",
"of",
"tuples",
":",
"(",
"image",
"target",
")",
"else",
":",
"tuple",
":",
"(",
"image",
"target",
")",
"target",
"is",
"label",
"tensor"
] |
def __getitem__(self, index):
"""
Args:
index (int): Index
Returns:
if self.split == 'val video'
list of tuples: (image, target)
else:
tuple: (image, target) target is label tensor
"""
try:
if self.split == 'val_video':
print('preparing video across {} locations'.format(self.test_gap))
return [self.get_item(index, shift=t)
for t in np.linspace(0, 1.0, self.test_gap)]
else:
return self.get_item(index, shift=None)
except Exception as e:
print('error getting item {}, moving on to next item'.format(index))
print(e)
return self.__getitem__((index + 1) % len(self))
|
[
"def",
"__getitem__",
"(",
"self",
",",
"index",
")",
":",
"try",
":",
"if",
"self",
".",
"split",
"==",
"'val_video'",
":",
"print",
"(",
"'preparing video across {} locations'",
".",
"format",
"(",
"self",
".",
"test_gap",
")",
")",
"return",
"[",
"self",
".",
"get_item",
"(",
"index",
",",
"shift",
"=",
"t",
")",
"for",
"t",
"in",
"np",
".",
"linspace",
"(",
"0",
",",
"1.0",
",",
"self",
".",
"test_gap",
")",
"]",
"else",
":",
"return",
"self",
".",
"get_item",
"(",
"index",
",",
"shift",
"=",
"None",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"'error getting item {}, moving on to next item'",
".",
"format",
"(",
"index",
")",
")",
"print",
"(",
"e",
")",
"return",
"self",
".",
"__getitem__",
"(",
"(",
"index",
"+",
"1",
")",
"%",
"len",
"(",
"self",
")",
")"
] |
https://github.com/gsig/PyVideoResearch/blob/46307b1a03ce670696297e2154ddee6f4e6b0b8a/datasets/dataset.py#L32-L52
|
||
nortikin/sverchok
|
7b460f01317c15f2681bfa3e337c5e7346f3711b
|
utils/turtle.py
|
python
|
Turtle.get_selected_faces_pydata
|
(self)
|
return [[vert.index for vert in face.verts] for face in self.bmesh.faces if face.select]
|
Return list of selected faces in Sverchok format.
|
Return list of selected faces in Sverchok format.
|
[
"Return",
"list",
"of",
"selected",
"faces",
"in",
"Sverchok",
"format",
"."
] |
def get_selected_faces_pydata(self):
"""
Return list of selected faces in Sverchok format.
"""
return [[vert.index for vert in face.verts] for face in self.bmesh.faces if face.select]
|
[
"def",
"get_selected_faces_pydata",
"(",
"self",
")",
":",
"return",
"[",
"[",
"vert",
".",
"index",
"for",
"vert",
"in",
"face",
".",
"verts",
"]",
"for",
"face",
"in",
"self",
".",
"bmesh",
".",
"faces",
"if",
"face",
".",
"select",
"]"
] |
https://github.com/nortikin/sverchok/blob/7b460f01317c15f2681bfa3e337c5e7346f3711b/utils/turtle.py#L620-L624
|
|
blawar/nut
|
2cf351400418399a70164987e28670309f6c9cb5
|
plugins/discord/__init__.py
|
python
|
formatSize
|
(n)
|
return str(round(n, 1)) + ' PB'
|
[] |
def formatSize(n):
if not n:
return None
for i in range(len(prefixes)):
if n < 1024:
return str(round(n, 1)) + prefixes[i];
n /= 1024;
return str(round(n, 1)) + ' PB';
|
[
"def",
"formatSize",
"(",
"n",
")",
":",
"if",
"not",
"n",
":",
"return",
"None",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"prefixes",
")",
")",
":",
"if",
"n",
"<",
"1024",
":",
"return",
"str",
"(",
"round",
"(",
"n",
",",
"1",
")",
")",
"+",
"prefixes",
"[",
"i",
"]",
"n",
"/=",
"1024",
"return",
"str",
"(",
"round",
"(",
"n",
",",
"1",
")",
")",
"+",
"' PB'"
] |
https://github.com/blawar/nut/blob/2cf351400418399a70164987e28670309f6c9cb5/plugins/discord/__init__.py#L41-L51
|
|||
securityclippy/elasticintel
|
aa08d3e9f5ab1c000128e95161139ce97ff0e334
|
ingest_feed_lambda/numpy/lib/npyio.py
|
python
|
NpzFile.iterkeys
|
(self)
|
return self.__iter__()
|
Return an iterator over the files in the archive.
|
Return an iterator over the files in the archive.
|
[
"Return",
"an",
"iterator",
"over",
"the",
"files",
"in",
"the",
"archive",
"."
] |
def iterkeys(self):
"""Return an iterator over the files in the archive."""
return self.__iter__()
|
[
"def",
"iterkeys",
"(",
"self",
")",
":",
"return",
"self",
".",
"__iter__",
"(",
")"
] |
https://github.com/securityclippy/elasticintel/blob/aa08d3e9f5ab1c000128e95161139ce97ff0e334/ingest_feed_lambda/numpy/lib/npyio.py#L258-L260
|
|
asyml/texar-pytorch
|
b83d3ec17e19da08fc5f81996d02f91176e55e54
|
texar/torch/utils/utils.py
|
python
|
flatten_dict
|
(dict_: AnyDict, parent_key: str = "", sep: str = ".")
|
return dict(items)
|
r"""Flattens a nested dictionary. Namedtuples within the dictionary are
also converted to dictionaries.
Adapted from:
https://github.com/google/seq2seq/blob/master/seq2seq/models/model_base.py
Args:
dict\_ (dict): The dictionary to flatten.
parent_key (str): A prefix to prepend to each key.
sep (str): Separator that intervenes between parent and child keys.
For example, if :attr:`sep` == ``"."``, then ``{ "a": { "b": 3 } }``
is converted into ``{ "a.b": 3 }``.
Returns:
A new flattened ``dict``.
|
r"""Flattens a nested dictionary. Namedtuples within the dictionary are
also converted to dictionaries.
|
[
"r",
"Flattens",
"a",
"nested",
"dictionary",
".",
"Namedtuples",
"within",
"the",
"dictionary",
"are",
"also",
"converted",
"to",
"dictionaries",
"."
] |
def flatten_dict(dict_: AnyDict, parent_key: str = "", sep: str = "."):
r"""Flattens a nested dictionary. Namedtuples within the dictionary are
also converted to dictionaries.
Adapted from:
https://github.com/google/seq2seq/blob/master/seq2seq/models/model_base.py
Args:
dict\_ (dict): The dictionary to flatten.
parent_key (str): A prefix to prepend to each key.
sep (str): Separator that intervenes between parent and child keys.
For example, if :attr:`sep` == ``"."``, then ``{ "a": { "b": 3 } }``
is converted into ``{ "a.b": 3 }``.
Returns:
A new flattened ``dict``.
"""
items: List[Tuple[str, Any]] = []
for key, value in dict_.items():
key_ = parent_key + sep + key if parent_key else key
if isinstance(value, MutableMapping):
items.extend(flatten_dict(value, key_, sep=sep).items())
elif isinstance(value, tuple) and hasattr(value, '_asdict'):
# namedtuple
dict_items = collections.OrderedDict(
zip(value._fields, value)) # type: ignore
items.extend(flatten_dict(dict_items, key_, sep=sep).items())
else:
items.append((key_, value))
return dict(items)
|
[
"def",
"flatten_dict",
"(",
"dict_",
":",
"AnyDict",
",",
"parent_key",
":",
"str",
"=",
"\"\"",
",",
"sep",
":",
"str",
"=",
"\".\"",
")",
":",
"items",
":",
"List",
"[",
"Tuple",
"[",
"str",
",",
"Any",
"]",
"]",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"dict_",
".",
"items",
"(",
")",
":",
"key_",
"=",
"parent_key",
"+",
"sep",
"+",
"key",
"if",
"parent_key",
"else",
"key",
"if",
"isinstance",
"(",
"value",
",",
"MutableMapping",
")",
":",
"items",
".",
"extend",
"(",
"flatten_dict",
"(",
"value",
",",
"key_",
",",
"sep",
"=",
"sep",
")",
".",
"items",
"(",
")",
")",
"elif",
"isinstance",
"(",
"value",
",",
"tuple",
")",
"and",
"hasattr",
"(",
"value",
",",
"'_asdict'",
")",
":",
"# namedtuple",
"dict_items",
"=",
"collections",
".",
"OrderedDict",
"(",
"zip",
"(",
"value",
".",
"_fields",
",",
"value",
")",
")",
"# type: ignore",
"items",
".",
"extend",
"(",
"flatten_dict",
"(",
"dict_items",
",",
"key_",
",",
"sep",
"=",
"sep",
")",
".",
"items",
"(",
")",
")",
"else",
":",
"items",
".",
"append",
"(",
"(",
"key_",
",",
"value",
")",
")",
"return",
"dict",
"(",
"items",
")"
] |
https://github.com/asyml/texar-pytorch/blob/b83d3ec17e19da08fc5f81996d02f91176e55e54/texar/torch/utils/utils.py#L793-L822
|
|
nkolot/SPIN
|
5c796852ca7ca7373e104e8489aa5864323fbf84
|
models/hmr.py
|
python
|
HMR._make_layer
|
(self, block, planes, blocks, stride=1)
|
return nn.Sequential(*layers)
|
[] |
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
|
[
"def",
"_make_layer",
"(",
"self",
",",
"block",
",",
"planes",
",",
"blocks",
",",
"stride",
"=",
"1",
")",
":",
"downsample",
"=",
"None",
"if",
"stride",
"!=",
"1",
"or",
"self",
".",
"inplanes",
"!=",
"planes",
"*",
"block",
".",
"expansion",
":",
"downsample",
"=",
"nn",
".",
"Sequential",
"(",
"nn",
".",
"Conv2d",
"(",
"self",
".",
"inplanes",
",",
"planes",
"*",
"block",
".",
"expansion",
",",
"kernel_size",
"=",
"1",
",",
"stride",
"=",
"stride",
",",
"bias",
"=",
"False",
")",
",",
"nn",
".",
"BatchNorm2d",
"(",
"planes",
"*",
"block",
".",
"expansion",
")",
",",
")",
"layers",
"=",
"[",
"]",
"layers",
".",
"append",
"(",
"block",
"(",
"self",
".",
"inplanes",
",",
"planes",
",",
"stride",
",",
"downsample",
")",
")",
"self",
".",
"inplanes",
"=",
"planes",
"*",
"block",
".",
"expansion",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"blocks",
")",
":",
"layers",
".",
"append",
"(",
"block",
"(",
"self",
".",
"inplanes",
",",
"planes",
")",
")",
"return",
"nn",
".",
"Sequential",
"(",
"*",
"layers",
")"
] |
https://github.com/nkolot/SPIN/blob/5c796852ca7ca7373e104e8489aa5864323fbf84/models/hmr.py#L95-L110
|
|||
omz/PythonistaAppTemplate
|
f560f93f8876d82a21d108977f90583df08d55af
|
PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/mpl_toolkits/gtktools.py
|
python
|
simple_message
|
(msg, parent=None, title=None)
|
return None
|
create a simple message dialog with string msg. Optionally set
the parent widget and dialog title
|
create a simple message dialog with string msg. Optionally set
the parent widget and dialog title
|
[
"create",
"a",
"simple",
"message",
"dialog",
"with",
"string",
"msg",
".",
"Optionally",
"set",
"the",
"parent",
"widget",
"and",
"dialog",
"title"
] |
def simple_message(msg, parent=None, title=None):
"""
create a simple message dialog with string msg. Optionally set
the parent widget and dialog title
"""
dialog = gtk.MessageDialog(
parent = None,
type = gtk.MESSAGE_INFO,
buttons = gtk.BUTTONS_OK,
message_format = msg)
if parent is not None:
dialog.set_transient_for(parent)
if title is not None:
dialog.set_title(title)
dialog.show()
dialog.run()
dialog.destroy()
return None
|
[
"def",
"simple_message",
"(",
"msg",
",",
"parent",
"=",
"None",
",",
"title",
"=",
"None",
")",
":",
"dialog",
"=",
"gtk",
".",
"MessageDialog",
"(",
"parent",
"=",
"None",
",",
"type",
"=",
"gtk",
".",
"MESSAGE_INFO",
",",
"buttons",
"=",
"gtk",
".",
"BUTTONS_OK",
",",
"message_format",
"=",
"msg",
")",
"if",
"parent",
"is",
"not",
"None",
":",
"dialog",
".",
"set_transient_for",
"(",
"parent",
")",
"if",
"title",
"is",
"not",
"None",
":",
"dialog",
".",
"set_title",
"(",
"title",
")",
"dialog",
".",
"show",
"(",
")",
"dialog",
".",
"run",
"(",
")",
"dialog",
".",
"destroy",
"(",
")",
"return",
"None"
] |
https://github.com/omz/PythonistaAppTemplate/blob/f560f93f8876d82a21d108977f90583df08d55af/PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/mpl_toolkits/gtktools.py#L64-L81
|
|
khalim19/gimp-plugin-export-layers
|
b37255f2957ad322f4d332689052351cdea6e563
|
export_layers/pygimplib/_lib/python_standard_modules/logging/__init__.py
|
python
|
Manager.getLogger
|
(self, name)
|
return rv
|
Get a logger with the specified name (channel name), creating it
if it doesn't yet exist. This name is a dot-separated hierarchical
name, such as "a", "a.b", "a.b.c" or similar.
If a PlaceHolder existed for the specified name [i.e. the logger
didn't exist but a child of it did], replace it with the created
logger and fix up the parent/child references which pointed to the
placeholder to now point to the logger.
|
Get a logger with the specified name (channel name), creating it
if it doesn't yet exist. This name is a dot-separated hierarchical
name, such as "a", "a.b", "a.b.c" or similar.
|
[
"Get",
"a",
"logger",
"with",
"the",
"specified",
"name",
"(",
"channel",
"name",
")",
"creating",
"it",
"if",
"it",
"doesn",
"t",
"yet",
"exist",
".",
"This",
"name",
"is",
"a",
"dot",
"-",
"separated",
"hierarchical",
"name",
"such",
"as",
"a",
"a",
".",
"b",
"a",
".",
"b",
".",
"c",
"or",
"similar",
"."
] |
def getLogger(self, name):
"""
Get a logger with the specified name (channel name), creating it
if it doesn't yet exist. This name is a dot-separated hierarchical
name, such as "a", "a.b", "a.b.c" or similar.
If a PlaceHolder existed for the specified name [i.e. the logger
didn't exist but a child of it did], replace it with the created
logger and fix up the parent/child references which pointed to the
placeholder to now point to the logger.
"""
rv = None
if not isinstance(name, basestring):
raise TypeError('A logger name must be string or Unicode')
if isinstance(name, unicode):
name = name.encode('utf-8')
_acquireLock()
try:
if name in self.loggerDict:
rv = self.loggerDict[name]
if isinstance(rv, PlaceHolder):
ph = rv
rv = (self.loggerClass or _loggerClass)(name)
rv.manager = self
self.loggerDict[name] = rv
self._fixupChildren(ph, rv)
self._fixupParents(rv)
else:
rv = (self.loggerClass or _loggerClass)(name)
rv.manager = self
self.loggerDict[name] = rv
self._fixupParents(rv)
finally:
_releaseLock()
return rv
|
[
"def",
"getLogger",
"(",
"self",
",",
"name",
")",
":",
"rv",
"=",
"None",
"if",
"not",
"isinstance",
"(",
"name",
",",
"basestring",
")",
":",
"raise",
"TypeError",
"(",
"'A logger name must be string or Unicode'",
")",
"if",
"isinstance",
"(",
"name",
",",
"unicode",
")",
":",
"name",
"=",
"name",
".",
"encode",
"(",
"'utf-8'",
")",
"_acquireLock",
"(",
")",
"try",
":",
"if",
"name",
"in",
"self",
".",
"loggerDict",
":",
"rv",
"=",
"self",
".",
"loggerDict",
"[",
"name",
"]",
"if",
"isinstance",
"(",
"rv",
",",
"PlaceHolder",
")",
":",
"ph",
"=",
"rv",
"rv",
"=",
"(",
"self",
".",
"loggerClass",
"or",
"_loggerClass",
")",
"(",
"name",
")",
"rv",
".",
"manager",
"=",
"self",
"self",
".",
"loggerDict",
"[",
"name",
"]",
"=",
"rv",
"self",
".",
"_fixupChildren",
"(",
"ph",
",",
"rv",
")",
"self",
".",
"_fixupParents",
"(",
"rv",
")",
"else",
":",
"rv",
"=",
"(",
"self",
".",
"loggerClass",
"or",
"_loggerClass",
")",
"(",
"name",
")",
"rv",
".",
"manager",
"=",
"self",
"self",
".",
"loggerDict",
"[",
"name",
"]",
"=",
"rv",
"self",
".",
"_fixupParents",
"(",
"rv",
")",
"finally",
":",
"_releaseLock",
"(",
")",
"return",
"rv"
] |
https://github.com/khalim19/gimp-plugin-export-layers/blob/b37255f2957ad322f4d332689052351cdea6e563/export_layers/pygimplib/_lib/python_standard_modules/logging/__init__.py#L1025-L1059
|
|
deepchem/deepchem
|
054eb4b2b082e3df8e1a8e77f36a52137ae6e375
|
deepchem/feat/complex_featurizers/grid_featurizers.py
|
python
|
HydrogenBondCounter._featurize
|
(self, datapoint, **kwargs)
|
return np.concatenate(pairwise_features, axis=-1)
|
Compute featurization for a single mol/protein complex
Parameters
----------
datapoint: Tuple[str, str]
Filenames for molecule and protein.
|
Compute featurization for a single mol/protein complex
|
[
"Compute",
"featurization",
"for",
"a",
"single",
"mol",
"/",
"protein",
"complex"
] |
def _featurize(self, datapoint, **kwargs): # -> Optional[np.ndarray]:
"""
Compute featurization for a single mol/protein complex
Parameters
----------
datapoint: Tuple[str, str]
Filenames for molecule and protein.
"""
if 'complex' in kwargs:
datapoint = kwargs.get("complex")
raise DeprecationWarning(
'Complex is being phased out as a parameter, please pass "datapoint" instead.'
)
try:
fragments = rdkit_utils.load_complex(datapoint, add_hydrogens=False)
except MoleculeLoadException:
logger.warning("This molecule cannot be loaded by Rdkit. Returning None")
return None
pairwise_features = []
# We compute pairwise contact fingerprints
# centroid = compute_contact_centroid(fragments, cutoff=self.cutoff)
if self.reduce_to_contacts:
fragments = reduce_molecular_complex_to_contacts(fragments, self.cutoff)
# We compute pairwise contact fingerprints
for (frag1_ind, frag2_ind) in itertools.combinations(
range(len(fragments)), 2):
frag1, frag2 = fragments[frag1_ind], fragments[frag2_ind]
distances = compute_pairwise_distances(frag1[0], frag2[0])
# frag1_xyz = subtract_centroid(frag1[0], centroid)
# frag2_xyz = subtract_centroid(frag2[0], centroid)
# xyzs = [frag1_xyz, frag2_xyz]
# rdks = [frag1[1], frag2[1]]
pairwise_features.append(
np.concatenate(
[
np.array([len(hbond_list)])
for hbond_list in compute_hydrogen_bonds(
frag1, frag2, distances, self.distance_bins,
self.angle_cutoffs)
],
axis=-1))
# Features are of shape (voxels_per_edge, voxels_per_edge, voxels_per_edge, 1) so we should concatenate on the last axis.
return np.concatenate(pairwise_features, axis=-1)
|
[
"def",
"_featurize",
"(",
"self",
",",
"datapoint",
",",
"*",
"*",
"kwargs",
")",
":",
"# -> Optional[np.ndarray]:",
"if",
"'complex'",
"in",
"kwargs",
":",
"datapoint",
"=",
"kwargs",
".",
"get",
"(",
"\"complex\"",
")",
"raise",
"DeprecationWarning",
"(",
"'Complex is being phased out as a parameter, please pass \"datapoint\" instead.'",
")",
"try",
":",
"fragments",
"=",
"rdkit_utils",
".",
"load_complex",
"(",
"datapoint",
",",
"add_hydrogens",
"=",
"False",
")",
"except",
"MoleculeLoadException",
":",
"logger",
".",
"warning",
"(",
"\"This molecule cannot be loaded by Rdkit. Returning None\"",
")",
"return",
"None",
"pairwise_features",
"=",
"[",
"]",
"# We compute pairwise contact fingerprints",
"# centroid = compute_contact_centroid(fragments, cutoff=self.cutoff)",
"if",
"self",
".",
"reduce_to_contacts",
":",
"fragments",
"=",
"reduce_molecular_complex_to_contacts",
"(",
"fragments",
",",
"self",
".",
"cutoff",
")",
"# We compute pairwise contact fingerprints",
"for",
"(",
"frag1_ind",
",",
"frag2_ind",
")",
"in",
"itertools",
".",
"combinations",
"(",
"range",
"(",
"len",
"(",
"fragments",
")",
")",
",",
"2",
")",
":",
"frag1",
",",
"frag2",
"=",
"fragments",
"[",
"frag1_ind",
"]",
",",
"fragments",
"[",
"frag2_ind",
"]",
"distances",
"=",
"compute_pairwise_distances",
"(",
"frag1",
"[",
"0",
"]",
",",
"frag2",
"[",
"0",
"]",
")",
"# frag1_xyz = subtract_centroid(frag1[0], centroid)",
"# frag2_xyz = subtract_centroid(frag2[0], centroid)",
"# xyzs = [frag1_xyz, frag2_xyz]",
"# rdks = [frag1[1], frag2[1]]",
"pairwise_features",
".",
"append",
"(",
"np",
".",
"concatenate",
"(",
"[",
"np",
".",
"array",
"(",
"[",
"len",
"(",
"hbond_list",
")",
"]",
")",
"for",
"hbond_list",
"in",
"compute_hydrogen_bonds",
"(",
"frag1",
",",
"frag2",
",",
"distances",
",",
"self",
".",
"distance_bins",
",",
"self",
".",
"angle_cutoffs",
")",
"]",
",",
"axis",
"=",
"-",
"1",
")",
")",
"# Features are of shape (voxels_per_edge, voxels_per_edge, voxels_per_edge, 1) so we should concatenate on the last axis.",
"return",
"np",
".",
"concatenate",
"(",
"pairwise_features",
",",
"axis",
"=",
"-",
"1",
")"
] |
https://github.com/deepchem/deepchem/blob/054eb4b2b082e3df8e1a8e77f36a52137ae6e375/deepchem/feat/complex_featurizers/grid_featurizers.py#L482-L526
|
|
enthought/mayavi
|
2103a273568b8f0bd62328801aafbd6252543ae8
|
examples/mayavi/advanced_visualization/mlab_3D_to_2D.py
|
python
|
apply_transform_to_points
|
(points, trans_mat)
|
return np.dot(trans_mat, points.T).T
|
a function that applies a 4x4 transformation matrix to an of
homogeneous points. The array of points should have shape Nx4
|
a function that applies a 4x4 transformation matrix to an of
homogeneous points. The array of points should have shape Nx4
|
[
"a",
"function",
"that",
"applies",
"a",
"4x4",
"transformation",
"matrix",
"to",
"an",
"of",
"homogeneous",
"points",
".",
"The",
"array",
"of",
"points",
"should",
"have",
"shape",
"Nx4"
] |
def apply_transform_to_points(points, trans_mat):
"""a function that applies a 4x4 transformation matrix to an of
homogeneous points. The array of points should have shape Nx4"""
if not trans_mat.shape == (4, 4):
raise ValueError('transform matrix must be 4x4')
if not points.shape[1] == 4:
raise ValueError('point array must have shape Nx4')
return np.dot(trans_mat, points.T).T
|
[
"def",
"apply_transform_to_points",
"(",
"points",
",",
"trans_mat",
")",
":",
"if",
"not",
"trans_mat",
".",
"shape",
"==",
"(",
"4",
",",
"4",
")",
":",
"raise",
"ValueError",
"(",
"'transform matrix must be 4x4'",
")",
"if",
"not",
"points",
".",
"shape",
"[",
"1",
"]",
"==",
"4",
":",
"raise",
"ValueError",
"(",
"'point array must have shape Nx4'",
")",
"return",
"np",
".",
"dot",
"(",
"trans_mat",
",",
"points",
".",
"T",
")",
".",
"T"
] |
https://github.com/enthought/mayavi/blob/2103a273568b8f0bd62328801aafbd6252543ae8/examples/mayavi/advanced_visualization/mlab_3D_to_2D.py#L175-L185
|
|
1012598167/flask_mongodb_game
|
60c7e0351586656ec38f851592886338e50b4110
|
python_flask/venv/Lib/site-packages/click/core.py
|
python
|
Context.find_root
|
(self)
|
return node
|
Finds the outermost context.
|
Finds the outermost context.
|
[
"Finds",
"the",
"outermost",
"context",
"."
] |
def find_root(self):
"""Finds the outermost context."""
node = self
while node.parent is not None:
node = node.parent
return node
|
[
"def",
"find_root",
"(",
"self",
")",
":",
"node",
"=",
"self",
"while",
"node",
".",
"parent",
"is",
"not",
"None",
":",
"node",
"=",
"node",
".",
"parent",
"return",
"node"
] |
https://github.com/1012598167/flask_mongodb_game/blob/60c7e0351586656ec38f851592886338e50b4110/python_flask/venv/Lib/site-packages/click/core.py#L456-L461
|
|
kivy/plyer
|
7a71707bdf99979cdacf2240d823aefa13d18f00
|
plyer/platforms/win/libs/wifi_defs.py
|
python
|
_connect
|
(network, parameters)
|
Attempts to connect to a specific network.
|
Attempts to connect to a specific network.
|
[
"Attempts",
"to",
"connect",
"to",
"a",
"specific",
"network",
"."
] |
def _connect(network, parameters):
'''
Attempts to connect to a specific network.
'''
global _dict
wireless_interface = _dict[network]
wcp = WLAN_CONNECTION_PARAMETERS()
connection_mode = parameters['connection_mode']
wcp.wlanConnectionMode = WLAN_CONNECTION_MODE(connection_mode)
if connection_mode == 0 or connection_mode == 1:
wcp.strProfile = LPCWSTR(parameters["profile"])
else:
wcp.strProfile = None
dot11Ssid = DOT11_SSID()
try:
dot11Ssid.SSID = parameters["ssid"]
dot11Ssid.SSIDLength = len(parameters["ssid"])
except KeyError:
dot11Ssid.SSID = network
dot11Ssid.SSIDLength = len(network)
wcp.pDot11Ssid = pointer(dot11Ssid)
dot11bssid = DOT11_BSSID_LIST()
bssid = parameters["bssidList"]
dot11bssid.Header = bssid['Header']
dot11bssid.uNumOfEntries = bssid['uNumOfEntries']
dot11bssid.uTotalNumOfEntries = bssid['uTotalNumOfEntries']
dot11bssid.BSSIDs = bssid['BSSIDs']
wcp.pDesiredBssidList = pointer(dot11bssid)
bssType = parameters["bssType"]
wcp.dot11BssType = DOT11_BSS_TYPE(bssType)
wcp.dwFlags = DWORD(parameters["flags"])
NegotiatedVersion = DWORD()
ClientHandle = HANDLE()
wlan = WlanOpenHandle(1,
None,
byref(NegotiatedVersion),
byref(ClientHandle))
if wlan:
sys_exit(FormatError(wlan))
pInterfaceList = pointer(WLAN_INTERFACE_INFO_LIST())
wlan = WlanEnumInterfaces(ClientHandle, None, byref(pInterfaceList))
if wlan:
sys_exit(FormatError(wlan))
try:
wlan = WlanConnect(ClientHandle,
wireless_interface,
wcp,
None)
if wlan:
sys_exit(FormatError(wlan))
WlanCloseHandle(ClientHandle, None)
finally:
WlanFreeMemory(pInterfaceList)
|
[
"def",
"_connect",
"(",
"network",
",",
"parameters",
")",
":",
"global",
"_dict",
"wireless_interface",
"=",
"_dict",
"[",
"network",
"]",
"wcp",
"=",
"WLAN_CONNECTION_PARAMETERS",
"(",
")",
"connection_mode",
"=",
"parameters",
"[",
"'connection_mode'",
"]",
"wcp",
".",
"wlanConnectionMode",
"=",
"WLAN_CONNECTION_MODE",
"(",
"connection_mode",
")",
"if",
"connection_mode",
"==",
"0",
"or",
"connection_mode",
"==",
"1",
":",
"wcp",
".",
"strProfile",
"=",
"LPCWSTR",
"(",
"parameters",
"[",
"\"profile\"",
"]",
")",
"else",
":",
"wcp",
".",
"strProfile",
"=",
"None",
"dot11Ssid",
"=",
"DOT11_SSID",
"(",
")",
"try",
":",
"dot11Ssid",
".",
"SSID",
"=",
"parameters",
"[",
"\"ssid\"",
"]",
"dot11Ssid",
".",
"SSIDLength",
"=",
"len",
"(",
"parameters",
"[",
"\"ssid\"",
"]",
")",
"except",
"KeyError",
":",
"dot11Ssid",
".",
"SSID",
"=",
"network",
"dot11Ssid",
".",
"SSIDLength",
"=",
"len",
"(",
"network",
")",
"wcp",
".",
"pDot11Ssid",
"=",
"pointer",
"(",
"dot11Ssid",
")",
"dot11bssid",
"=",
"DOT11_BSSID_LIST",
"(",
")",
"bssid",
"=",
"parameters",
"[",
"\"bssidList\"",
"]",
"dot11bssid",
".",
"Header",
"=",
"bssid",
"[",
"'Header'",
"]",
"dot11bssid",
".",
"uNumOfEntries",
"=",
"bssid",
"[",
"'uNumOfEntries'",
"]",
"dot11bssid",
".",
"uTotalNumOfEntries",
"=",
"bssid",
"[",
"'uTotalNumOfEntries'",
"]",
"dot11bssid",
".",
"BSSIDs",
"=",
"bssid",
"[",
"'BSSIDs'",
"]",
"wcp",
".",
"pDesiredBssidList",
"=",
"pointer",
"(",
"dot11bssid",
")",
"bssType",
"=",
"parameters",
"[",
"\"bssType\"",
"]",
"wcp",
".",
"dot11BssType",
"=",
"DOT11_BSS_TYPE",
"(",
"bssType",
")",
"wcp",
".",
"dwFlags",
"=",
"DWORD",
"(",
"parameters",
"[",
"\"flags\"",
"]",
")",
"NegotiatedVersion",
"=",
"DWORD",
"(",
")",
"ClientHandle",
"=",
"HANDLE",
"(",
")",
"wlan",
"=",
"WlanOpenHandle",
"(",
"1",
",",
"None",
",",
"byref",
"(",
"NegotiatedVersion",
")",
",",
"byref",
"(",
"ClientHandle",
")",
")",
"if",
"wlan",
":",
"sys_exit",
"(",
"FormatError",
"(",
"wlan",
")",
")",
"pInterfaceList",
"=",
"pointer",
"(",
"WLAN_INTERFACE_INFO_LIST",
"(",
")",
")",
"wlan",
"=",
"WlanEnumInterfaces",
"(",
"ClientHandle",
",",
"None",
",",
"byref",
"(",
"pInterfaceList",
")",
")",
"if",
"wlan",
":",
"sys_exit",
"(",
"FormatError",
"(",
"wlan",
")",
")",
"try",
":",
"wlan",
"=",
"WlanConnect",
"(",
"ClientHandle",
",",
"wireless_interface",
",",
"wcp",
",",
"None",
")",
"if",
"wlan",
":",
"sys_exit",
"(",
"FormatError",
"(",
"wlan",
")",
")",
"WlanCloseHandle",
"(",
"ClientHandle",
",",
"None",
")",
"finally",
":",
"WlanFreeMemory",
"(",
"pInterfaceList",
")"
] |
https://github.com/kivy/plyer/blob/7a71707bdf99979cdacf2240d823aefa13d18f00/plyer/platforms/win/libs/wifi_defs.py#L289-L351
|
||
theotherp/nzbhydra
|
4b03d7f769384b97dfc60dade4806c0fc987514e
|
libs/cffi/api.py
|
python
|
FFI.verify
|
(self, source='', tmpdir=None, **kwargs)
|
return lib
|
Verify that the current ffi signatures compile on this
machine, and return a dynamic library object. The dynamic
library can be used to call functions and access global
variables declared in this 'ffi'. The library is compiled
by the C compiler: it gives you C-level API compatibility
(including calling macros). This is unlike 'ffi.dlopen()',
which requires binary compatibility in the signatures.
|
Verify that the current ffi signatures compile on this
machine, and return a dynamic library object. The dynamic
library can be used to call functions and access global
variables declared in this 'ffi'. The library is compiled
by the C compiler: it gives you C-level API compatibility
(including calling macros). This is unlike 'ffi.dlopen()',
which requires binary compatibility in the signatures.
|
[
"Verify",
"that",
"the",
"current",
"ffi",
"signatures",
"compile",
"on",
"this",
"machine",
"and",
"return",
"a",
"dynamic",
"library",
"object",
".",
"The",
"dynamic",
"library",
"can",
"be",
"used",
"to",
"call",
"functions",
"and",
"access",
"global",
"variables",
"declared",
"in",
"this",
"ffi",
".",
"The",
"library",
"is",
"compiled",
"by",
"the",
"C",
"compiler",
":",
"it",
"gives",
"you",
"C",
"-",
"level",
"API",
"compatibility",
"(",
"including",
"calling",
"macros",
")",
".",
"This",
"is",
"unlike",
"ffi",
".",
"dlopen",
"()",
"which",
"requires",
"binary",
"compatibility",
"in",
"the",
"signatures",
"."
] |
def verify(self, source='', tmpdir=None, **kwargs):
"""Verify that the current ffi signatures compile on this
machine, and return a dynamic library object. The dynamic
library can be used to call functions and access global
variables declared in this 'ffi'. The library is compiled
by the C compiler: it gives you C-level API compatibility
(including calling macros). This is unlike 'ffi.dlopen()',
which requires binary compatibility in the signatures.
"""
from .verifier import Verifier, _caller_dir_pycache
#
# If set_unicode(True) was called, insert the UNICODE and
# _UNICODE macro declarations
if self._windows_unicode:
self._apply_windows_unicode(kwargs)
#
# Set the tmpdir here, and not in Verifier.__init__: it picks
# up the caller's directory, which we want to be the caller of
# ffi.verify(), as opposed to the caller of Veritier().
tmpdir = tmpdir or _caller_dir_pycache()
#
# Make a Verifier() and use it to load the library.
self.verifier = Verifier(self, source, tmpdir, **kwargs)
lib = self.verifier.load_library()
#
# Save the loaded library for keep-alive purposes, even
# if the caller doesn't keep it alive itself (it should).
self._libraries.append(lib)
return lib
|
[
"def",
"verify",
"(",
"self",
",",
"source",
"=",
"''",
",",
"tmpdir",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
"verifier",
"import",
"Verifier",
",",
"_caller_dir_pycache",
"#",
"# If set_unicode(True) was called, insert the UNICODE and",
"# _UNICODE macro declarations",
"if",
"self",
".",
"_windows_unicode",
":",
"self",
".",
"_apply_windows_unicode",
"(",
"kwargs",
")",
"#",
"# Set the tmpdir here, and not in Verifier.__init__: it picks",
"# up the caller's directory, which we want to be the caller of",
"# ffi.verify(), as opposed to the caller of Veritier().",
"tmpdir",
"=",
"tmpdir",
"or",
"_caller_dir_pycache",
"(",
")",
"#",
"# Make a Verifier() and use it to load the library.",
"self",
".",
"verifier",
"=",
"Verifier",
"(",
"self",
",",
"source",
",",
"tmpdir",
",",
"*",
"*",
"kwargs",
")",
"lib",
"=",
"self",
".",
"verifier",
".",
"load_library",
"(",
")",
"#",
"# Save the loaded library for keep-alive purposes, even",
"# if the caller doesn't keep it alive itself (it should).",
"self",
".",
"_libraries",
".",
"append",
"(",
"lib",
")",
"return",
"lib"
] |
https://github.com/theotherp/nzbhydra/blob/4b03d7f769384b97dfc60dade4806c0fc987514e/libs/cffi/api.py#L398-L426
|
|
robhagemans/pcbasic
|
c3a043b46af66623a801e18a38175be077251ada
|
pcbasic/config.py
|
python
|
ArgumentParser._parse_args
|
(self, remaining)
|
return args
|
Process command line options.
|
Process command line options.
|
[
"Process",
"command",
"line",
"options",
"."
] |
def _parse_args(self, remaining):
"""Process command line options."""
# set arguments
known = list(ARGUMENTS.keys()) + list(range(NUM_POSITIONAL))
args = {d: remaining[d] for d in remaining if d in known}
not_recognised = {d: remaining[d] for d in remaining if d not in known}
for d in not_recognised:
if not_recognised[d]:
if isinstance(d, int):
logging.warning(
u'Ignored surplus positional command-line argument #%s: `%s`', d, not_recognised[d]
)
else:
logging.warning(
u'Ignored unrecognised command-line argument `%s=%s`', d, not_recognised[d]
)
else:
logging.warning(u'Ignored unrecognised command-line argument `%s`', d)
return args
|
[
"def",
"_parse_args",
"(",
"self",
",",
"remaining",
")",
":",
"# set arguments",
"known",
"=",
"list",
"(",
"ARGUMENTS",
".",
"keys",
"(",
")",
")",
"+",
"list",
"(",
"range",
"(",
"NUM_POSITIONAL",
")",
")",
"args",
"=",
"{",
"d",
":",
"remaining",
"[",
"d",
"]",
"for",
"d",
"in",
"remaining",
"if",
"d",
"in",
"known",
"}",
"not_recognised",
"=",
"{",
"d",
":",
"remaining",
"[",
"d",
"]",
"for",
"d",
"in",
"remaining",
"if",
"d",
"not",
"in",
"known",
"}",
"for",
"d",
"in",
"not_recognised",
":",
"if",
"not_recognised",
"[",
"d",
"]",
":",
"if",
"isinstance",
"(",
"d",
",",
"int",
")",
":",
"logging",
".",
"warning",
"(",
"u'Ignored surplus positional command-line argument #%s: `%s`'",
",",
"d",
",",
"not_recognised",
"[",
"d",
"]",
")",
"else",
":",
"logging",
".",
"warning",
"(",
"u'Ignored unrecognised command-line argument `%s=%s`'",
",",
"d",
",",
"not_recognised",
"[",
"d",
"]",
")",
"else",
":",
"logging",
".",
"warning",
"(",
"u'Ignored unrecognised command-line argument `%s`'",
",",
"d",
")",
"return",
"args"
] |
https://github.com/robhagemans/pcbasic/blob/c3a043b46af66623a801e18a38175be077251ada/pcbasic/config.py#L1113-L1131
|
|
beeware/ouroboros
|
a29123c6fab6a807caffbb7587cf548e0c370296
|
ouroboros/logging/__init__.py
|
python
|
Logger.info
|
(self, msg, *args, **kwargs)
|
Log 'msg % args' with severity 'INFO'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
|
Log 'msg % args' with severity 'INFO'.
|
[
"Log",
"msg",
"%",
"args",
"with",
"severity",
"INFO",
"."
] |
def info(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'INFO'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
"""
if self.isEnabledFor(INFO):
self._log(INFO, msg, args, **kwargs)
|
[
"def",
"info",
"(",
"self",
",",
"msg",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"isEnabledFor",
"(",
"INFO",
")",
":",
"self",
".",
"_log",
"(",
"INFO",
",",
"msg",
",",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/beeware/ouroboros/blob/a29123c6fab6a807caffbb7587cf548e0c370296/ouroboros/logging/__init__.py#L1269-L1279
|
||
tp4a/teleport
|
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
|
server/www/packages/packages-darwin/x64/qrcode/image/pure.py
|
python
|
PymagingImage.check_kind
|
(self, kind, transform=None, **kwargs)
|
return super(PymagingImage, self).check_kind(
kind, transform=transform, **kwargs)
|
pymaging (pymaging_png at least) uses lower case for the type.
|
pymaging (pymaging_png at least) uses lower case for the type.
|
[
"pymaging",
"(",
"pymaging_png",
"at",
"least",
")",
"uses",
"lower",
"case",
"for",
"the",
"type",
"."
] |
def check_kind(self, kind, transform=None, **kwargs):
"""
pymaging (pymaging_png at least) uses lower case for the type.
"""
if transform is None:
transform = lambda x: x.lower()
return super(PymagingImage, self).check_kind(
kind, transform=transform, **kwargs)
|
[
"def",
"check_kind",
"(",
"self",
",",
"kind",
",",
"transform",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"transform",
"is",
"None",
":",
"transform",
"=",
"lambda",
"x",
":",
"x",
".",
"lower",
"(",
")",
"return",
"super",
"(",
"PymagingImage",
",",
"self",
")",
".",
"check_kind",
"(",
"kind",
",",
"transform",
"=",
"transform",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-darwin/x64/qrcode/image/pure.py#L42-L49
|
|
google/clusterfuzz
|
f358af24f414daa17a3649b143e71ea71871ef59
|
src/clusterfuzz/_internal/bot/fuzzers/libfuzzer.py
|
python
|
use_radamsa_mutator_plugin
|
(extra_env)
|
return True
|
Decide whether to use Radamsa in process. If yes, add the path to the
radamsa shared object to LD_PRELOAD in |extra_env| and return True.
|
Decide whether to use Radamsa in process. If yes, add the path to the
radamsa shared object to LD_PRELOAD in |extra_env| and return True.
|
[
"Decide",
"whether",
"to",
"use",
"Radamsa",
"in",
"process",
".",
"If",
"yes",
"add",
"the",
"path",
"to",
"the",
"radamsa",
"shared",
"object",
"to",
"LD_PRELOAD",
"in",
"|extra_env|",
"and",
"return",
"True",
"."
] |
def use_radamsa_mutator_plugin(extra_env):
"""Decide whether to use Radamsa in process. If yes, add the path to the
radamsa shared object to LD_PRELOAD in |extra_env| and return True."""
# Radamsa will only work on LINUX ASAN jobs.
# TODO(mpherman): Include architecture info in job definition and exclude
# i386.
if environment.is_lib() or not is_linux_asan():
return False
radamsa_path = os.path.join(environment.get_platform_resources_directory(),
'radamsa', 'libradamsa.so')
logs.log('Using Radamsa mutator plugin : %s' % radamsa_path)
extra_env['LD_PRELOAD'] = radamsa_path
return True
|
[
"def",
"use_radamsa_mutator_plugin",
"(",
"extra_env",
")",
":",
"# Radamsa will only work on LINUX ASAN jobs.",
"# TODO(mpherman): Include architecture info in job definition and exclude",
"# i386.",
"if",
"environment",
".",
"is_lib",
"(",
")",
"or",
"not",
"is_linux_asan",
"(",
")",
":",
"return",
"False",
"radamsa_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"environment",
".",
"get_platform_resources_directory",
"(",
")",
",",
"'radamsa'",
",",
"'libradamsa.so'",
")",
"logs",
".",
"log",
"(",
"'Using Radamsa mutator plugin : %s'",
"%",
"radamsa_path",
")",
"extra_env",
"[",
"'LD_PRELOAD'",
"]",
"=",
"radamsa_path",
"return",
"True"
] |
https://github.com/google/clusterfuzz/blob/f358af24f414daa17a3649b143e71ea71871ef59/src/clusterfuzz/_internal/bot/fuzzers/libfuzzer.py#L1883-L1897
|
|
enthought/mayavi
|
2103a273568b8f0bd62328801aafbd6252543ae8
|
tvtk/util/gradient_editor.py
|
python
|
GradientTable.update
|
(self)
|
Recalculate the gradient table from the control points. The
colors are interpolated linearly between each two control
points in hsva space.
|
Recalculate the gradient table from the control points. The
colors are interpolated linearly between each two control
points in hsva space.
|
[
"Recalculate",
"the",
"gradient",
"table",
"from",
"the",
"control",
"points",
".",
"The",
"colors",
"are",
"interpolated",
"linearly",
"between",
"each",
"two",
"control",
"points",
"in",
"hsva",
"space",
"."
] |
def update(self):
"""Recalculate the gradient table from the control points. The
colors are interpolated linearly between each two control
points in hsva space.
"""
#self.sort_control_points()
table = self.table
alpha = self.alpha
table.remove_all_points()
alpha.remove_all_points()
for point in self.control_points:
x = point.pos
h, s, v, a = point.color.get_hsva()
if point.active_channels != 'a':
table.add_hsv_point(x, h, s, v)
if 'a' in point.active_channels:
alpha.add_point(x, a)
|
[
"def",
"update",
"(",
"self",
")",
":",
"#self.sort_control_points()",
"table",
"=",
"self",
".",
"table",
"alpha",
"=",
"self",
".",
"alpha",
"table",
".",
"remove_all_points",
"(",
")",
"alpha",
".",
"remove_all_points",
"(",
")",
"for",
"point",
"in",
"self",
".",
"control_points",
":",
"x",
"=",
"point",
".",
"pos",
"h",
",",
"s",
",",
"v",
",",
"a",
"=",
"point",
".",
"color",
".",
"get_hsva",
"(",
")",
"if",
"point",
".",
"active_channels",
"!=",
"'a'",
":",
"table",
".",
"add_hsv_point",
"(",
"x",
",",
"h",
",",
"s",
",",
"v",
")",
"if",
"'a'",
"in",
"point",
".",
"active_channels",
":",
"alpha",
".",
"add_point",
"(",
"x",
",",
"a",
")"
] |
https://github.com/enthought/mayavi/blob/2103a273568b8f0bd62328801aafbd6252543ae8/tvtk/util/gradient_editor.py#L263-L280
|
||
samuelclay/NewsBlur
|
2c45209df01a1566ea105e04d499367f32ac9ad2
|
vendor/reverend/thomas.py
|
python
|
Bayes.robinson
|
(self, probs, ignore)
|
return (1 + S) / 2
|
computes the probability of a message being spam (Robinson's method)
P = 1 - prod(1-p)^(1/n)
Q = 1 - prod(p)^(1/n)
S = (1 + (P-Q)/(P+Q)) / 2
Courtesy of http://christophe.delord.free.fr/en/index.html
|
computes the probability of a message being spam (Robinson's method)
P = 1 - prod(1-p)^(1/n)
Q = 1 - prod(p)^(1/n)
S = (1 + (P-Q)/(P+Q)) / 2
Courtesy of http://christophe.delord.free.fr/en/index.html
|
[
"computes",
"the",
"probability",
"of",
"a",
"message",
"being",
"spam",
"(",
"Robinson",
"s",
"method",
")",
"P",
"=",
"1",
"-",
"prod",
"(",
"1",
"-",
"p",
")",
"^",
"(",
"1",
"/",
"n",
")",
"Q",
"=",
"1",
"-",
"prod",
"(",
"p",
")",
"^",
"(",
"1",
"/",
"n",
")",
"S",
"=",
"(",
"1",
"+",
"(",
"P",
"-",
"Q",
")",
"/",
"(",
"P",
"+",
"Q",
"))",
"/",
"2",
"Courtesy",
"of",
"http",
":",
"//",
"christophe",
".",
"delord",
".",
"free",
".",
"fr",
"/",
"en",
"/",
"index",
".",
"html"
] |
def robinson(self, probs, ignore):
""" computes the probability of a message being spam (Robinson's method)
P = 1 - prod(1-p)^(1/n)
Q = 1 - prod(p)^(1/n)
S = (1 + (P-Q)/(P+Q)) / 2
Courtesy of http://christophe.delord.free.fr/en/index.html
"""
nth = 1./len(probs)
P = 1.0 - reduce(operator.mul, [1.0-p[1] for p in probs], 1.0) ** nth
Q = 1.0 - reduce(operator.mul, [p[1] for p in probs]) ** nth
S = (P - Q) / (P + Q)
return (1 + S) / 2
|
[
"def",
"robinson",
"(",
"self",
",",
"probs",
",",
"ignore",
")",
":",
"nth",
"=",
"1.",
"/",
"len",
"(",
"probs",
")",
"P",
"=",
"1.0",
"-",
"reduce",
"(",
"operator",
".",
"mul",
",",
"[",
"1.0",
"-",
"p",
"[",
"1",
"]",
"for",
"p",
"in",
"probs",
"]",
",",
"1.0",
")",
"**",
"nth",
"Q",
"=",
"1.0",
"-",
"reduce",
"(",
"operator",
".",
"mul",
",",
"[",
"p",
"[",
"1",
"]",
"for",
"p",
"in",
"probs",
"]",
")",
"**",
"nth",
"S",
"=",
"(",
"P",
"-",
"Q",
")",
"/",
"(",
"P",
"+",
"Q",
")",
"return",
"(",
"1",
"+",
"S",
")",
"/",
"2"
] |
https://github.com/samuelclay/NewsBlur/blob/2c45209df01a1566ea105e04d499367f32ac9ad2/vendor/reverend/thomas.py#L261-L273
|
|
ring04h/wyportmap
|
c4201e2313504e780a7f25238eba2a2d3223e739
|
sqlalchemy/sql/elements.py
|
python
|
between
|
(expr, lower_bound, upper_bound, symmetric=False)
|
return expr.between(lower_bound, upper_bound, symmetric=symmetric)
|
Produce a ``BETWEEN`` predicate clause.
E.g.::
from sqlalchemy import between
stmt = select([users_table]).where(between(users_table.c.id, 5, 7))
Would produce SQL resembling::
SELECT id, name FROM user WHERE id BETWEEN :id_1 AND :id_2
The :func:`.between` function is a standalone version of the
:meth:`.ColumnElement.between` method available on all
SQL expressions, as in::
stmt = select([users_table]).where(users_table.c.id.between(5, 7))
All arguments passed to :func:`.between`, including the left side
column expression, are coerced from Python scalar values if a
the value is not a :class:`.ColumnElement` subclass. For example,
three fixed values can be compared as in::
print(between(5, 3, 7))
Which would produce::
:param_1 BETWEEN :param_2 AND :param_3
:param expr: a column expression, typically a :class:`.ColumnElement`
instance or alternatively a Python scalar expression to be coerced
into a column expression, serving as the left side of the ``BETWEEN``
expression.
:param lower_bound: a column or Python scalar expression serving as the
lower bound of the right side of the ``BETWEEN`` expression.
:param upper_bound: a column or Python scalar expression serving as the
upper bound of the right side of the ``BETWEEN`` expression.
:param symmetric: if True, will render " BETWEEN SYMMETRIC ". Note
that not all databases support this syntax.
.. versionadded:: 0.9.5
.. seealso::
:meth:`.ColumnElement.between`
|
Produce a ``BETWEEN`` predicate clause.
|
[
"Produce",
"a",
"BETWEEN",
"predicate",
"clause",
"."
] |
def between(expr, lower_bound, upper_bound, symmetric=False):
"""Produce a ``BETWEEN`` predicate clause.
E.g.::
from sqlalchemy import between
stmt = select([users_table]).where(between(users_table.c.id, 5, 7))
Would produce SQL resembling::
SELECT id, name FROM user WHERE id BETWEEN :id_1 AND :id_2
The :func:`.between` function is a standalone version of the
:meth:`.ColumnElement.between` method available on all
SQL expressions, as in::
stmt = select([users_table]).where(users_table.c.id.between(5, 7))
All arguments passed to :func:`.between`, including the left side
column expression, are coerced from Python scalar values if a
the value is not a :class:`.ColumnElement` subclass. For example,
three fixed values can be compared as in::
print(between(5, 3, 7))
Which would produce::
:param_1 BETWEEN :param_2 AND :param_3
:param expr: a column expression, typically a :class:`.ColumnElement`
instance or alternatively a Python scalar expression to be coerced
into a column expression, serving as the left side of the ``BETWEEN``
expression.
:param lower_bound: a column or Python scalar expression serving as the
lower bound of the right side of the ``BETWEEN`` expression.
:param upper_bound: a column or Python scalar expression serving as the
upper bound of the right side of the ``BETWEEN`` expression.
:param symmetric: if True, will render " BETWEEN SYMMETRIC ". Note
that not all databases support this syntax.
.. versionadded:: 0.9.5
.. seealso::
:meth:`.ColumnElement.between`
"""
expr = _literal_as_binds(expr)
return expr.between(lower_bound, upper_bound, symmetric=symmetric)
|
[
"def",
"between",
"(",
"expr",
",",
"lower_bound",
",",
"upper_bound",
",",
"symmetric",
"=",
"False",
")",
":",
"expr",
"=",
"_literal_as_binds",
"(",
"expr",
")",
"return",
"expr",
".",
"between",
"(",
"lower_bound",
",",
"upper_bound",
",",
"symmetric",
"=",
"symmetric",
")"
] |
https://github.com/ring04h/wyportmap/blob/c4201e2313504e780a7f25238eba2a2d3223e739/sqlalchemy/sql/elements.py#L52-L103
|
|
pyparallel/pyparallel
|
11e8c6072d48c8f13641925d17b147bf36ee0ba3
|
Lib/site-packages/numpy-1.10.0.dev0_046311a-py3.3-win-amd64.egg/numpy/polynomial/polynomial.py
|
python
|
polyfit
|
(x, y, deg, rcond=None, full=False, w=None)
|
Least-squares fit of a polynomial to data.
Return the coefficients of a polynomial of degree `deg` that is the
least squares fit to the data values `y` given at points `x`. If `y` is
1-D the returned coefficients will also be 1-D. If `y` is 2-D multiple
fits are done, one for each column of `y`, and the resulting
coefficients are stored in the corresponding columns of a 2-D return.
The fitted polynomial(s) are in the form
.. math:: p(x) = c_0 + c_1 * x + ... + c_n * x^n,
where `n` is `deg`.
Parameters
----------
x : array_like, shape (`M`,)
x-coordinates of the `M` sample (data) points ``(x[i], y[i])``.
y : array_like, shape (`M`,) or (`M`, `K`)
y-coordinates of the sample points. Several sets of sample points
sharing the same x-coordinates can be (independently) fit with one
call to `polyfit` by passing in for `y` a 2-D array that contains
one data set per column.
deg : int
Degree of the polynomial(s) to be fit.
rcond : float, optional
Relative condition number of the fit. Singular values smaller
than `rcond`, relative to the largest singular value, will be
ignored. The default value is ``len(x)*eps``, where `eps` is the
relative precision of the platform's float type, about 2e-16 in
most cases.
full : bool, optional
Switch determining the nature of the return value. When ``False``
(the default) just the coefficients are returned; when ``True``,
diagnostic information from the singular value decomposition (used
to solve the fit's matrix equation) is also returned.
w : array_like, shape (`M`,), optional
Weights. If not None, the contribution of each point
``(x[i],y[i])`` to the fit is weighted by `w[i]`. Ideally the
weights are chosen so that the errors of the products ``w[i]*y[i]``
all have the same variance. The default value is None.
.. versionadded:: 1.5.0
Returns
-------
coef : ndarray, shape (`deg` + 1,) or (`deg` + 1, `K`)
Polynomial coefficients ordered from low to high. If `y` was 2-D,
the coefficients in column `k` of `coef` represent the polynomial
fit to the data in `y`'s `k`-th column.
[residuals, rank, singular_values, rcond] : list
These values are only returned if `full` = True
resid -- sum of squared residuals of the least squares fit
rank -- the numerical rank of the scaled Vandermonde matrix
sv -- singular values of the scaled Vandermonde matrix
rcond -- value of `rcond`.
For more details, see `linalg.lstsq`.
Raises
------
RankWarning
Raised if the matrix in the least-squares fit is rank deficient.
The warning is only raised if `full` == False. The warnings can
be turned off by:
>>> import warnings
>>> warnings.simplefilter('ignore', RankWarning)
See Also
--------
chebfit, legfit, lagfit, hermfit, hermefit
polyval : Evaluates a polynomial.
polyvander : Vandermonde matrix for powers.
linalg.lstsq : Computes a least-squares fit from the matrix.
scipy.interpolate.UnivariateSpline : Computes spline fits.
Notes
-----
The solution is the coefficients of the polynomial `p` that minimizes
the sum of the weighted squared errors
.. math :: E = \\sum_j w_j^2 * |y_j - p(x_j)|^2,
where the :math:`w_j` are the weights. This problem is solved by
setting up the (typically) over-determined matrix equation:
.. math :: V(x) * c = w * y,
where `V` is the weighted pseudo Vandermonde matrix of `x`, `c` are the
coefficients to be solved for, `w` are the weights, and `y` are the
observed values. This equation is then solved using the singular value
decomposition of `V`.
If some of the singular values of `V` are so small that they are
neglected (and `full` == ``False``), a `RankWarning` will be raised.
This means that the coefficient values may be poorly determined.
Fitting to a lower order polynomial will usually get rid of the warning
(but may not be what you want, of course; if you have independent
reason(s) for choosing the degree which isn't working, you may have to:
a) reconsider those reasons, and/or b) reconsider the quality of your
data). The `rcond` parameter can also be set to a value smaller than
its default, but the resulting fit may be spurious and have large
contributions from roundoff error.
Polynomial fits using double precision tend to "fail" at about
(polynomial) degree 20. Fits using Chebyshev or Legendre series are
generally better conditioned, but much can still depend on the
distribution of the sample points and the smoothness of the data. If
the quality of the fit is inadequate, splines may be a good
alternative.
Examples
--------
>>> from numpy.polynomial import polynomial as P
>>> x = np.linspace(-1,1,51) # x "data": [-1, -0.96, ..., 0.96, 1]
>>> y = x**3 - x + np.random.randn(len(x)) # x^3 - x + N(0,1) "noise"
>>> c, stats = P.polyfit(x,y,3,full=True)
>>> c # c[0], c[2] should be approx. 0, c[1] approx. -1, c[3] approx. 1
array([ 0.01909725, -1.30598256, -0.00577963, 1.02644286])
>>> stats # note the large SSR, explaining the rather poor results
[array([ 38.06116253]), 4, array([ 1.38446749, 1.32119158, 0.50443316,
0.28853036]), 1.1324274851176597e-014]
Same thing without the added noise
>>> y = x**3 - x
>>> c, stats = P.polyfit(x,y,3,full=True)
>>> c # c[0], c[2] should be "very close to 0", c[1] ~= -1, c[3] ~= 1
array([ -1.73362882e-17, -1.00000000e+00, -2.67471909e-16,
1.00000000e+00])
>>> stats # note the minuscule SSR
[array([ 7.46346754e-31]), 4, array([ 1.38446749, 1.32119158,
0.50443316, 0.28853036]), 1.1324274851176597e-014]
|
Least-squares fit of a polynomial to data.
|
[
"Least",
"-",
"squares",
"fit",
"of",
"a",
"polynomial",
"to",
"data",
"."
] |
def polyfit(x, y, deg, rcond=None, full=False, w=None):
"""
Least-squares fit of a polynomial to data.
Return the coefficients of a polynomial of degree `deg` that is the
least squares fit to the data values `y` given at points `x`. If `y` is
1-D the returned coefficients will also be 1-D. If `y` is 2-D multiple
fits are done, one for each column of `y`, and the resulting
coefficients are stored in the corresponding columns of a 2-D return.
The fitted polynomial(s) are in the form
.. math:: p(x) = c_0 + c_1 * x + ... + c_n * x^n,
where `n` is `deg`.
Parameters
----------
x : array_like, shape (`M`,)
x-coordinates of the `M` sample (data) points ``(x[i], y[i])``.
y : array_like, shape (`M`,) or (`M`, `K`)
y-coordinates of the sample points. Several sets of sample points
sharing the same x-coordinates can be (independently) fit with one
call to `polyfit` by passing in for `y` a 2-D array that contains
one data set per column.
deg : int
Degree of the polynomial(s) to be fit.
rcond : float, optional
Relative condition number of the fit. Singular values smaller
than `rcond`, relative to the largest singular value, will be
ignored. The default value is ``len(x)*eps``, where `eps` is the
relative precision of the platform's float type, about 2e-16 in
most cases.
full : bool, optional
Switch determining the nature of the return value. When ``False``
(the default) just the coefficients are returned; when ``True``,
diagnostic information from the singular value decomposition (used
to solve the fit's matrix equation) is also returned.
w : array_like, shape (`M`,), optional
Weights. If not None, the contribution of each point
``(x[i],y[i])`` to the fit is weighted by `w[i]`. Ideally the
weights are chosen so that the errors of the products ``w[i]*y[i]``
all have the same variance. The default value is None.
.. versionadded:: 1.5.0
Returns
-------
coef : ndarray, shape (`deg` + 1,) or (`deg` + 1, `K`)
Polynomial coefficients ordered from low to high. If `y` was 2-D,
the coefficients in column `k` of `coef` represent the polynomial
fit to the data in `y`'s `k`-th column.
[residuals, rank, singular_values, rcond] : list
These values are only returned if `full` = True
resid -- sum of squared residuals of the least squares fit
rank -- the numerical rank of the scaled Vandermonde matrix
sv -- singular values of the scaled Vandermonde matrix
rcond -- value of `rcond`.
For more details, see `linalg.lstsq`.
Raises
------
RankWarning
Raised if the matrix in the least-squares fit is rank deficient.
The warning is only raised if `full` == False. The warnings can
be turned off by:
>>> import warnings
>>> warnings.simplefilter('ignore', RankWarning)
See Also
--------
chebfit, legfit, lagfit, hermfit, hermefit
polyval : Evaluates a polynomial.
polyvander : Vandermonde matrix for powers.
linalg.lstsq : Computes a least-squares fit from the matrix.
scipy.interpolate.UnivariateSpline : Computes spline fits.
Notes
-----
The solution is the coefficients of the polynomial `p` that minimizes
the sum of the weighted squared errors
.. math :: E = \\sum_j w_j^2 * |y_j - p(x_j)|^2,
where the :math:`w_j` are the weights. This problem is solved by
setting up the (typically) over-determined matrix equation:
.. math :: V(x) * c = w * y,
where `V` is the weighted pseudo Vandermonde matrix of `x`, `c` are the
coefficients to be solved for, `w` are the weights, and `y` are the
observed values. This equation is then solved using the singular value
decomposition of `V`.
If some of the singular values of `V` are so small that they are
neglected (and `full` == ``False``), a `RankWarning` will be raised.
This means that the coefficient values may be poorly determined.
Fitting to a lower order polynomial will usually get rid of the warning
(but may not be what you want, of course; if you have independent
reason(s) for choosing the degree which isn't working, you may have to:
a) reconsider those reasons, and/or b) reconsider the quality of your
data). The `rcond` parameter can also be set to a value smaller than
its default, but the resulting fit may be spurious and have large
contributions from roundoff error.
Polynomial fits using double precision tend to "fail" at about
(polynomial) degree 20. Fits using Chebyshev or Legendre series are
generally better conditioned, but much can still depend on the
distribution of the sample points and the smoothness of the data. If
the quality of the fit is inadequate, splines may be a good
alternative.
Examples
--------
>>> from numpy.polynomial import polynomial as P
>>> x = np.linspace(-1,1,51) # x "data": [-1, -0.96, ..., 0.96, 1]
>>> y = x**3 - x + np.random.randn(len(x)) # x^3 - x + N(0,1) "noise"
>>> c, stats = P.polyfit(x,y,3,full=True)
>>> c # c[0], c[2] should be approx. 0, c[1] approx. -1, c[3] approx. 1
array([ 0.01909725, -1.30598256, -0.00577963, 1.02644286])
>>> stats # note the large SSR, explaining the rather poor results
[array([ 38.06116253]), 4, array([ 1.38446749, 1.32119158, 0.50443316,
0.28853036]), 1.1324274851176597e-014]
Same thing without the added noise
>>> y = x**3 - x
>>> c, stats = P.polyfit(x,y,3,full=True)
>>> c # c[0], c[2] should be "very close to 0", c[1] ~= -1, c[3] ~= 1
array([ -1.73362882e-17, -1.00000000e+00, -2.67471909e-16,
1.00000000e+00])
>>> stats # note the minuscule SSR
[array([ 7.46346754e-31]), 4, array([ 1.38446749, 1.32119158,
0.50443316, 0.28853036]), 1.1324274851176597e-014]
"""
order = int(deg) + 1
x = np.asarray(x) + 0.0
y = np.asarray(y) + 0.0
# check arguments.
if deg < 0:
raise ValueError("expected deg >= 0")
if x.ndim != 1:
raise TypeError("expected 1D vector for x")
if x.size == 0:
raise TypeError("expected non-empty vector for x")
if y.ndim < 1 or y.ndim > 2:
raise TypeError("expected 1D or 2D array for y")
if len(x) != len(y):
raise TypeError("expected x and y to have same length")
# set up the least squares matrices in transposed form
lhs = polyvander(x, deg).T
rhs = y.T
if w is not None:
w = np.asarray(w) + 0.0
if w.ndim != 1:
raise TypeError("expected 1D vector for w")
if len(x) != len(w):
raise TypeError("expected x and w to have same length")
# apply weights. Don't use inplace operations as they
# can cause problems with NA.
lhs = lhs * w
rhs = rhs * w
# set rcond
if rcond is None:
rcond = len(x)*np.finfo(x.dtype).eps
# Determine the norms of the design matrix columns.
if issubclass(lhs.dtype.type, np.complexfloating):
scl = np.sqrt((np.square(lhs.real) + np.square(lhs.imag)).sum(1))
else:
scl = np.sqrt(np.square(lhs).sum(1))
scl[scl == 0] = 1
# Solve the least squares problem.
c, resids, rank, s = la.lstsq(lhs.T/scl, rhs.T, rcond)
c = (c.T/scl).T
# warn on rank reduction
if rank != order and not full:
msg = "The fit may be poorly conditioned"
warnings.warn(msg, pu.RankWarning)
if full:
return c, [resids, rank, s, rcond]
else:
return c
|
[
"def",
"polyfit",
"(",
"x",
",",
"y",
",",
"deg",
",",
"rcond",
"=",
"None",
",",
"full",
"=",
"False",
",",
"w",
"=",
"None",
")",
":",
"order",
"=",
"int",
"(",
"deg",
")",
"+",
"1",
"x",
"=",
"np",
".",
"asarray",
"(",
"x",
")",
"+",
"0.0",
"y",
"=",
"np",
".",
"asarray",
"(",
"y",
")",
"+",
"0.0",
"# check arguments.",
"if",
"deg",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"\"expected deg >= 0\"",
")",
"if",
"x",
".",
"ndim",
"!=",
"1",
":",
"raise",
"TypeError",
"(",
"\"expected 1D vector for x\"",
")",
"if",
"x",
".",
"size",
"==",
"0",
":",
"raise",
"TypeError",
"(",
"\"expected non-empty vector for x\"",
")",
"if",
"y",
".",
"ndim",
"<",
"1",
"or",
"y",
".",
"ndim",
">",
"2",
":",
"raise",
"TypeError",
"(",
"\"expected 1D or 2D array for y\"",
")",
"if",
"len",
"(",
"x",
")",
"!=",
"len",
"(",
"y",
")",
":",
"raise",
"TypeError",
"(",
"\"expected x and y to have same length\"",
")",
"# set up the least squares matrices in transposed form",
"lhs",
"=",
"polyvander",
"(",
"x",
",",
"deg",
")",
".",
"T",
"rhs",
"=",
"y",
".",
"T",
"if",
"w",
"is",
"not",
"None",
":",
"w",
"=",
"np",
".",
"asarray",
"(",
"w",
")",
"+",
"0.0",
"if",
"w",
".",
"ndim",
"!=",
"1",
":",
"raise",
"TypeError",
"(",
"\"expected 1D vector for w\"",
")",
"if",
"len",
"(",
"x",
")",
"!=",
"len",
"(",
"w",
")",
":",
"raise",
"TypeError",
"(",
"\"expected x and w to have same length\"",
")",
"# apply weights. Don't use inplace operations as they",
"# can cause problems with NA.",
"lhs",
"=",
"lhs",
"*",
"w",
"rhs",
"=",
"rhs",
"*",
"w",
"# set rcond",
"if",
"rcond",
"is",
"None",
":",
"rcond",
"=",
"len",
"(",
"x",
")",
"*",
"np",
".",
"finfo",
"(",
"x",
".",
"dtype",
")",
".",
"eps",
"# Determine the norms of the design matrix columns.",
"if",
"issubclass",
"(",
"lhs",
".",
"dtype",
".",
"type",
",",
"np",
".",
"complexfloating",
")",
":",
"scl",
"=",
"np",
".",
"sqrt",
"(",
"(",
"np",
".",
"square",
"(",
"lhs",
".",
"real",
")",
"+",
"np",
".",
"square",
"(",
"lhs",
".",
"imag",
")",
")",
".",
"sum",
"(",
"1",
")",
")",
"else",
":",
"scl",
"=",
"np",
".",
"sqrt",
"(",
"np",
".",
"square",
"(",
"lhs",
")",
".",
"sum",
"(",
"1",
")",
")",
"scl",
"[",
"scl",
"==",
"0",
"]",
"=",
"1",
"# Solve the least squares problem.",
"c",
",",
"resids",
",",
"rank",
",",
"s",
"=",
"la",
".",
"lstsq",
"(",
"lhs",
".",
"T",
"/",
"scl",
",",
"rhs",
".",
"T",
",",
"rcond",
")",
"c",
"=",
"(",
"c",
".",
"T",
"/",
"scl",
")",
".",
"T",
"# warn on rank reduction",
"if",
"rank",
"!=",
"order",
"and",
"not",
"full",
":",
"msg",
"=",
"\"The fit may be poorly conditioned\"",
"warnings",
".",
"warn",
"(",
"msg",
",",
"pu",
".",
"RankWarning",
")",
"if",
"full",
":",
"return",
"c",
",",
"[",
"resids",
",",
"rank",
",",
"s",
",",
"rcond",
"]",
"else",
":",
"return",
"c"
] |
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/site-packages/numpy-1.10.0.dev0_046311a-py3.3-win-amd64.egg/numpy/polynomial/polynomial.py#L1196-L1388
|
||
wakatime/legacy-python-cli
|
9b64548b16ab5ef16603d9a6c2620a16d0df8d46
|
wakatime/packages/pytz/tzinfo.py
|
python
|
memorized_datetime
|
(seconds)
|
Create only one instance of each distinct datetime
|
Create only one instance of each distinct datetime
|
[
"Create",
"only",
"one",
"instance",
"of",
"each",
"distinct",
"datetime"
] |
def memorized_datetime(seconds):
'''Create only one instance of each distinct datetime'''
try:
return _datetime_cache[seconds]
except KeyError:
# NB. We can't just do datetime.utcfromtimestamp(seconds) as this
# fails with negative values under Windows (Bug #90096)
dt = _epoch + timedelta(seconds=seconds)
_datetime_cache[seconds] = dt
return dt
|
[
"def",
"memorized_datetime",
"(",
"seconds",
")",
":",
"try",
":",
"return",
"_datetime_cache",
"[",
"seconds",
"]",
"except",
"KeyError",
":",
"# NB. We can't just do datetime.utcfromtimestamp(seconds) as this",
"# fails with negative values under Windows (Bug #90096)",
"dt",
"=",
"_epoch",
"+",
"timedelta",
"(",
"seconds",
"=",
"seconds",
")",
"_datetime_cache",
"[",
"seconds",
"]",
"=",
"dt",
"return",
"dt"
] |
https://github.com/wakatime/legacy-python-cli/blob/9b64548b16ab5ef16603d9a6c2620a16d0df8d46/wakatime/packages/pytz/tzinfo.py#L27-L36
|
||
oracle/graalpython
|
577e02da9755d916056184ec441c26e00b70145c
|
graalpython/com.oracle.graal.python.benchmarks/python/micro/object-layout-change.py
|
python
|
Foo.mod
|
(self, b)
|
[] |
def mod(self, b):
self.b = b % 5
|
[
"def",
"mod",
"(",
"self",
",",
"b",
")",
":",
"self",
".",
"b",
"=",
"b",
"%",
"5"
] |
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/com.oracle.graal.python.benchmarks/python/micro/object-layout-change.py#L31-L32
|
||||
inspurer/WorkAttendanceSystem
|
1221e2d67bdf5bb15fe99517cc3ded58ccb066df
|
V2.0/venv/Lib/site-packages/pip-9.0.1-py3.5.egg/pip/_vendor/retrying.py
|
python
|
Retrying.fixed_sleep
|
(self, previous_attempt_number, delay_since_first_attempt_ms)
|
return self._wait_fixed
|
Sleep a fixed amount of time between each retry.
|
Sleep a fixed amount of time between each retry.
|
[
"Sleep",
"a",
"fixed",
"amount",
"of",
"time",
"between",
"each",
"retry",
"."
] |
def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Sleep a fixed amount of time between each retry."""
return self._wait_fixed
|
[
"def",
"fixed_sleep",
"(",
"self",
",",
"previous_attempt_number",
",",
"delay_since_first_attempt_ms",
")",
":",
"return",
"self",
".",
"_wait_fixed"
] |
https://github.com/inspurer/WorkAttendanceSystem/blob/1221e2d67bdf5bb15fe99517cc3ded58ccb066df/V2.0/venv/Lib/site-packages/pip-9.0.1-py3.5.egg/pip/_vendor/retrying.py#L153-L155
|
|
napari/napari
|
dbf4158e801fa7a429de8ef1cdee73bf6d64c61e
|
napari/components/experimental/chunk/_delay_queue.py
|
python
|
DelayQueue.cancel_requests
|
(
self, should_cancel: Callable[[ChunkRequest], bool]
)
|
return cancel
|
Cancel pending requests based on the given filter.
Parameters
----------
should_cancel : Callable[[ChunkRequest], bool]
Cancel the request if this returns True.
Returns
-------
List[ChunkRequests]
The requests that were cancelled, if any.
|
Cancel pending requests based on the given filter.
|
[
"Cancel",
"pending",
"requests",
"based",
"on",
"the",
"given",
"filter",
"."
] |
def cancel_requests(
self, should_cancel: Callable[[ChunkRequest], bool]
) -> List[ChunkRequest]:
"""Cancel pending requests based on the given filter.
Parameters
----------
should_cancel : Callable[[ChunkRequest], bool]
Cancel the request if this returns True.
Returns
-------
List[ChunkRequests]
The requests that were cancelled, if any.
"""
keep = []
cancel = []
with self._lock:
for entry in self._entries:
if should_cancel(entry.request):
cancel.append(entry.request)
else:
keep.append(entry)
self._entries = keep
return cancel
|
[
"def",
"cancel_requests",
"(",
"self",
",",
"should_cancel",
":",
"Callable",
"[",
"[",
"ChunkRequest",
"]",
",",
"bool",
"]",
")",
"->",
"List",
"[",
"ChunkRequest",
"]",
":",
"keep",
"=",
"[",
"]",
"cancel",
"=",
"[",
"]",
"with",
"self",
".",
"_lock",
":",
"for",
"entry",
"in",
"self",
".",
"_entries",
":",
"if",
"should_cancel",
"(",
"entry",
".",
"request",
")",
":",
"cancel",
".",
"append",
"(",
"entry",
".",
"request",
")",
"else",
":",
"keep",
".",
"append",
"(",
"entry",
")",
"self",
".",
"_entries",
"=",
"keep",
"return",
"cancel"
] |
https://github.com/napari/napari/blob/dbf4158e801fa7a429de8ef1cdee73bf6d64c61e/napari/components/experimental/chunk/_delay_queue.py#L123-L148
|
|
guildai/guildai
|
1665985a3d4d788efc1a3180ca51cc417f71ca78
|
guild/pip_util.py
|
python
|
_pip_get_entrypoints_patch
|
(filename)
|
return console, gui
|
See `_ensure_pip_get_entrypoints_patch` for details.
|
See `_ensure_pip_get_entrypoints_patch` for details.
|
[
"See",
"_ensure_pip_get_entrypoints_patch",
"for",
"details",
"."
] |
def _pip_get_entrypoints_patch(filename):
"""See `_ensure_pip_get_entrypoints_patch` for details."""
from pip._vendor.six import StringIO
from pip._vendor import pkg_resources
if not os.path.exists(filename):
return {}, {}
# This is done because you can pass a string to entry_points wrappers which
# means that they may or may not be valid INI files. The attempt here is to
# strip leading and trailing whitespace in order to make them valid INI
# files.
with open(filename) as fp:
data = StringIO()
for line in fp:
data.write(line.strip())
data.write("\n")
data.seek(0)
# get the entry points and then the script names
entry_points = pkg_resources.EntryPoint.parse_map(data)
console = entry_points.get('console_scripts', {})
gui = entry_points.get('gui_scripts', {})
def _split_ep(s):
"""get the string representation of EntryPoint, remove space and split
on '='"""
return str(s).replace(" ", "").split("=")
# convert the EntryPoint objects into strings with module:function
console = dict(_split_ep(v) for v in console.values())
gui = dict(_split_ep(v) for v in gui.values())
return console, gui
|
[
"def",
"_pip_get_entrypoints_patch",
"(",
"filename",
")",
":",
"from",
"pip",
".",
"_vendor",
".",
"six",
"import",
"StringIO",
"from",
"pip",
".",
"_vendor",
"import",
"pkg_resources",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"return",
"{",
"}",
",",
"{",
"}",
"# This is done because you can pass a string to entry_points wrappers which",
"# means that they may or may not be valid INI files. The attempt here is to",
"# strip leading and trailing whitespace in order to make them valid INI",
"# files.",
"with",
"open",
"(",
"filename",
")",
"as",
"fp",
":",
"data",
"=",
"StringIO",
"(",
")",
"for",
"line",
"in",
"fp",
":",
"data",
".",
"write",
"(",
"line",
".",
"strip",
"(",
")",
")",
"data",
".",
"write",
"(",
"\"\\n\"",
")",
"data",
".",
"seek",
"(",
"0",
")",
"# get the entry points and then the script names",
"entry_points",
"=",
"pkg_resources",
".",
"EntryPoint",
".",
"parse_map",
"(",
"data",
")",
"console",
"=",
"entry_points",
".",
"get",
"(",
"'console_scripts'",
",",
"{",
"}",
")",
"gui",
"=",
"entry_points",
".",
"get",
"(",
"'gui_scripts'",
",",
"{",
"}",
")",
"def",
"_split_ep",
"(",
"s",
")",
":",
"\"\"\"get the string representation of EntryPoint, remove space and split\n on '='\"\"\"",
"return",
"str",
"(",
"s",
")",
".",
"replace",
"(",
"\" \"",
",",
"\"\"",
")",
".",
"split",
"(",
"\"=\"",
")",
"# convert the EntryPoint objects into strings with module:function",
"console",
"=",
"dict",
"(",
"_split_ep",
"(",
"v",
")",
"for",
"v",
"in",
"console",
".",
"values",
"(",
")",
")",
"gui",
"=",
"dict",
"(",
"_split_ep",
"(",
"v",
")",
"for",
"v",
"in",
"gui",
".",
"values",
"(",
")",
")",
"return",
"console",
",",
"gui"
] |
https://github.com/guildai/guildai/blob/1665985a3d4d788efc1a3180ca51cc417f71ca78/guild/pip_util.py#L135-L167
|
|
sourmash-bio/sourmash
|
73aeb155befd7c94042ddb8ca277a69986f25a55
|
src/sourmash/signature.py
|
python
|
SourmashSignature.jaccard
|
(self, other)
|
return self.minhash.similarity(other.minhash, ignore_abundance=True,
downsample=False)
|
Compute Jaccard similarity with the other MinHash signature.
|
Compute Jaccard similarity with the other MinHash signature.
|
[
"Compute",
"Jaccard",
"similarity",
"with",
"the",
"other",
"MinHash",
"signature",
"."
] |
def jaccard(self, other):
"Compute Jaccard similarity with the other MinHash signature."
return self.minhash.similarity(other.minhash, ignore_abundance=True,
downsample=False)
|
[
"def",
"jaccard",
"(",
"self",
",",
"other",
")",
":",
"return",
"self",
".",
"minhash",
".",
"similarity",
"(",
"other",
".",
"minhash",
",",
"ignore_abundance",
"=",
"True",
",",
"downsample",
"=",
"False",
")"
] |
https://github.com/sourmash-bio/sourmash/blob/73aeb155befd7c94042ddb8ca277a69986f25a55/src/sourmash/signature.py#L140-L143
|
|
sagemath/sage
|
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
|
src/sage/schemes/curves/constructor.py
|
python
|
Curve
|
(F, A=None)
|
Return the plane or space curve defined by ``F``, where ``F`` can be either
a multivariate polynomial, a list or tuple of polynomials, or an algebraic
scheme.
If no ambient space is passed in for ``A``, and if ``F`` is not an
algebraic scheme, a new ambient space is constructed.
Also not specifying an ambient space will cause the curve to be defined in
either affine or projective space based on properties of ``F``. In
particular, if ``F`` contains a nonhomogeneous polynomial, the curve is
affine, and if ``F`` consists of homogeneous polynomials, then the curve is
projective.
INPUT:
- ``F`` -- a multivariate polynomial, or a list or tuple of polynomials, or an algebraic scheme.
- ``A`` -- (default: None) an ambient space in which to create the curve.
EXAMPLES: A projective plane curve. ::
sage: x,y,z = QQ['x,y,z'].gens()
sage: C = Curve(x^3 + y^3 + z^3); C
Projective Plane Curve over Rational Field defined by x^3 + y^3 + z^3
sage: C.genus()
1
Affine plane curves. ::
sage: x,y = GF(7)['x,y'].gens()
sage: C = Curve(y^2 + x^3 + x^10); C
Affine Plane Curve over Finite Field of size 7 defined by x^10 + x^3 + y^2
sage: C.genus()
0
sage: x, y = QQ['x,y'].gens()
sage: Curve(x^3 + y^3 + 1)
Affine Plane Curve over Rational Field defined by x^3 + y^3 + 1
A projective space curve. ::
sage: x,y,z,w = QQ['x,y,z,w'].gens()
sage: C = Curve([x^3 + y^3 - z^3 - w^3, x^5 - y*z^4]); C
Projective Curve over Rational Field defined by x^3 + y^3 - z^3 - w^3, x^5 - y*z^4
sage: C.genus()
13
An affine space curve. ::
sage: x,y,z = QQ['x,y,z'].gens()
sage: C = Curve([y^2 + x^3 + x^10 + z^7, x^2 + y^2]); C
Affine Curve over Rational Field defined by x^10 + z^7 + x^3 + y^2, x^2 + y^2
sage: C.genus()
47
We can also make non-reduced non-irreducible curves. ::
sage: x,y,z = QQ['x,y,z'].gens()
sage: Curve((x-y)*(x+y))
Projective Conic Curve over Rational Field defined by x^2 - y^2
sage: Curve((x-y)^2*(x+y)^2)
Projective Plane Curve over Rational Field defined by x^4 - 2*x^2*y^2 + y^4
A union of curves is a curve. ::
sage: x,y,z = QQ['x,y,z'].gens()
sage: C = Curve(x^3 + y^3 + z^3)
sage: D = Curve(x^4 + y^4 + z^4)
sage: C.union(D)
Projective Plane Curve over Rational Field defined by
x^7 + x^4*y^3 + x^3*y^4 + y^7 + x^4*z^3 + y^4*z^3 + x^3*z^4 + y^3*z^4 + z^7
The intersection is not a curve, though it is a scheme. ::
sage: X = C.intersection(D); X
Closed subscheme of Projective Space of dimension 2 over Rational Field defined by:
x^3 + y^3 + z^3,
x^4 + y^4 + z^4
Note that the intersection has dimension 0. ::
sage: X.dimension()
0
sage: I = X.defining_ideal(); I
Ideal (x^3 + y^3 + z^3, x^4 + y^4 + z^4) of Multivariate Polynomial Ring in x, y, z over Rational Field
If only a polynomial in three variables is given, then it must be
homogeneous such that a projective curve is constructed. ::
sage: x,y,z = QQ['x,y,z'].gens()
sage: Curve(x^2+y^2)
Projective Conic Curve over Rational Field defined by x^2 + y^2
sage: Curve(x^2+y^2+z)
Traceback (most recent call last):
...
TypeError: x^2 + y^2 + z is not a homogeneous polynomial
An ambient space can be specified to construct a space curve in an affine
or a projective space. ::
sage: A.<x,y,z> = AffineSpace(QQ, 3)
sage: C = Curve([y - x^2, z - x^3], A)
sage: C
Affine Curve over Rational Field defined by -x^2 + y, -x^3 + z
sage: A == C.ambient_space()
True
The defining polynomial must be nonzero unless the ambient space itself is
of dimension 1. ::
sage: P1.<x,y> = ProjectiveSpace(1,GF(5))
sage: S = P1.coordinate_ring()
sage: Curve(S(0), P1)
Projective Line over Finite Field of size 5
sage: Curve(P1)
Projective Line over Finite Field of size 5
::
sage: A1.<x> = AffineSpace(1, QQ)
sage: R = A1.coordinate_ring()
sage: Curve(R(0), A1)
Affine Line over Rational Field
sage: Curve(A1)
Affine Line over Rational Field
|
Return the plane or space curve defined by ``F``, where ``F`` can be either
a multivariate polynomial, a list or tuple of polynomials, or an algebraic
scheme.
|
[
"Return",
"the",
"plane",
"or",
"space",
"curve",
"defined",
"by",
"F",
"where",
"F",
"can",
"be",
"either",
"a",
"multivariate",
"polynomial",
"a",
"list",
"or",
"tuple",
"of",
"polynomials",
"or",
"an",
"algebraic",
"scheme",
"."
] |
def Curve(F, A=None):
"""
Return the plane or space curve defined by ``F``, where ``F`` can be either
a multivariate polynomial, a list or tuple of polynomials, or an algebraic
scheme.
If no ambient space is passed in for ``A``, and if ``F`` is not an
algebraic scheme, a new ambient space is constructed.
Also not specifying an ambient space will cause the curve to be defined in
either affine or projective space based on properties of ``F``. In
particular, if ``F`` contains a nonhomogeneous polynomial, the curve is
affine, and if ``F`` consists of homogeneous polynomials, then the curve is
projective.
INPUT:
- ``F`` -- a multivariate polynomial, or a list or tuple of polynomials, or an algebraic scheme.
- ``A`` -- (default: None) an ambient space in which to create the curve.
EXAMPLES: A projective plane curve. ::
sage: x,y,z = QQ['x,y,z'].gens()
sage: C = Curve(x^3 + y^3 + z^3); C
Projective Plane Curve over Rational Field defined by x^3 + y^3 + z^3
sage: C.genus()
1
Affine plane curves. ::
sage: x,y = GF(7)['x,y'].gens()
sage: C = Curve(y^2 + x^3 + x^10); C
Affine Plane Curve over Finite Field of size 7 defined by x^10 + x^3 + y^2
sage: C.genus()
0
sage: x, y = QQ['x,y'].gens()
sage: Curve(x^3 + y^3 + 1)
Affine Plane Curve over Rational Field defined by x^3 + y^3 + 1
A projective space curve. ::
sage: x,y,z,w = QQ['x,y,z,w'].gens()
sage: C = Curve([x^3 + y^3 - z^3 - w^3, x^5 - y*z^4]); C
Projective Curve over Rational Field defined by x^3 + y^3 - z^3 - w^3, x^5 - y*z^4
sage: C.genus()
13
An affine space curve. ::
sage: x,y,z = QQ['x,y,z'].gens()
sage: C = Curve([y^2 + x^3 + x^10 + z^7, x^2 + y^2]); C
Affine Curve over Rational Field defined by x^10 + z^7 + x^3 + y^2, x^2 + y^2
sage: C.genus()
47
We can also make non-reduced non-irreducible curves. ::
sage: x,y,z = QQ['x,y,z'].gens()
sage: Curve((x-y)*(x+y))
Projective Conic Curve over Rational Field defined by x^2 - y^2
sage: Curve((x-y)^2*(x+y)^2)
Projective Plane Curve over Rational Field defined by x^4 - 2*x^2*y^2 + y^4
A union of curves is a curve. ::
sage: x,y,z = QQ['x,y,z'].gens()
sage: C = Curve(x^3 + y^3 + z^3)
sage: D = Curve(x^4 + y^4 + z^4)
sage: C.union(D)
Projective Plane Curve over Rational Field defined by
x^7 + x^4*y^3 + x^3*y^4 + y^7 + x^4*z^3 + y^4*z^3 + x^3*z^4 + y^3*z^4 + z^7
The intersection is not a curve, though it is a scheme. ::
sage: X = C.intersection(D); X
Closed subscheme of Projective Space of dimension 2 over Rational Field defined by:
x^3 + y^3 + z^3,
x^4 + y^4 + z^4
Note that the intersection has dimension 0. ::
sage: X.dimension()
0
sage: I = X.defining_ideal(); I
Ideal (x^3 + y^3 + z^3, x^4 + y^4 + z^4) of Multivariate Polynomial Ring in x, y, z over Rational Field
If only a polynomial in three variables is given, then it must be
homogeneous such that a projective curve is constructed. ::
sage: x,y,z = QQ['x,y,z'].gens()
sage: Curve(x^2+y^2)
Projective Conic Curve over Rational Field defined by x^2 + y^2
sage: Curve(x^2+y^2+z)
Traceback (most recent call last):
...
TypeError: x^2 + y^2 + z is not a homogeneous polynomial
An ambient space can be specified to construct a space curve in an affine
or a projective space. ::
sage: A.<x,y,z> = AffineSpace(QQ, 3)
sage: C = Curve([y - x^2, z - x^3], A)
sage: C
Affine Curve over Rational Field defined by -x^2 + y, -x^3 + z
sage: A == C.ambient_space()
True
The defining polynomial must be nonzero unless the ambient space itself is
of dimension 1. ::
sage: P1.<x,y> = ProjectiveSpace(1,GF(5))
sage: S = P1.coordinate_ring()
sage: Curve(S(0), P1)
Projective Line over Finite Field of size 5
sage: Curve(P1)
Projective Line over Finite Field of size 5
::
sage: A1.<x> = AffineSpace(1, QQ)
sage: R = A1.coordinate_ring()
sage: Curve(R(0), A1)
Affine Line over Rational Field
sage: Curve(A1)
Affine Line over Rational Field
"""
if A is None:
if is_AmbientSpace(F) and F.dimension() == 1:
return Curve(F.coordinate_ring().zero(), F)
if is_AlgebraicScheme(F):
return Curve(F.defining_polynomials(), F.ambient_space())
if isinstance(F, (list, tuple)):
P = Sequence(F).universe()
if not is_MPolynomialRing(P):
raise TypeError("universe of F must be a multivariate polynomial ring")
for f in F:
if not f.is_homogeneous():
A = AffineSpace(P.ngens(), P.base_ring(), names=P.variable_names())
A._coordinate_ring = P
break
else:
A = ProjectiveSpace(P.ngens()-1, P.base_ring(), names=P.variable_names())
A._coordinate_ring = P
elif is_MPolynomial(F): # define a plane curve
P = F.parent()
k = F.base_ring()
if not k.is_field():
if k.is_integral_domain(): # upgrade to a field
P = P.change_ring(k.fraction_field())
F = P(F)
k = F.base_ring()
else:
raise TypeError("not a multivariate polynomial over a field or an integral domain")
if F.parent().ngens() == 2:
if F == 0:
raise ValueError("defining polynomial of curve must be nonzero")
A = AffineSpace(2, P.base_ring(), names=P.variable_names())
A._coordinate_ring = P
elif F.parent().ngens() == 3:
if F == 0:
raise ValueError("defining polynomial of curve must be nonzero")
# special case: construct a conic curve
if F.total_degree() == 2 and k.is_field():
return Conic(k, F)
A = ProjectiveSpace(2, P.base_ring(), names=P.variable_names())
A._coordinate_ring = P
elif F.parent().ngens() == 1:
if not F.is_zero():
raise ValueError("defining polynomial of curve must be zero "
"if the ambient space is of dimension 1")
A = AffineSpace(1, P.base_ring(), names=P.variable_names())
A._coordinate_ring = P
else:
raise TypeError("number of variables of F (={}) must be 2 or 3".format(F))
F = [F]
else:
raise TypeError("F (={}) must be a multivariate polynomial".format(F))
else:
if not is_AmbientSpace(A):
raise TypeError("ambient space must be either an affine or projective space")
if not isinstance(F, (list, tuple)):
F = [F]
if not all(f.parent() == A.coordinate_ring() for f in F):
raise TypeError("need a list of polynomials of the coordinate ring of {}".format(A))
n = A.dimension_relative()
if n < 1:
raise TypeError("ambient space should be an affine or projective space of positive dimension")
k = A.base_ring()
if is_AffineSpace(A):
if n != 2:
if is_FiniteField(k):
if A.coordinate_ring().ideal(F).is_prime():
return IntegralAffineCurve_finite_field(A, F)
if k in Fields():
if k == QQ and A.coordinate_ring().ideal(F).is_prime():
return IntegralAffineCurve(A, F)
return AffineCurve_field(A, F)
return AffineCurve(A, F)
if not (len(F) == 1 and F[0] != 0 and F[0].degree() > 0):
raise TypeError("need a single nonconstant polynomial to define a plane curve")
F = F[0]
if is_FiniteField(k):
if _is_irreducible_and_reduced(F):
return IntegralAffinePlaneCurve_finite_field(A, F)
return AffinePlaneCurve_finite_field(A, F)
if k in Fields():
if k == QQ and _is_irreducible_and_reduced(F):
return IntegralAffinePlaneCurve(A, F)
return AffinePlaneCurve_field(A, F)
return AffinePlaneCurve(A, F)
elif is_ProjectiveSpace(A):
if n != 2:
if not all(f.is_homogeneous() for f in F):
raise TypeError("polynomials defining a curve in a projective space must be homogeneous")
if is_FiniteField(k):
if A.coordinate_ring().ideal(F).is_prime():
return IntegralProjectiveCurve_finite_field(A, F)
if k in Fields():
if k == QQ and A.coordinate_ring().ideal(F).is_prime():
return IntegralProjectiveCurve(A, F)
return ProjectiveCurve_field(A, F)
return ProjectiveCurve(A, F)
# There is no dimension check when initializing a plane curve, so check
# here that F consists of a single nonconstant polynomial.
if not (len(F) == 1 and F[0] != 0 and F[0].degree() > 0):
raise TypeError("need a single nonconstant polynomial to define a plane curve")
F = F[0]
if not F.is_homogeneous():
raise TypeError("{} is not a homogeneous polynomial".format(F))
if is_FiniteField(k):
if _is_irreducible_and_reduced(F):
return IntegralProjectivePlaneCurve_finite_field(A, F)
return ProjectivePlaneCurve_finite_field(A, F)
if k in Fields():
if k == QQ and _is_irreducible_and_reduced(F):
return IntegralProjectivePlaneCurve(A, F)
return ProjectivePlaneCurve_field(A, F)
return ProjectivePlaneCurve(A, F)
else:
raise TypeError('ambient space neither affine nor projective')
|
[
"def",
"Curve",
"(",
"F",
",",
"A",
"=",
"None",
")",
":",
"if",
"A",
"is",
"None",
":",
"if",
"is_AmbientSpace",
"(",
"F",
")",
"and",
"F",
".",
"dimension",
"(",
")",
"==",
"1",
":",
"return",
"Curve",
"(",
"F",
".",
"coordinate_ring",
"(",
")",
".",
"zero",
"(",
")",
",",
"F",
")",
"if",
"is_AlgebraicScheme",
"(",
"F",
")",
":",
"return",
"Curve",
"(",
"F",
".",
"defining_polynomials",
"(",
")",
",",
"F",
".",
"ambient_space",
"(",
")",
")",
"if",
"isinstance",
"(",
"F",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"P",
"=",
"Sequence",
"(",
"F",
")",
".",
"universe",
"(",
")",
"if",
"not",
"is_MPolynomialRing",
"(",
"P",
")",
":",
"raise",
"TypeError",
"(",
"\"universe of F must be a multivariate polynomial ring\"",
")",
"for",
"f",
"in",
"F",
":",
"if",
"not",
"f",
".",
"is_homogeneous",
"(",
")",
":",
"A",
"=",
"AffineSpace",
"(",
"P",
".",
"ngens",
"(",
")",
",",
"P",
".",
"base_ring",
"(",
")",
",",
"names",
"=",
"P",
".",
"variable_names",
"(",
")",
")",
"A",
".",
"_coordinate_ring",
"=",
"P",
"break",
"else",
":",
"A",
"=",
"ProjectiveSpace",
"(",
"P",
".",
"ngens",
"(",
")",
"-",
"1",
",",
"P",
".",
"base_ring",
"(",
")",
",",
"names",
"=",
"P",
".",
"variable_names",
"(",
")",
")",
"A",
".",
"_coordinate_ring",
"=",
"P",
"elif",
"is_MPolynomial",
"(",
"F",
")",
":",
"# define a plane curve",
"P",
"=",
"F",
".",
"parent",
"(",
")",
"k",
"=",
"F",
".",
"base_ring",
"(",
")",
"if",
"not",
"k",
".",
"is_field",
"(",
")",
":",
"if",
"k",
".",
"is_integral_domain",
"(",
")",
":",
"# upgrade to a field",
"P",
"=",
"P",
".",
"change_ring",
"(",
"k",
".",
"fraction_field",
"(",
")",
")",
"F",
"=",
"P",
"(",
"F",
")",
"k",
"=",
"F",
".",
"base_ring",
"(",
")",
"else",
":",
"raise",
"TypeError",
"(",
"\"not a multivariate polynomial over a field or an integral domain\"",
")",
"if",
"F",
".",
"parent",
"(",
")",
".",
"ngens",
"(",
")",
"==",
"2",
":",
"if",
"F",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"\"defining polynomial of curve must be nonzero\"",
")",
"A",
"=",
"AffineSpace",
"(",
"2",
",",
"P",
".",
"base_ring",
"(",
")",
",",
"names",
"=",
"P",
".",
"variable_names",
"(",
")",
")",
"A",
".",
"_coordinate_ring",
"=",
"P",
"elif",
"F",
".",
"parent",
"(",
")",
".",
"ngens",
"(",
")",
"==",
"3",
":",
"if",
"F",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"\"defining polynomial of curve must be nonzero\"",
")",
"# special case: construct a conic curve",
"if",
"F",
".",
"total_degree",
"(",
")",
"==",
"2",
"and",
"k",
".",
"is_field",
"(",
")",
":",
"return",
"Conic",
"(",
"k",
",",
"F",
")",
"A",
"=",
"ProjectiveSpace",
"(",
"2",
",",
"P",
".",
"base_ring",
"(",
")",
",",
"names",
"=",
"P",
".",
"variable_names",
"(",
")",
")",
"A",
".",
"_coordinate_ring",
"=",
"P",
"elif",
"F",
".",
"parent",
"(",
")",
".",
"ngens",
"(",
")",
"==",
"1",
":",
"if",
"not",
"F",
".",
"is_zero",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"defining polynomial of curve must be zero \"",
"\"if the ambient space is of dimension 1\"",
")",
"A",
"=",
"AffineSpace",
"(",
"1",
",",
"P",
".",
"base_ring",
"(",
")",
",",
"names",
"=",
"P",
".",
"variable_names",
"(",
")",
")",
"A",
".",
"_coordinate_ring",
"=",
"P",
"else",
":",
"raise",
"TypeError",
"(",
"\"number of variables of F (={}) must be 2 or 3\"",
".",
"format",
"(",
"F",
")",
")",
"F",
"=",
"[",
"F",
"]",
"else",
":",
"raise",
"TypeError",
"(",
"\"F (={}) must be a multivariate polynomial\"",
".",
"format",
"(",
"F",
")",
")",
"else",
":",
"if",
"not",
"is_AmbientSpace",
"(",
"A",
")",
":",
"raise",
"TypeError",
"(",
"\"ambient space must be either an affine or projective space\"",
")",
"if",
"not",
"isinstance",
"(",
"F",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"F",
"=",
"[",
"F",
"]",
"if",
"not",
"all",
"(",
"f",
".",
"parent",
"(",
")",
"==",
"A",
".",
"coordinate_ring",
"(",
")",
"for",
"f",
"in",
"F",
")",
":",
"raise",
"TypeError",
"(",
"\"need a list of polynomials of the coordinate ring of {}\"",
".",
"format",
"(",
"A",
")",
")",
"n",
"=",
"A",
".",
"dimension_relative",
"(",
")",
"if",
"n",
"<",
"1",
":",
"raise",
"TypeError",
"(",
"\"ambient space should be an affine or projective space of positive dimension\"",
")",
"k",
"=",
"A",
".",
"base_ring",
"(",
")",
"if",
"is_AffineSpace",
"(",
"A",
")",
":",
"if",
"n",
"!=",
"2",
":",
"if",
"is_FiniteField",
"(",
"k",
")",
":",
"if",
"A",
".",
"coordinate_ring",
"(",
")",
".",
"ideal",
"(",
"F",
")",
".",
"is_prime",
"(",
")",
":",
"return",
"IntegralAffineCurve_finite_field",
"(",
"A",
",",
"F",
")",
"if",
"k",
"in",
"Fields",
"(",
")",
":",
"if",
"k",
"==",
"QQ",
"and",
"A",
".",
"coordinate_ring",
"(",
")",
".",
"ideal",
"(",
"F",
")",
".",
"is_prime",
"(",
")",
":",
"return",
"IntegralAffineCurve",
"(",
"A",
",",
"F",
")",
"return",
"AffineCurve_field",
"(",
"A",
",",
"F",
")",
"return",
"AffineCurve",
"(",
"A",
",",
"F",
")",
"if",
"not",
"(",
"len",
"(",
"F",
")",
"==",
"1",
"and",
"F",
"[",
"0",
"]",
"!=",
"0",
"and",
"F",
"[",
"0",
"]",
".",
"degree",
"(",
")",
">",
"0",
")",
":",
"raise",
"TypeError",
"(",
"\"need a single nonconstant polynomial to define a plane curve\"",
")",
"F",
"=",
"F",
"[",
"0",
"]",
"if",
"is_FiniteField",
"(",
"k",
")",
":",
"if",
"_is_irreducible_and_reduced",
"(",
"F",
")",
":",
"return",
"IntegralAffinePlaneCurve_finite_field",
"(",
"A",
",",
"F",
")",
"return",
"AffinePlaneCurve_finite_field",
"(",
"A",
",",
"F",
")",
"if",
"k",
"in",
"Fields",
"(",
")",
":",
"if",
"k",
"==",
"QQ",
"and",
"_is_irreducible_and_reduced",
"(",
"F",
")",
":",
"return",
"IntegralAffinePlaneCurve",
"(",
"A",
",",
"F",
")",
"return",
"AffinePlaneCurve_field",
"(",
"A",
",",
"F",
")",
"return",
"AffinePlaneCurve",
"(",
"A",
",",
"F",
")",
"elif",
"is_ProjectiveSpace",
"(",
"A",
")",
":",
"if",
"n",
"!=",
"2",
":",
"if",
"not",
"all",
"(",
"f",
".",
"is_homogeneous",
"(",
")",
"for",
"f",
"in",
"F",
")",
":",
"raise",
"TypeError",
"(",
"\"polynomials defining a curve in a projective space must be homogeneous\"",
")",
"if",
"is_FiniteField",
"(",
"k",
")",
":",
"if",
"A",
".",
"coordinate_ring",
"(",
")",
".",
"ideal",
"(",
"F",
")",
".",
"is_prime",
"(",
")",
":",
"return",
"IntegralProjectiveCurve_finite_field",
"(",
"A",
",",
"F",
")",
"if",
"k",
"in",
"Fields",
"(",
")",
":",
"if",
"k",
"==",
"QQ",
"and",
"A",
".",
"coordinate_ring",
"(",
")",
".",
"ideal",
"(",
"F",
")",
".",
"is_prime",
"(",
")",
":",
"return",
"IntegralProjectiveCurve",
"(",
"A",
",",
"F",
")",
"return",
"ProjectiveCurve_field",
"(",
"A",
",",
"F",
")",
"return",
"ProjectiveCurve",
"(",
"A",
",",
"F",
")",
"# There is no dimension check when initializing a plane curve, so check",
"# here that F consists of a single nonconstant polynomial.",
"if",
"not",
"(",
"len",
"(",
"F",
")",
"==",
"1",
"and",
"F",
"[",
"0",
"]",
"!=",
"0",
"and",
"F",
"[",
"0",
"]",
".",
"degree",
"(",
")",
">",
"0",
")",
":",
"raise",
"TypeError",
"(",
"\"need a single nonconstant polynomial to define a plane curve\"",
")",
"F",
"=",
"F",
"[",
"0",
"]",
"if",
"not",
"F",
".",
"is_homogeneous",
"(",
")",
":",
"raise",
"TypeError",
"(",
"\"{} is not a homogeneous polynomial\"",
".",
"format",
"(",
"F",
")",
")",
"if",
"is_FiniteField",
"(",
"k",
")",
":",
"if",
"_is_irreducible_and_reduced",
"(",
"F",
")",
":",
"return",
"IntegralProjectivePlaneCurve_finite_field",
"(",
"A",
",",
"F",
")",
"return",
"ProjectivePlaneCurve_finite_field",
"(",
"A",
",",
"F",
")",
"if",
"k",
"in",
"Fields",
"(",
")",
":",
"if",
"k",
"==",
"QQ",
"and",
"_is_irreducible_and_reduced",
"(",
"F",
")",
":",
"return",
"IntegralProjectivePlaneCurve",
"(",
"A",
",",
"F",
")",
"return",
"ProjectivePlaneCurve_field",
"(",
"A",
",",
"F",
")",
"return",
"ProjectivePlaneCurve",
"(",
"A",
",",
"F",
")",
"else",
":",
"raise",
"TypeError",
"(",
"'ambient space neither affine nor projective'",
")"
] |
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/schemes/curves/constructor.py#L95-L353
|
||
intrig-unicamp/mininet-wifi
|
3c8a8f63bd4aa043aa9c1ad16f304dec2916f5ba
|
mn_wifi/sumo/traci/_simulation.py
|
python
|
SimulationDomain.getLoadedNumber
|
(self)
|
return self._getUniversal(tc.VAR_LOADED_VEHICLES_NUMBER)
|
getLoadedNumber() -> integer
Returns the number of vehicles which were loaded in this time step.
|
getLoadedNumber() -> integer
Returns the number of vehicles which were loaded in this time step.
|
[
"getLoadedNumber",
"()",
"-",
">",
"integer",
"Returns",
"the",
"number",
"of",
"vehicles",
"which",
"were",
"loaded",
"in",
"this",
"time",
"step",
"."
] |
def getLoadedNumber(self):
"""getLoadedNumber() -> integer
Returns the number of vehicles which were loaded in this time step.
"""
return self._getUniversal(tc.VAR_LOADED_VEHICLES_NUMBER)
|
[
"def",
"getLoadedNumber",
"(",
"self",
")",
":",
"return",
"self",
".",
"_getUniversal",
"(",
"tc",
".",
"VAR_LOADED_VEHICLES_NUMBER",
")"
] |
https://github.com/intrig-unicamp/mininet-wifi/blob/3c8a8f63bd4aa043aa9c1ad16f304dec2916f5ba/mn_wifi/sumo/traci/_simulation.py#L107-L111
|
|
erickrf/nlpnet
|
60368e079591d9cbef6044dca72ffe46ec9c572e
|
nlpnet/reader.py
|
python
|
TaggerReader.task
|
(self)
|
return None
|
The task the tagger reads data for.
Must be defined in subclasses.
|
The task the tagger reads data for.
Must be defined in subclasses.
|
[
"The",
"task",
"the",
"tagger",
"reads",
"data",
"for",
".",
"Must",
"be",
"defined",
"in",
"subclasses",
"."
] |
def task(self):
"""
The task the tagger reads data for.
Must be defined in subclasses.
"""
return None
|
[
"def",
"task",
"(",
"self",
")",
":",
"return",
"None"
] |
https://github.com/erickrf/nlpnet/blob/60368e079591d9cbef6044dca72ffe46ec9c572e/nlpnet/reader.py#L82-L87
|
|
abulka/pynsource
|
886bf4ea05bede67fe7846185fbe78704c2a0e8a
|
Research/wx doco/ImageViewer7b.py
|
python
|
MyCanvas.OnLeftUp
|
(self, event)
|
Left mouse button up.
|
Left mouse button up.
|
[
"Left",
"mouse",
"button",
"up",
"."
] |
def OnLeftUp(self, event): # ANDY PAN
"""Left mouse button up."""
if event.ShiftDown():
event.Skip()
return
self.last_drag_x = self.last_drag_y = None
self.SetCursor(wx.StockCursor(wx.CURSOR_DEFAULT))
# turn off drag
self.was_dragging = False
# force PAINT event to remove selection box (if required)
# self.Update()
event.Skip()
|
[
"def",
"OnLeftUp",
"(",
"self",
",",
"event",
")",
":",
"# ANDY PAN",
"if",
"event",
".",
"ShiftDown",
"(",
")",
":",
"event",
".",
"Skip",
"(",
")",
"return",
"self",
".",
"last_drag_x",
"=",
"self",
".",
"last_drag_y",
"=",
"None",
"self",
".",
"SetCursor",
"(",
"wx",
".",
"StockCursor",
"(",
"wx",
".",
"CURSOR_DEFAULT",
")",
")",
"# turn off drag",
"self",
".",
"was_dragging",
"=",
"False",
"# force PAINT event to remove selection box (if required)",
"# self.Update()",
"event",
".",
"Skip",
"(",
")"
] |
https://github.com/abulka/pynsource/blob/886bf4ea05bede67fe7846185fbe78704c2a0e8a/Research/wx doco/ImageViewer7b.py#L277-L288
|
||
cuthbertLab/music21
|
bd30d4663e52955ed922c10fdf541419d8c67671
|
music21/serial.py
|
python
|
ToneRow.pitchClasses
|
(self)
|
return pitchList
|
Convenience function showing the pitch classes of a
:class:`~music21.serial.ToneRow` as a list.
>>> fiveFold = [5 * i for i in range(12)]
>>> fiveFold
[0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55]
>>> quintupleRow = serial.pcToToneRow(fiveFold)
>>> quintupleRow
<music21.serial.TwelveToneRow 05A3816B4927>
>>> quintupleRow.pitchClasses()
[0, 5, 10, 3, 8, 1, 6, 11, 4, 9, 2, 7]
>>> halfStep = serial.pcToToneRow([0, 1])
>>> halfStep.pitchClasses()
[0, 1]
|
Convenience function showing the pitch classes of a
:class:`~music21.serial.ToneRow` as a list.
|
[
"Convenience",
"function",
"showing",
"the",
"pitch",
"classes",
"of",
"a",
":",
"class",
":",
"~music21",
".",
"serial",
".",
"ToneRow",
"as",
"a",
"list",
"."
] |
def pitchClasses(self):
'''
Convenience function showing the pitch classes of a
:class:`~music21.serial.ToneRow` as a list.
>>> fiveFold = [5 * i for i in range(12)]
>>> fiveFold
[0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55]
>>> quintupleRow = serial.pcToToneRow(fiveFold)
>>> quintupleRow
<music21.serial.TwelveToneRow 05A3816B4927>
>>> quintupleRow.pitchClasses()
[0, 5, 10, 3, 8, 1, 6, 11, 4, 9, 2, 7]
>>> halfStep = serial.pcToToneRow([0, 1])
>>> halfStep.pitchClasses()
[0, 1]
'''
pitchList = [n.pitch.pitchClass for n in self]
return pitchList
|
[
"def",
"pitchClasses",
"(",
"self",
")",
":",
"pitchList",
"=",
"[",
"n",
".",
"pitch",
".",
"pitchClass",
"for",
"n",
"in",
"self",
"]",
"return",
"pitchList"
] |
https://github.com/cuthbertLab/music21/blob/bd30d4663e52955ed922c10fdf541419d8c67671/music21/serial.py#L321-L341
|
|
MatthewJA/Inverse-Reinforcement-Learning
|
56983aeee85eacb07164c313c03457bfaaa62778
|
irl/mdp/gridworld.py
|
python
|
Gridworld.feature_matrix
|
(self, feature_map="ident")
|
return np.array(features)
|
Get the feature matrix for this gridworld.
feature_map: Which feature map to use (default ident). String in {ident,
coord, proxi}.
-> NumPy array with shape (n_states, d_states).
|
Get the feature matrix for this gridworld.
|
[
"Get",
"the",
"feature",
"matrix",
"for",
"this",
"gridworld",
"."
] |
def feature_matrix(self, feature_map="ident"):
"""
Get the feature matrix for this gridworld.
feature_map: Which feature map to use (default ident). String in {ident,
coord, proxi}.
-> NumPy array with shape (n_states, d_states).
"""
features = []
for n in range(self.n_states):
f = self.feature_vector(n, feature_map)
features.append(f)
return np.array(features)
|
[
"def",
"feature_matrix",
"(",
"self",
",",
"feature_map",
"=",
"\"ident\"",
")",
":",
"features",
"=",
"[",
"]",
"for",
"n",
"in",
"range",
"(",
"self",
".",
"n_states",
")",
":",
"f",
"=",
"self",
".",
"feature_vector",
"(",
"n",
",",
"feature_map",
")",
"features",
".",
"append",
"(",
"f",
")",
"return",
"np",
".",
"array",
"(",
"features",
")"
] |
https://github.com/MatthewJA/Inverse-Reinforcement-Learning/blob/56983aeee85eacb07164c313c03457bfaaa62778/irl/mdp/gridworld.py#L71-L84
|
|
TencentCloud/tencentcloud-sdk-python
|
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
|
tencentcloud/vpc/v20170312/models.py
|
python
|
DefaultVpcSubnet.__init__
|
(self)
|
r"""
:param VpcId: 默认VpcId
:type VpcId: str
:param SubnetId: 默认SubnetId
:type SubnetId: str
|
r"""
:param VpcId: 默认VpcId
:type VpcId: str
:param SubnetId: 默认SubnetId
:type SubnetId: str
|
[
"r",
":",
"param",
"VpcId",
":",
"默认VpcId",
":",
"type",
"VpcId",
":",
"str",
":",
"param",
"SubnetId",
":",
"默认SubnetId",
":",
"type",
"SubnetId",
":",
"str"
] |
def __init__(self):
r"""
:param VpcId: 默认VpcId
:type VpcId: str
:param SubnetId: 默认SubnetId
:type SubnetId: str
"""
self.VpcId = None
self.SubnetId = None
|
[
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"VpcId",
"=",
"None",
"self",
".",
"SubnetId",
"=",
"None"
] |
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/vpc/v20170312/models.py#L5125-L5133
|
||
deluge-torrent/deluge
|
2316088f5c0dd6cb044d9d4832fa7d56dcc79cdc
|
deluge/ui/gtk3/preferences.py
|
python
|
Preferences.show
|
(self, page=None)
|
Page should be the string in the left list.. ie, 'Network' or
'Bandwidth
|
Page should be the string in the left list.. ie, 'Network' or
'Bandwidth
|
[
"Page",
"should",
"be",
"the",
"string",
"in",
"the",
"left",
"list",
"..",
"ie",
"Network",
"or",
"Bandwidth"
] |
def show(self, page=None):
"""Page should be the string in the left list.. ie, 'Network' or
'Bandwidth'"""
self.window_open = True
if page is not None:
for (index, string, __) in self.liststore:
if page == string:
self.treeview.get_selection().select_path(index)
break
component.get('PluginManager').run_on_show_prefs()
# Update the preferences dialog to reflect current config settings
self.core_config = {}
if client.connected():
self._get_accounts_tab_data()
def on_get_config(config):
self.core_config = config
client.core.get_available_plugins().addCallback(
on_get_available_plugins
)
def on_get_available_plugins(plugins):
self.all_plugins = plugins
client.core.get_enabled_plugins().addCallback(on_get_enabled_plugins)
def on_get_enabled_plugins(plugins):
self.enabled_plugins = plugins
client.core.get_listen_port().addCallback(on_get_listen_port)
def on_get_listen_port(port):
self.active_port = port
client.core.get_session_status(DISK_CACHE_KEYS).addCallback(
on_get_session_status
)
def on_get_session_status(status):
self.cache_status = status
self._show()
# This starts a series of client.core requests prior to showing the window
client.core.get_config().addCallback(on_get_config)
else:
self._show()
|
[
"def",
"show",
"(",
"self",
",",
"page",
"=",
"None",
")",
":",
"self",
".",
"window_open",
"=",
"True",
"if",
"page",
"is",
"not",
"None",
":",
"for",
"(",
"index",
",",
"string",
",",
"__",
")",
"in",
"self",
".",
"liststore",
":",
"if",
"page",
"==",
"string",
":",
"self",
".",
"treeview",
".",
"get_selection",
"(",
")",
".",
"select_path",
"(",
"index",
")",
"break",
"component",
".",
"get",
"(",
"'PluginManager'",
")",
".",
"run_on_show_prefs",
"(",
")",
"# Update the preferences dialog to reflect current config settings",
"self",
".",
"core_config",
"=",
"{",
"}",
"if",
"client",
".",
"connected",
"(",
")",
":",
"self",
".",
"_get_accounts_tab_data",
"(",
")",
"def",
"on_get_config",
"(",
"config",
")",
":",
"self",
".",
"core_config",
"=",
"config",
"client",
".",
"core",
".",
"get_available_plugins",
"(",
")",
".",
"addCallback",
"(",
"on_get_available_plugins",
")",
"def",
"on_get_available_plugins",
"(",
"plugins",
")",
":",
"self",
".",
"all_plugins",
"=",
"plugins",
"client",
".",
"core",
".",
"get_enabled_plugins",
"(",
")",
".",
"addCallback",
"(",
"on_get_enabled_plugins",
")",
"def",
"on_get_enabled_plugins",
"(",
"plugins",
")",
":",
"self",
".",
"enabled_plugins",
"=",
"plugins",
"client",
".",
"core",
".",
"get_listen_port",
"(",
")",
".",
"addCallback",
"(",
"on_get_listen_port",
")",
"def",
"on_get_listen_port",
"(",
"port",
")",
":",
"self",
".",
"active_port",
"=",
"port",
"client",
".",
"core",
".",
"get_session_status",
"(",
"DISK_CACHE_KEYS",
")",
".",
"addCallback",
"(",
"on_get_session_status",
")",
"def",
"on_get_session_status",
"(",
"status",
")",
":",
"self",
".",
"cache_status",
"=",
"status",
"self",
".",
"_show",
"(",
")",
"# This starts a series of client.core requests prior to showing the window",
"client",
".",
"core",
".",
"get_config",
"(",
")",
".",
"addCallback",
"(",
"on_get_config",
")",
"else",
":",
"self",
".",
"_show",
"(",
")"
] |
https://github.com/deluge-torrent/deluge/blob/2316088f5c0dd6cb044d9d4832fa7d56dcc79cdc/deluge/ui/gtk3/preferences.py#L292-L336
|
||
JiYou/openstack
|
8607dd488bde0905044b303eb6e52bdea6806923
|
packages/source/ceilometer/ceilometer/openstack/common/rpc/impl_kombu.py
|
python
|
Connection.declare_fanout_consumer
|
(self, topic, callback)
|
Create a 'fanout' consumer.
|
Create a 'fanout' consumer.
|
[
"Create",
"a",
"fanout",
"consumer",
"."
] |
def declare_fanout_consumer(self, topic, callback):
"""Create a 'fanout' consumer."""
self.declare_consumer(FanoutConsumer, topic, callback)
|
[
"def",
"declare_fanout_consumer",
"(",
"self",
",",
"topic",
",",
"callback",
")",
":",
"self",
".",
"declare_consumer",
"(",
"FanoutConsumer",
",",
"topic",
",",
"callback",
")"
] |
https://github.com/JiYou/openstack/blob/8607dd488bde0905044b303eb6e52bdea6806923/packages/source/ceilometer/ceilometer/openstack/common/rpc/impl_kombu.py#L696-L698
|
||
tp4a/teleport
|
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
|
server/www/packages/packages-linux/x64/cffi/vengine_cpy.py
|
python
|
VCPythonEngine._convert_funcarg_to_c
|
(self, tp, fromvar, tovar, errcode)
|
[] |
def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
extraarg = ''
if isinstance(tp, model.PrimitiveType):
if tp.is_integer_type() and tp.name != '_Bool':
converter = '_cffi_to_c_int'
extraarg = ', %s' % tp.name
else:
converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''),
tp.name.replace(' ', '_'))
errvalue = '-1'
#
elif isinstance(tp, model.PointerType):
self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
tovar, errcode)
return
#
elif isinstance(tp, (model.StructOrUnion, model.EnumType)):
# a struct (not a struct pointer) as a function argument
self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
% (tovar, self._gettypenum(tp), fromvar))
self._prnt(' %s;' % errcode)
return
#
elif isinstance(tp, model.FunctionPtrType):
converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
errvalue = 'NULL'
#
else:
raise NotImplementedError(tp)
#
self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % (
tovar, tp.get_c_name(''), errvalue))
self._prnt(' %s;' % errcode)
|
[
"def",
"_convert_funcarg_to_c",
"(",
"self",
",",
"tp",
",",
"fromvar",
",",
"tovar",
",",
"errcode",
")",
":",
"extraarg",
"=",
"''",
"if",
"isinstance",
"(",
"tp",
",",
"model",
".",
"PrimitiveType",
")",
":",
"if",
"tp",
".",
"is_integer_type",
"(",
")",
"and",
"tp",
".",
"name",
"!=",
"'_Bool'",
":",
"converter",
"=",
"'_cffi_to_c_int'",
"extraarg",
"=",
"', %s'",
"%",
"tp",
".",
"name",
"else",
":",
"converter",
"=",
"'(%s)_cffi_to_c_%s'",
"%",
"(",
"tp",
".",
"get_c_name",
"(",
"''",
")",
",",
"tp",
".",
"name",
".",
"replace",
"(",
"' '",
",",
"'_'",
")",
")",
"errvalue",
"=",
"'-1'",
"#",
"elif",
"isinstance",
"(",
"tp",
",",
"model",
".",
"PointerType",
")",
":",
"self",
".",
"_convert_funcarg_to_c_ptr_or_array",
"(",
"tp",
",",
"fromvar",
",",
"tovar",
",",
"errcode",
")",
"return",
"#",
"elif",
"isinstance",
"(",
"tp",
",",
"(",
"model",
".",
"StructOrUnion",
",",
"model",
".",
"EnumType",
")",
")",
":",
"# a struct (not a struct pointer) as a function argument",
"self",
".",
"_prnt",
"(",
"' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'",
"%",
"(",
"tovar",
",",
"self",
".",
"_gettypenum",
"(",
"tp",
")",
",",
"fromvar",
")",
")",
"self",
".",
"_prnt",
"(",
"' %s;'",
"%",
"errcode",
")",
"return",
"#",
"elif",
"isinstance",
"(",
"tp",
",",
"model",
".",
"FunctionPtrType",
")",
":",
"converter",
"=",
"'(%s)_cffi_to_c_pointer'",
"%",
"tp",
".",
"get_c_name",
"(",
"''",
")",
"extraarg",
"=",
"', _cffi_type(%d)'",
"%",
"self",
".",
"_gettypenum",
"(",
"tp",
")",
"errvalue",
"=",
"'NULL'",
"#",
"else",
":",
"raise",
"NotImplementedError",
"(",
"tp",
")",
"#",
"self",
".",
"_prnt",
"(",
"' %s = %s(%s%s);'",
"%",
"(",
"tovar",
",",
"converter",
",",
"fromvar",
",",
"extraarg",
")",
")",
"self",
".",
"_prnt",
"(",
"' if (%s == (%s)%s && PyErr_Occurred())'",
"%",
"(",
"tovar",
",",
"tp",
".",
"get_c_name",
"(",
"''",
")",
",",
"errvalue",
")",
")",
"self",
".",
"_prnt",
"(",
"' %s;'",
"%",
"errcode",
")"
] |
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-linux/x64/cffi/vengine_cpy.py#L242-L276
|
||||
sagemath/sage
|
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
|
src/sage/combinat/k_tableau.py
|
python
|
StrongTableau.cell_of_highest_head
|
( self, v )
|
return dout
|
Return the cell of the highest head of label ``v`` in the standard part of ``self``.
Return the cell where the head of the ribbon in the highest row is located
in the underlying standard tableau. If there is no cell with entry ``v`` then
the cell returned is `(0, r)` where `r` is the length of the first row.
This cell is calculated by iterating through the diagonals of the tableau.
INPUT:
- ``v`` -- an integer indicating the label in the standard tableau
OUTPUT:
- a pair of integers indicating the coordinates of the head of the highest
ribbon with label ``v``
EXAMPLES::
sage: T = StrongTableau([[-1,2,-3],[-2,3],[3]], 1)
sage: [T.cell_of_highest_head(v) for v in range(1,5)]
[(0, 0), (1, 0), (2, 0), (0, 3)]
sage: T = StrongTableau([[None,None,-3,4],[3,-4]],2)
sage: [T.cell_of_highest_head(v) for v in range(1,5)]
[(1, 0), (1, 1), (0, 4), (0, 4)]
TESTS::
sage: StrongTableau([],2).cell_of_highest_head(1)
(0, 0)
|
Return the cell of the highest head of label ``v`` in the standard part of ``self``.
|
[
"Return",
"the",
"cell",
"of",
"the",
"highest",
"head",
"of",
"label",
"v",
"in",
"the",
"standard",
"part",
"of",
"self",
"."
] |
def cell_of_highest_head( self, v ):
"""
Return the cell of the highest head of label ``v`` in the standard part of ``self``.
Return the cell where the head of the ribbon in the highest row is located
in the underlying standard tableau. If there is no cell with entry ``v`` then
the cell returned is `(0, r)` where `r` is the length of the first row.
This cell is calculated by iterating through the diagonals of the tableau.
INPUT:
- ``v`` -- an integer indicating the label in the standard tableau
OUTPUT:
- a pair of integers indicating the coordinates of the head of the highest
ribbon with label ``v``
EXAMPLES::
sage: T = StrongTableau([[-1,2,-3],[-2,3],[3]], 1)
sage: [T.cell_of_highest_head(v) for v in range(1,5)]
[(0, 0), (1, 0), (2, 0), (0, 3)]
sage: T = StrongTableau([[None,None,-3,4],[3,-4]],2)
sage: [T.cell_of_highest_head(v) for v in range(1,5)]
[(1, 0), (1, 1), (0, 4), (0, 4)]
TESTS::
sage: StrongTableau([],2).cell_of_highest_head(1)
(0, 0)
"""
Tlist = SkewTableau(self.to_standard_list())
if Tlist==[]:
return (0, 0)
r = len(Tlist[0])
dout = (0, r)
for d in range(-len(Tlist),r+1):
for c in Tlist.cells_by_content(d):
if nabs(Tlist[c[0]][c[1]])==v:
dout = c
if dout!=(0, r) and dout[1]-dout[0]!=d:
return dout
return dout
|
[
"def",
"cell_of_highest_head",
"(",
"self",
",",
"v",
")",
":",
"Tlist",
"=",
"SkewTableau",
"(",
"self",
".",
"to_standard_list",
"(",
")",
")",
"if",
"Tlist",
"==",
"[",
"]",
":",
"return",
"(",
"0",
",",
"0",
")",
"r",
"=",
"len",
"(",
"Tlist",
"[",
"0",
"]",
")",
"dout",
"=",
"(",
"0",
",",
"r",
")",
"for",
"d",
"in",
"range",
"(",
"-",
"len",
"(",
"Tlist",
")",
",",
"r",
"+",
"1",
")",
":",
"for",
"c",
"in",
"Tlist",
".",
"cells_by_content",
"(",
"d",
")",
":",
"if",
"nabs",
"(",
"Tlist",
"[",
"c",
"[",
"0",
"]",
"]",
"[",
"c",
"[",
"1",
"]",
"]",
")",
"==",
"v",
":",
"dout",
"=",
"c",
"if",
"dout",
"!=",
"(",
"0",
",",
"r",
")",
"and",
"dout",
"[",
"1",
"]",
"-",
"dout",
"[",
"0",
"]",
"!=",
"d",
":",
"return",
"dout",
"return",
"dout"
] |
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/combinat/k_tableau.py#L2817-L2861
|
|
hzlzh/AlfredWorkflow.com
|
7055f14f6922c80ea5943839eb0caff11ae57255
|
Sources/Workflows/SafariBookmark/workflow/update.py
|
python
|
Version.__str__
|
(self)
|
return vstr
|
Return semantic version string.
|
Return semantic version string.
|
[
"Return",
"semantic",
"version",
"string",
"."
] |
def __str__(self):
"""Return semantic version string."""
vstr = '{0}.{1}.{2}'.format(self.major, self.minor, self.patch)
if self.suffix:
vstr = '{0}-{1}'.format(vstr, self.suffix)
if self.build:
vstr = '{0}+{1}'.format(vstr, self.build)
return vstr
|
[
"def",
"__str__",
"(",
"self",
")",
":",
"vstr",
"=",
"'{0}.{1}.{2}'",
".",
"format",
"(",
"self",
".",
"major",
",",
"self",
".",
"minor",
",",
"self",
".",
"patch",
")",
"if",
"self",
".",
"suffix",
":",
"vstr",
"=",
"'{0}-{1}'",
".",
"format",
"(",
"vstr",
",",
"self",
".",
"suffix",
")",
"if",
"self",
".",
"build",
":",
"vstr",
"=",
"'{0}+{1}'",
".",
"format",
"(",
"vstr",
",",
"self",
".",
"build",
")",
"return",
"vstr"
] |
https://github.com/hzlzh/AlfredWorkflow.com/blob/7055f14f6922c80ea5943839eb0caff11ae57255/Sources/Workflows/SafariBookmark/workflow/update.py#L182-L189
|
|
log2timeline/plaso
|
fe2e316b8c76a0141760c0f2f181d84acb83abc2
|
plaso/parsers/plist_plugins/timemachine.py
|
python
|
TimeMachinePlugin._ParsePlist
|
(self, parser_mediator, match=None, **unused_kwargs)
|
Extracts relevant TimeMachine entries.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
|
Extracts relevant TimeMachine entries.
|
[
"Extracts",
"relevant",
"TimeMachine",
"entries",
"."
] |
def _ParsePlist(self, parser_mediator, match=None, **unused_kwargs):
"""Extracts relevant TimeMachine entries.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
"""
backup_alias_map = self._GetDataTypeMap('timemachine_backup_alias')
destinations = match.get('Destinations', [])
for destination in destinations:
backup_alias_data = destination.get('BackupAlias', b'')
try:
backup_alias = self._ReadStructureFromByteStream(
backup_alias_data, 0, backup_alias_map)
alias = backup_alias.string
except (ValueError, errors.ParseError) as exception:
parser_mediator.ProduceExtractionWarning(
'unable to parse backup alias value with error: {0!s}'.format(
exception))
alias = 'Unknown alias'
destination_identifier = (
destination.get('DestinationID', None) or 'Unknown device')
event_data = plist_event.PlistTimeEventData()
event_data.desc = 'TimeMachine Backup in {0:s} ({1:s})'.format(
alias, destination_identifier)
event_data.key = 'item/SnapshotDates'
event_data.root = '/Destinations'
snapshot_dates = destination.get('SnapshotDates', [])
for datetime_value in snapshot_dates:
date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
date_time.CopyFromDatetime(datetime_value)
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_WRITTEN)
parser_mediator.ProduceEventWithEventData(event, event_data)
|
[
"def",
"_ParsePlist",
"(",
"self",
",",
"parser_mediator",
",",
"match",
"=",
"None",
",",
"*",
"*",
"unused_kwargs",
")",
":",
"backup_alias_map",
"=",
"self",
".",
"_GetDataTypeMap",
"(",
"'timemachine_backup_alias'",
")",
"destinations",
"=",
"match",
".",
"get",
"(",
"'Destinations'",
",",
"[",
"]",
")",
"for",
"destination",
"in",
"destinations",
":",
"backup_alias_data",
"=",
"destination",
".",
"get",
"(",
"'BackupAlias'",
",",
"b''",
")",
"try",
":",
"backup_alias",
"=",
"self",
".",
"_ReadStructureFromByteStream",
"(",
"backup_alias_data",
",",
"0",
",",
"backup_alias_map",
")",
"alias",
"=",
"backup_alias",
".",
"string",
"except",
"(",
"ValueError",
",",
"errors",
".",
"ParseError",
")",
"as",
"exception",
":",
"parser_mediator",
".",
"ProduceExtractionWarning",
"(",
"'unable to parse backup alias value with error: {0!s}'",
".",
"format",
"(",
"exception",
")",
")",
"alias",
"=",
"'Unknown alias'",
"destination_identifier",
"=",
"(",
"destination",
".",
"get",
"(",
"'DestinationID'",
",",
"None",
")",
"or",
"'Unknown device'",
")",
"event_data",
"=",
"plist_event",
".",
"PlistTimeEventData",
"(",
")",
"event_data",
".",
"desc",
"=",
"'TimeMachine Backup in {0:s} ({1:s})'",
".",
"format",
"(",
"alias",
",",
"destination_identifier",
")",
"event_data",
".",
"key",
"=",
"'item/SnapshotDates'",
"event_data",
".",
"root",
"=",
"'/Destinations'",
"snapshot_dates",
"=",
"destination",
".",
"get",
"(",
"'SnapshotDates'",
",",
"[",
"]",
")",
"for",
"datetime_value",
"in",
"snapshot_dates",
":",
"date_time",
"=",
"dfdatetime_time_elements",
".",
"TimeElementsInMicroseconds",
"(",
")",
"date_time",
".",
"CopyFromDatetime",
"(",
"datetime_value",
")",
"event",
"=",
"time_events",
".",
"DateTimeValuesEvent",
"(",
"date_time",
",",
"definitions",
".",
"TIME_DESCRIPTION_WRITTEN",
")",
"parser_mediator",
".",
"ProduceEventWithEventData",
"(",
"event",
",",
"event_data",
")"
] |
https://github.com/log2timeline/plaso/blob/fe2e316b8c76a0141760c0f2f181d84acb83abc2/plaso/parsers/plist_plugins/timemachine.py#L44-L84
|
||
tensorflow/federated
|
5a60a032360087b8f4c7fcfd97ed1c0131c3eac3
|
tensorflow_federated/python/program/data_source.py
|
python
|
FederatedDataSource.federated_type
|
(self)
|
The type of the data returned by calling `select` on an iterator.
|
The type of the data returned by calling `select` on an iterator.
|
[
"The",
"type",
"of",
"the",
"data",
"returned",
"by",
"calling",
"select",
"on",
"an",
"iterator",
"."
] |
def federated_type(self) -> computation_types.FederatedType:
"""The type of the data returned by calling `select` on an iterator."""
raise NotImplementedError
|
[
"def",
"federated_type",
"(",
"self",
")",
"->",
"computation_types",
".",
"FederatedType",
":",
"raise",
"NotImplementedError"
] |
https://github.com/tensorflow/federated/blob/5a60a032360087b8f4c7fcfd97ed1c0131c3eac3/tensorflow_federated/python/program/data_source.py#L127-L129
|
||
karanchahal/distiller
|
a17ec06cbeafcdd2aea19d7c7663033c951392f5
|
distill_archive/research_seed/baselines/segmentation/transforms.py
|
python
|
CenterCrop.__init__
|
(self, size)
|
[] |
def __init__(self, size):
self.size = size
|
[
"def",
"__init__",
"(",
"self",
",",
"size",
")",
":",
"self",
".",
"size",
"=",
"size"
] |
https://github.com/karanchahal/distiller/blob/a17ec06cbeafcdd2aea19d7c7663033c951392f5/distill_archive/research_seed/baselines/segmentation/transforms.py#L69-L70
|
||||
oracle/graalpython
|
577e02da9755d916056184ec441c26e00b70145c
|
graalpython/lib-python/3/contextlib.py
|
python
|
ContextDecorator.__call__
|
(self, func)
|
return inner
|
[] |
def __call__(self, func):
@wraps(func)
def inner(*args, **kwds):
with self._recreate_cm():
return func(*args, **kwds)
return inner
|
[
"def",
"__call__",
"(",
"self",
",",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"inner",
"(",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
":",
"with",
"self",
".",
"_recreate_cm",
"(",
")",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
"return",
"inner"
] |
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/contextlib.py#L71-L76
|
|||
hyperledger/aries-cloudagent-python
|
2f36776e99f6053ae92eed8123b5b1b2e891c02a
|
aries_cloudagent/protocols/present_proof/v2_0/manager.py
|
python
|
V20PresManager.create_exchange_for_proposal
|
(
self,
connection_id: str,
pres_proposal_message: V20PresProposal,
auto_present: bool = None,
)
|
return pres_ex_record
|
Create a presentation exchange record for input presentation proposal.
Args:
connection_id: connection identifier
pres_proposal_message: presentation proposal to serialize
to exchange record
auto_present: whether to present proof upon receiving proof request
(default to configuration setting)
Returns:
Presentation exchange record, created
|
Create a presentation exchange record for input presentation proposal.
|
[
"Create",
"a",
"presentation",
"exchange",
"record",
"for",
"input",
"presentation",
"proposal",
"."
] |
async def create_exchange_for_proposal(
self,
connection_id: str,
pres_proposal_message: V20PresProposal,
auto_present: bool = None,
):
"""
Create a presentation exchange record for input presentation proposal.
Args:
connection_id: connection identifier
pres_proposal_message: presentation proposal to serialize
to exchange record
auto_present: whether to present proof upon receiving proof request
(default to configuration setting)
Returns:
Presentation exchange record, created
"""
pres_ex_record = V20PresExRecord(
connection_id=connection_id,
thread_id=pres_proposal_message._thread_id,
initiator=V20PresExRecord.INITIATOR_SELF,
role=V20PresExRecord.ROLE_PROVER,
state=V20PresExRecord.STATE_PROPOSAL_SENT,
pres_proposal=pres_proposal_message,
auto_present=auto_present,
trace=(pres_proposal_message._trace is not None),
)
async with self._profile.session() as session:
await pres_ex_record.save(
session, reason="create v2.0 presentation proposal"
)
return pres_ex_record
|
[
"async",
"def",
"create_exchange_for_proposal",
"(",
"self",
",",
"connection_id",
":",
"str",
",",
"pres_proposal_message",
":",
"V20PresProposal",
",",
"auto_present",
":",
"bool",
"=",
"None",
",",
")",
":",
"pres_ex_record",
"=",
"V20PresExRecord",
"(",
"connection_id",
"=",
"connection_id",
",",
"thread_id",
"=",
"pres_proposal_message",
".",
"_thread_id",
",",
"initiator",
"=",
"V20PresExRecord",
".",
"INITIATOR_SELF",
",",
"role",
"=",
"V20PresExRecord",
".",
"ROLE_PROVER",
",",
"state",
"=",
"V20PresExRecord",
".",
"STATE_PROPOSAL_SENT",
",",
"pres_proposal",
"=",
"pres_proposal_message",
",",
"auto_present",
"=",
"auto_present",
",",
"trace",
"=",
"(",
"pres_proposal_message",
".",
"_trace",
"is",
"not",
"None",
")",
",",
")",
"async",
"with",
"self",
".",
"_profile",
".",
"session",
"(",
")",
"as",
"session",
":",
"await",
"pres_ex_record",
".",
"save",
"(",
"session",
",",
"reason",
"=",
"\"create v2.0 presentation proposal\"",
")",
"return",
"pres_ex_record"
] |
https://github.com/hyperledger/aries-cloudagent-python/blob/2f36776e99f6053ae92eed8123b5b1b2e891c02a/aries_cloudagent/protocols/present_proof/v2_0/manager.py#L42-L78
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.