nwo | sha | path | language | identifier | parameters | function | url
kevinlin311tw/caffe-cvprw15 | 45c2a1bf0368569c54e0be4edf8d34285cf79e70 | scripts/cpp_lint.py | python | CleanseComments | (line)
"Removes",
"//",
"-",
"comments",
"and",
"single",
"-",
"line",
"C",
"-",
"style",
"/",
"*",
"*",
"/",
"comments",
"."
def CleanseComments(line):
"""Removes //-comments and single-line C-style /* */ comments.
Args:
line: A line of C++ source.
Returns:
The line with single-line comments removed.
"""
commentpos = line.find('//')
if commentpos != -1 and not IsCppString(line[:commentpos]):
line = line[:commentpos].rstrip()
# get rid of /* ... */
  return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
"def",
"CleanseComments",
"(",
"line",
")",
":",
"commentpos",
"=",
"line",
".",
"find",
"(",
"'//'",
")",
"if",
"commentpos",
"!=",
"-",
"1",
"and",
"not",
"IsCppString",
"(",
"line",
"[",
":",
"commentpos",
"]",
")",
":",
"line",
"=",
"line",
"[",
":",
"commentpos",
"]",
".",
"rstrip",
"(",
")",
"# get rid of /* ... */",
"return",
"_RE_PATTERN_CLEANSE_LINE_C_COMMENTS",
".",
"sub",
"(",
"''",
",",
"line",
")"
https://github.com/kevinlin311tw/caffe-cvprw15/blob/45c2a1bf0368569c54e0be4edf8d34285cf79e70/scripts/cpp_lint.py#L1167-L1180
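A minimal usage sketch, assuming the module-level _RE_PATTERN_CLEANSE_LINE_C_COMMENTS regex and the IsCppString helper from cpp_lint.py are in scope:

print(CleanseComments('int x = 1;  // counter'))  # -> 'int x = 1;'
print(CleanseComments('int /* tmp */ y = 2;'))    # -> 'int  y = 2;'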
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/ops/composite/multitype_ops/setitem_impl.py | python | _tensor_setitem_by_tuple_with_tensor | (data, tuple_index, value)
"Tensor",
"assignment",
"."
def _tensor_setitem_by_tuple_with_tensor(data, tuple_index, value):
"""
Tensor assignment.
Note:
Syntax support: A[B, C, D] = U.
Restraint condition: 1) A is a Tensor, and B, C, D are index Tensors.
2) U is a Tensor.
Inputs:
data (Tensor): Assigned tensor.
index (Tuple): An index tuple.
        value (Tensor): Assignment tensor, should have the same data type as 'data'.
Outputs:
        Tensor, element type and shape are the same as data.
"""
    return compile_utils.tensor_setitem_by_tuple_with_tensor(data, tuple_index, value)
"def",
"_tensor_setitem_by_tuple_with_tensor",
"(",
"data",
",",
"tuple_index",
",",
"value",
")",
":",
"return",
"compile_utils",
".",
"tensor_setitem_by_tuple_with_tensor",
"(",
"data",
",",
"tuple_index",
",",
"value",
")"
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/composite/multitype_ops/setitem_impl.py#L221-L238
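A hedged sketch of the A[B, C] = U assignment pattern this handler implements (PyNative mode; the exact dispatch through compile_utils is internal to MindSpore):

import numpy as np
from mindspore import Tensor

data = Tensor(np.zeros((3, 3), np.float32))
index = (Tensor([0, 1]), Tensor([1, 2]))
data[index] = Tensor(np.ones(2, np.float32))  # routes to _tensor_setitem_by_tuple_with_tensor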
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_controls.py | python | Gauge.__init__ | (self, *args, **kwargs)
"__init__",
"(",
"self",
"Window",
"parent",
"int",
"id",
"=",
"-",
"1",
"int",
"range",
"=",
"100",
"Point",
"pos",
"=",
"DefaultPosition",
"Size",
"size",
"=",
"DefaultSize",
"long",
"style",
"=",
"GA_HORIZONTAL",
"Validator",
"validator",
"=",
"DefaultValidator",
"String",
"name",
"=",
"GaugeNameStr",
")",
"-",
">",
"Gauge"
def __init__(self, *args, **kwargs):
"""
__init__(self, Window parent, int id=-1, int range=100, Point pos=DefaultPosition,
Size size=DefaultSize, long style=GA_HORIZONTAL,
Validator validator=DefaultValidator,
String name=GaugeNameStr) -> Gauge
"""
_controls_.Gauge_swiginit(self,_controls_.new_Gauge(*args, **kwargs))
        self._setOORInfo(self)
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"_controls_",
".",
"Gauge_swiginit",
"(",
"self",
",",
"_controls_",
".",
"new_Gauge",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")",
"self",
".",
"_setOORInfo",
"(",
"self",
")"
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_controls.py#L728-L736
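A short usage sketch (modern wxPython keeps the same constructor shape):

import wx

app = wx.App(False)
frame = wx.Frame(None)
gauge = wx.Gauge(frame, range=100, style=wx.GA_HORIZONTAL)
gauge.SetValue(40)  # fill the gauge to 40 of 100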
netket/netket | 0d534e54ecbf25b677ea72af6b85947979420652 | netket/hilbert/abstract_hilbert.py | python | AbstractHilbert._attrs | (self)
"Tuple",
"of",
"hashable",
"attributs",
"used",
"to",
"compute",
"the",
"immutable",
"hash",
"of",
"this",
"Hilbert",
"space"
def _attrs(self) -> Tuple:
"""
    Tuple of hashable attributes, used to compute the immutable
    hash of this Hilbert space.
""" | [
"def",
"_attrs",
"(",
"self",
")",
"->",
"Tuple",
":"
https://github.com/netket/netket/blob/0d534e54ecbf25b677ea72af6b85947979420652/netket/hilbert/abstract_hilbert.py#L107-L111
google/syzygy | 8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5 | third_party/numpy/files/numpy/oldnumeric/ma.py | python | MaskedArray.__abs__ | (self)
"Return",
"absolute",
"(",
"self",
")"
def __abs__(self):
"Return absolute(self)"
    return absolute(self)
"def",
"__abs__",
"(",
"self",
")",
":",
"return",
"absolute",
"(",
"self",
")"
https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/third_party/numpy/files/numpy/oldnumeric/ma.py#L887-L889
epam/Indigo | 30e40b4b1eb9bae0207435a26cfcb81ddcc42be1 | api/python/indigo/__init__.py | python | Indigo.nameToStructure | (self, name, params=None)
"Converts",
"a",
"chemical",
"name",
"into",
"a",
"corresponding",
"structure"
def nameToStructure(self, name, params=None):
"""
Converts a chemical name into a corresponding structure
Args:
name (str): a name to parse
params (str): a string (optional) containing parsing options or None if no options are changed
Raises:
IndigoException: if parsing fails or no structure is found
"""
if params is None:
params = ""
self._setSessionId()
return self.IndigoObject(
self,
self._checkResult(
Indigo._lib.indigoNameToStructure(
name.encode(ENCODE_ENCODING),
params.encode(ENCODE_ENCODING),
)
),
        )
"def",
"nameToStructure",
"(",
"self",
",",
"name",
",",
"params",
"=",
"None",
")",
":",
"if",
"params",
"is",
"None",
":",
"params",
"=",
"\"\"",
"self",
".",
"_setSessionId",
"(",
")",
"return",
"self",
".",
"IndigoObject",
"(",
"self",
",",
"self",
".",
"_checkResult",
"(",
"Indigo",
".",
"_lib",
".",
"indigoNameToStructure",
"(",
"name",
".",
"encode",
"(",
"ENCODE_ENCODING",
")",
",",
"params",
".",
"encode",
"(",
"ENCODE_ENCODING",
")",
",",
")",
")",
",",
")"
https://github.com/epam/Indigo/blob/30e40b4b1eb9bae0207435a26cfcb81ddcc42be1/api/python/indigo/__init__.py#L6495-L6518
forkineye/ESPixelStick | 22926f1c0d1131f1369fc7cad405689a095ae3cb | dist/bin/pyserial/serial/rfc2217.py | python | Serial.rfc2217_send_purge | (self, value)
"\\",
"Send",
"purge",
"request",
"to",
"the",
"remote",
".",
"(",
"PURGE_RECEIVE_BUFFER",
"/",
"PURGE_TRANSMIT_BUFFER",
"/",
"PURGE_BOTH_BUFFERS",
")"
def rfc2217_send_purge(self, value):
"""\
Send purge request to the remote.
(PURGE_RECEIVE_BUFFER / PURGE_TRANSMIT_BUFFER / PURGE_BOTH_BUFFERS)
"""
item = self._rfc2217_options['purge']
item.set(value) # transmit desired purge type
        item.wait(self._network_timeout)
"def",
"rfc2217_send_purge",
"(",
"self",
",",
"value",
")",
":",
"item",
"=",
"self",
".",
"_rfc2217_options",
"[",
"'purge'",
"]",
"item",
".",
"set",
"(",
"value",
")",
"# transmit desired purge type",
"item",
".",
"wait",
"(",
"self",
".",
"_network_timeout",
")"
https://github.com/forkineye/ESPixelStick/blob/22926f1c0d1131f1369fc7cad405689a095ae3cb/dist/bin/pyserial/serial/rfc2217.py#L863-L870
Kitware/ParaView | f760af9124ff4634b23ebbeab95a4f56e0261955 | Wrapping/Python/paraview/coprocessing.py | python | CoProcessor.SetDataRootDirectory | (self, root_directory)
"Specify",
"root",
"directory",
"for",
"data",
"extracts"
def SetDataRootDirectory(self, root_directory):
"""Specify root directory for data extracts"""
if root_directory and not root_directory.endswith("/"):
root_directory = root_directory + "/"
        self.__DataRootDirectory = root_directory
"def",
"SetDataRootDirectory",
"(",
"self",
",",
"root_directory",
")",
":",
"if",
"root_directory",
"and",
"not",
"root_directory",
".",
"endswith",
"(",
"\"/\"",
")",
":",
"root_directory",
"=",
"root_directory",
"+",
"\"/\"",
"self",
".",
"__DataRootDirectory",
"=",
"root_directory"
https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/Wrapping/Python/paraview/coprocessing.py#L891-L895
tensorflow/io | 92b44e180674a8af0e12e405530f7343e3e693e4 | tensorflow_io/python/ops/audio_ops.py | python | encode_aac | (input, rate, name=None)
"Encode",
"MP4",
"(",
"AAC",
")",
"audio",
"into",
"string",
"."
def encode_aac(input, rate, name=None):  # pylint: disable=redefined-builtin
"""Encode MP4(AAC) audio into string.
Args:
input: A `Tensor` of the audio input.
rate: The sample rate of the audio.
name: A name for the operation (optional).
Returns:
output: Encoded audio.
"""
if sys.platform == "linux":
try:
from tensorflow_io.python.ops import ( # pylint: disable=import-outside-toplevel,unused-import
ffmpeg_ops,
)
except NotImplementedError:
pass
    return core_ops.io_audio_encode_aac(input, rate, name=name)
"def",
"encode_aac",
"(",
"input",
",",
"rate",
",",
"name",
"=",
"None",
")",
":",
"# pylint: disable=redefined-builtin",
"if",
"sys",
".",
"platform",
"==",
"\"linux\"",
":",
"try",
":",
"from",
"tensorflow_io",
".",
"python",
".",
"ops",
"import",
"(",
"# pylint: disable=import-outside-toplevel,unused-import",
"ffmpeg_ops",
",",
")",
"except",
"NotImplementedError",
":",
"pass",
"return",
"core_ops",
".",
"io_audio_encode_aac",
"(",
"input",
",",
"rate",
",",
"name",
"=",
"name",
")"
https://github.com/tensorflow/io/blob/92b44e180674a8af0e12e405530f7343e3e693e4/tensorflow_io/python/ops/audio_ops.py#L633-L651
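A hedged usage sketch via the public tensorflow_io wrapper; the [samples, channels] float32 layout is an assumption carried over from the sibling encode_* ops:

import tensorflow as tf
import tensorflow_io as tfio

audio = tf.zeros([16000, 1], dtype=tf.float32)      # one second of mono silence
encoded = tfio.audio.encode_aac(audio, rate=16000)  # string scalar holding MP4(AAC) bytes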
Kitware/ParaView | f760af9124ff4634b23ebbeab95a4f56e0261955 | Wrapping/Python/paraview/simple.py | python | Show | (proxy=None, view=None, representationType=None, **params)
"Turns",
"the",
"visibility",
"of",
"a",
"given",
"pipeline",
"object",
"on",
"in",
"the",
"given",
"view",
".",
"If",
"pipeline",
"object",
"and",
"/",
"or",
"view",
"are",
"not",
"specified",
"active",
"objects",
"are",
"used",
"."
def Show(proxy=None, view=None, representationType=None, **params):
"""Turns the visibility of a given pipeline object on in the given view.
If pipeline object and/or view are not specified, active objects are used."""
if proxy == None:
proxy = GetActiveSource()
if not hasattr(proxy, "GetNumberOfOutputPorts") or proxy.GetNumberOfOutputPorts() == 0:
raise RuntimeError('Cannot show a sink i.e. algorithm with no output.')
if proxy == None:
raise RuntimeError ("Show() needs a proxy argument or that an active source is set.")
if not view:
# If there's no active view, controller.Show() will create a new preferred view.
# if possible.
view = active_objects.view
controller = servermanager.ParaViewPipelineController()
rep = controller.Show(proxy, proxy.Port, view, representationType)
if rep == None:
raise RuntimeError ("Could not create a representation object for proxy %s" % proxy.GetXMLLabel())
for param in params.keys():
setattr(rep, param, params[param])
    return rep
"def",
"Show",
"(",
"proxy",
"=",
"None",
",",
"view",
"=",
"None",
",",
"representationType",
"=",
"None",
",",
"*",
"*",
"params",
")",
":",
"if",
"proxy",
"==",
"None",
":",
"proxy",
"=",
"GetActiveSource",
"(",
")",
"if",
"not",
"hasattr",
"(",
"proxy",
",",
"\"GetNumberOfOutputPorts\"",
")",
"or",
"proxy",
".",
"GetNumberOfOutputPorts",
"(",
")",
"==",
"0",
":",
"raise",
"RuntimeError",
"(",
"'Cannot show a sink i.e. algorithm with no output.'",
")",
"if",
"proxy",
"==",
"None",
":",
"raise",
"RuntimeError",
"(",
"\"Show() needs a proxy argument or that an active source is set.\"",
")",
"if",
"not",
"view",
":",
"# If there's no active view, controller.Show() will create a new preferred view.",
"# if possible.",
"view",
"=",
"active_objects",
".",
"view",
"controller",
"=",
"servermanager",
".",
"ParaViewPipelineController",
"(",
")",
"rep",
"=",
"controller",
".",
"Show",
"(",
"proxy",
",",
"proxy",
".",
"Port",
",",
"view",
",",
"representationType",
")",
"if",
"rep",
"==",
"None",
":",
"raise",
"RuntimeError",
"(",
"\"Could not create a representation object for proxy %s\"",
"%",
"proxy",
".",
"GetXMLLabel",
"(",
")",
")",
"for",
"param",
"in",
"params",
".",
"keys",
"(",
")",
":",
"setattr",
"(",
"rep",
",",
"param",
",",
"params",
"[",
"param",
"]",
")",
"return",
"rep"
https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/Wrapping/Python/paraview/simple.py#L724-L743
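A short usage sketch inside a ParaView Python session (Opacity is a standard representation property; any keyword argument is set on the returned representation via setattr, as the code above shows):

from paraview.simple import *

src = Sphere()
rep = Show(src)         # shows in the active view, creating one if needed
Show(src, Opacity=0.5)  # extra keyword arguments become representation properties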
etternagame/etterna | 8775f74ac9c353320128609d4b4150672e9a6d04 | extern/SQLiteCpp/cpplint.py | python | _SetCountingStyle | (level)
"Sets",
"the",
"module",
"s",
"counting",
"options",
"."
def _SetCountingStyle(level):
"""Sets the module's counting options."""
  _cpplint_state.SetCountingStyle(level)
"def",
"_SetCountingStyle",
"(",
"level",
")",
":",
"_cpplint_state",
".",
"SetCountingStyle",
"(",
"level",
")"
https://github.com/etternagame/etterna/blob/8775f74ac9c353320128609d4b4150672e9a6d04/extern/SQLiteCpp/cpplint.py#L778-L780
bulletphysics/bullet3 | f0f2a952e146f016096db6f85cf0c44ed75b0b9a | examples/pybullet/gym/pybullet_envs/minitaur/robots/robot_base.py | python | RobotBase.apply_action | (self, action)
"Applies",
"the",
"action",
"to",
"the",
"robot",
"."
def apply_action(self, action):
"""Applies the action to the robot."""
    pass
"def",
"apply_action",
"(",
"self",
",",
"action",
")",
":",
"pass"
https://github.com/bulletphysics/bullet3/blob/f0f2a952e146f016096db6f85cf0c44ed75b0b9a/examples/pybullet/gym/pybullet_envs/minitaur/robots/robot_base.py#L64-L66
MythTV/mythtv | d282a209cb8be85d036f85a62a8ec971b67d45f4 | mythtv/programs/scripts/internetcontent/nv_python_libs/mtv/mtv_api.py | python | Videos.searchForVideos | (self, title, pagenumber)
"Common",
"name",
"for",
"a",
"video",
"search",
".",
"Used",
"to",
"interface",
"with",
"MythTV",
"plugin",
"NetVision"
def searchForVideos(self, title, pagenumber):
"""Common name for a video search. Used to interface with MythTV plugin NetVision
"""
# v2 api calls - An example that must be customized for each target site
if self.grabber_title == 'MTV':
self.config['urls']['video.search'] = "http://api.mtvnservices.com/1/video/search/?term=%s&start-index=%s&max-results=%s"
elif self.grabber_title == 'MTV Artists': # This search type is not currently implemented
self.config['urls']['video.search'] = "http://api.mtvnservices.com/1/artist/search/?term=%s&start-index=%s&max-results=%s"
else:
sys.stderr.write("! Error: MtvInvalidSearchType - The grabber name (%s) is invalid \n" % self.grabber_title)
sys.exit(1)
# Easier for debugging
# print self.searchTitle(title, pagenumber, self.page_limit)
# print
# sys.exit()
startindex = (int(pagenumber) -1) * self.page_limit + 1
try:
data = self.searchTitle(title, startindex, self.page_limit)
except MtvVideoNotFound as msg:
sys.stderr.write("%s\n" % msg)
return None
except MtvUrlError as msg:
sys.stderr.write('%s\n' % msg)
sys.exit(1)
except MtvHttpError as msg:
sys.stderr.write(self.error_messages['MtvHttpError'] % msg)
sys.exit(1)
except MtvRssError as msg:
sys.stderr.write(self.error_messages['MtvRssError'] % msg)
sys.exit(1)
except Exception as e:
sys.stderr.write("! Error: Unknown error during a Video search (%s)\nError(%s)\n" % (title, e))
sys.exit(1)
if data is None:
return None
if not len(data):
return None
items = []
for match in data:
item_data = {}
for key in list(self.key_translation[1].keys()):
if key in list(match.keys()):
item_data[self.key_translation[1][key]] = match[key]
else:
item_data[self.key_translation[1][key]] = ''
items.append(item_data)
# Channel details and search results
channel = {'channel_title': 'MTV', 'channel_link': 'http://www.mtv.com', 'channel_description': "Visit MTV (Music Television) for TV shows, music videos, celebrity photos, news.", 'channel_numresults': 0, 'channel_returned': 1, 'channel_startindex': 0}
if len(items) == self.page_limit:
channel['channel_numresults'] = self.page_limit * int(pagenumber) + 1
elif len(items) < self.page_limit:
channel['channel_numresults'] = self.page_limit * (int(pagenumber)-1) + len(items)
else:
channel['channel_numresults'] = self.page_limit * int(pagenumber)
channel['channel_startindex'] = self.page_limit * int(pagenumber)
channel['channel_returned'] = len(items)
if len(items):
return [[channel, items]]
        return None
"def",
"searchForVideos",
"(",
"self",
",",
"title",
",",
"pagenumber",
")",
":",
"# v2 api calls - An example that must be customized for each target site",
"if",
"self",
".",
"grabber_title",
"==",
"'MTV'",
":",
"self",
".",
"config",
"[",
"'urls'",
"]",
"[",
"'video.search'",
"]",
"=",
"\"http://api.mtvnservices.com/1/video/search/?term=%s&start-index=%s&max-results=%s\"",
"elif",
"self",
".",
"grabber_title",
"==",
"'MTV Artists'",
":",
"# This search type is not currently implemented",
"self",
".",
"config",
"[",
"'urls'",
"]",
"[",
"'video.search'",
"]",
"=",
"\"http://api.mtvnservices.com/1/artist/search/?term=%s&start-index=%s&max-results=%s\"",
"else",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"! Error: MtvInvalidSearchType - The grabber name (%s) is invalid \\n\"",
"%",
"self",
".",
"grabber_title",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"# Easier for debugging",
"# print self.searchTitle(title, pagenumber, self.page_limit)",
"# print",
"# sys.exit()",
"startindex",
"=",
"(",
"int",
"(",
"pagenumber",
")",
"-",
"1",
")",
"*",
"self",
".",
"page_limit",
"+",
"1",
"try",
":",
"data",
"=",
"self",
".",
"searchTitle",
"(",
"title",
",",
"startindex",
",",
"self",
".",
"page_limit",
")",
"except",
"MtvVideoNotFound",
"as",
"msg",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"%s\\n\"",
"%",
"msg",
")",
"return",
"None",
"except",
"MtvUrlError",
"as",
"msg",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'%s\\n'",
"%",
"msg",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"except",
"MtvHttpError",
"as",
"msg",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"self",
".",
"error_messages",
"[",
"'MtvHttpError'",
"]",
"%",
"msg",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"except",
"MtvRssError",
"as",
"msg",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"self",
".",
"error_messages",
"[",
"'MtvRssError'",
"]",
"%",
"msg",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"except",
"Exception",
"as",
"e",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"! Error: Unknown error during a Video search (%s)\\nError(%s)\\n\"",
"%",
"(",
"title",
",",
"e",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"data",
"is",
"None",
":",
"return",
"None",
"if",
"not",
"len",
"(",
"data",
")",
":",
"return",
"None",
"items",
"=",
"[",
"]",
"for",
"match",
"in",
"data",
":",
"item_data",
"=",
"{",
"}",
"for",
"key",
"in",
"list",
"(",
"self",
".",
"key_translation",
"[",
"1",
"]",
".",
"keys",
"(",
")",
")",
":",
"if",
"key",
"in",
"list",
"(",
"match",
".",
"keys",
"(",
")",
")",
":",
"item_data",
"[",
"self",
".",
"key_translation",
"[",
"1",
"]",
"[",
"key",
"]",
"]",
"=",
"match",
"[",
"key",
"]",
"else",
":",
"item_data",
"[",
"self",
".",
"key_translation",
"[",
"1",
"]",
"[",
"key",
"]",
"]",
"=",
"''",
"items",
".",
"append",
"(",
"item_data",
")",
"# Channel details and search results",
"channel",
"=",
"{",
"'channel_title'",
":",
"'MTV'",
",",
"'channel_link'",
":",
"'http://www.mtv.com'",
",",
"'channel_description'",
":",
"\"Visit MTV (Music Television) for TV shows, music videos, celebrity photos, news.\"",
",",
"'channel_numresults'",
":",
"0",
",",
"'channel_returned'",
":",
"1",
",",
"'channel_startindex'",
":",
"0",
"}",
"if",
"len",
"(",
"items",
")",
"==",
"self",
".",
"page_limit",
":",
"channel",
"[",
"'channel_numresults'",
"]",
"=",
"self",
".",
"page_limit",
"*",
"int",
"(",
"pagenumber",
")",
"+",
"1",
"elif",
"len",
"(",
"items",
")",
"<",
"self",
".",
"page_limit",
":",
"channel",
"[",
"'channel_numresults'",
"]",
"=",
"self",
".",
"page_limit",
"*",
"(",
"int",
"(",
"pagenumber",
")",
"-",
"1",
")",
"+",
"len",
"(",
"items",
")",
"else",
":",
"channel",
"[",
"'channel_numresults'",
"]",
"=",
"self",
".",
"page_limit",
"*",
"int",
"(",
"pagenumber",
")",
"channel",
"[",
"'channel_startindex'",
"]",
"=",
"self",
".",
"page_limit",
"*",
"int",
"(",
"pagenumber",
")",
"channel",
"[",
"'channel_returned'",
"]",
"=",
"len",
"(",
"items",
")",
"if",
"len",
"(",
"items",
")",
":",
"return",
"[",
"[",
"channel",
",",
"items",
"]",
"]",
"return",
"None"
https://github.com/MythTV/mythtv/blob/d282a209cb8be85d036f85a62a8ec971b67d45f4/mythtv/programs/scripts/internetcontent/nv_python_libs/mtv/mtv_api.py#L497-L564
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/telemetry/third_party/pyserial/serial/rfc2217.py | python | TelnetOption.process_incoming | (self, command)
"A",
"DO",
"/",
"DONT",
"/",
"WILL",
"/",
"WONT",
"was",
"received",
"for",
"this",
"option",
"update",
"state",
"and",
"answer",
"when",
"needed",
"."
def process_incoming(self, command):
"""A DO/DONT/WILL/WONT was received for this option, update state and
answer when needed."""
if command == self.ack_yes:
if self.state is REQUESTED:
self.state = ACTIVE
self.active = True
if self.activation_callback is not None:
self.activation_callback()
elif self.state is ACTIVE:
pass
elif self.state is INACTIVE:
self.state = ACTIVE
self.connection.telnetSendOption(self.send_yes, self.option)
self.active = True
if self.activation_callback is not None:
self.activation_callback()
elif self.state is REALLY_INACTIVE:
self.connection.telnetSendOption(self.send_no, self.option)
else:
raise ValueError('option in illegal state %r' % self)
elif command == self.ack_no:
if self.state is REQUESTED:
self.state = INACTIVE
self.active = False
elif self.state is ACTIVE:
self.state = INACTIVE
self.connection.telnetSendOption(self.send_no, self.option)
self.active = False
elif self.state is INACTIVE:
pass
elif self.state is REALLY_INACTIVE:
pass
else:
                raise ValueError('option in illegal state %r' % self)
"def",
"process_incoming",
"(",
"self",
",",
"command",
")",
":",
"if",
"command",
"==",
"self",
".",
"ack_yes",
":",
"if",
"self",
".",
"state",
"is",
"REQUESTED",
":",
"self",
".",
"state",
"=",
"ACTIVE",
"self",
".",
"active",
"=",
"True",
"if",
"self",
".",
"activation_callback",
"is",
"not",
"None",
":",
"self",
".",
"activation_callback",
"(",
")",
"elif",
"self",
".",
"state",
"is",
"ACTIVE",
":",
"pass",
"elif",
"self",
".",
"state",
"is",
"INACTIVE",
":",
"self",
".",
"state",
"=",
"ACTIVE",
"self",
".",
"connection",
".",
"telnetSendOption",
"(",
"self",
".",
"send_yes",
",",
"self",
".",
"option",
")",
"self",
".",
"active",
"=",
"True",
"if",
"self",
".",
"activation_callback",
"is",
"not",
"None",
":",
"self",
".",
"activation_callback",
"(",
")",
"elif",
"self",
".",
"state",
"is",
"REALLY_INACTIVE",
":",
"self",
".",
"connection",
".",
"telnetSendOption",
"(",
"self",
".",
"send_no",
",",
"self",
".",
"option",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'option in illegal state %r'",
"%",
"self",
")",
"elif",
"command",
"==",
"self",
".",
"ack_no",
":",
"if",
"self",
".",
"state",
"is",
"REQUESTED",
":",
"self",
".",
"state",
"=",
"INACTIVE",
"self",
".",
"active",
"=",
"False",
"elif",
"self",
".",
"state",
"is",
"ACTIVE",
":",
"self",
".",
"state",
"=",
"INACTIVE",
"self",
".",
"connection",
".",
"telnetSendOption",
"(",
"self",
".",
"send_no",
",",
"self",
".",
"option",
")",
"self",
".",
"active",
"=",
"False",
"elif",
"self",
".",
"state",
"is",
"INACTIVE",
":",
"pass",
"elif",
"self",
".",
"state",
"is",
"REALLY_INACTIVE",
":",
"pass",
"else",
":",
"raise",
"ValueError",
"(",
"'option in illegal state %r'",
"%",
"self",
")"
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/telemetry/third_party/pyserial/serial/rfc2217.py#L255-L289
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_misc.py | python | DateTime.__iadd__ | (*args)
"__iadd__",
"(",
"self",
"TimeSpan",
"diff",
")",
"-",
">",
"DateTime",
"__iadd__",
"(",
"self",
"DateSpan",
"diff",
")",
"-",
">",
"DateTime"
def __iadd__(*args):
"""
__iadd__(self, TimeSpan diff) -> DateTime
__iadd__(self, DateSpan diff) -> DateTime
"""
        return _misc_.DateTime___iadd__(*args)
"def",
"__iadd__",
"(",
"*",
"args",
")",
":",
"return",
"_misc_",
".",
"DateTime___iadd__",
"(",
"*",
"args",
")"
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L4077-L4082
gimli-org/gimli | 17aa2160de9b15ababd9ef99e89b1bc3277bbb23 | pygimli/solver/solver.py | python | _feNorm | (u, mat)
"Create",
"a",
"norm",
"within",
"a",
"Finite",
"Element",
"space",
"."
def _feNorm(u, mat):
"""Create a norm within a Finite Element space.
Create the Finite Element Norm with a preassembled system matrix.
"""
    return np.sqrt(pg.math.dot(u, mat.mult(u)))
"def",
"_feNorm",
"(",
"u",
",",
"mat",
")",
":",
"return",
"np",
".",
"sqrt",
"(",
"pg",
".",
"math",
".",
"dot",
"(",
"u",
",",
"mat",
".",
"mult",
"(",
"u",
")",
")",
")"
https://github.com/gimli-org/gimli/blob/17aa2160de9b15ababd9ef99e89b1bc3277bbb23/pygimli/solver/solver.py#L2032-L2037
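The quantity computed is sqrt(u^T M u); when mat is the finite-element mass matrix this is the L2 norm of the FE function u. A plain NumPy sketch of the same arithmetic, with a dense matrix standing in for pyGIMLi's sparse one:

import numpy as np

u = np.array([1.0, 2.0])
M = np.eye(2)
norm = np.sqrt(u @ (M @ u))  # equals np.linalg.norm(u) when M is the identity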
h2oai/deepwater | 80e345c582e6ef912a31f42707a2f31c01b064da | mxnet/scripts/lint.py | python | process | (fname, allow_type)
"Process",
"a",
"file",
"."
def process(fname, allow_type):
"""Process a file."""
fname = str(fname)
# HACK: ignore op.h which is automatically generated
if fname.endswith('op.h'):
return
arr = fname.rsplit('.', 1)
if fname.find('#') != -1 or arr[-1] not in allow_type:
return
if arr[-1] in CXX_SUFFIX:
_HELPER.process_cpp(fname, arr[-1])
if arr[-1] in PYTHON_SUFFIX:
        _HELPER.process_python(fname)
"def",
"process",
"(",
"fname",
",",
"allow_type",
")",
":",
"fname",
"=",
"str",
"(",
"fname",
")",
"# HACK: ignore op.h which is automatically generated",
"if",
"fname",
".",
"endswith",
"(",
"'op.h'",
")",
":",
"return",
"arr",
"=",
"fname",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"if",
"fname",
".",
"find",
"(",
"'#'",
")",
"!=",
"-",
"1",
"or",
"arr",
"[",
"-",
"1",
"]",
"not",
"in",
"allow_type",
":",
"return",
"if",
"arr",
"[",
"-",
"1",
"]",
"in",
"CXX_SUFFIX",
":",
"_HELPER",
".",
"process_cpp",
"(",
"fname",
",",
"arr",
"[",
"-",
"1",
"]",
")",
"if",
"arr",
"[",
"-",
"1",
"]",
"in",
"PYTHON_SUFFIX",
":",
"_HELPER",
".",
"process_python",
"(",
"fname",
")"
https://github.com/h2oai/deepwater/blob/80e345c582e6ef912a31f42707a2f31c01b064da/mxnet/scripts/lint.py#L122-L134
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/utils.py | python | percent_encode | (input_str, safe=SAFE_CHARS)
"Urlencodes",
"a",
"string",
"."
def percent_encode(input_str, safe=SAFE_CHARS):
"""Urlencodes a string.
Whereas percent_encode_sequence handles taking a dict/sequence and
producing a percent encoded string, this function deals only with
taking a string (not a dict/sequence) and percent encoding it.
If given the binary type, will simply URL encode it. If given the
text type, will produce the binary type by UTF-8 encoding the
text. If given something else, will convert it to the text type
first.
"""
    # If it's not a binary or text string, make it a text string.
if not isinstance(input_str, (six.binary_type, six.text_type)):
input_str = six.text_type(input_str)
# If it's not bytes, make it bytes by UTF-8 encoding it.
if not isinstance(input_str, six.binary_type):
input_str = input_str.encode('utf-8')
    return quote(input_str, safe=safe)
"def",
"percent_encode",
"(",
"input_str",
",",
"safe",
"=",
"SAFE_CHARS",
")",
":",
"# If its not a binary or text string, make it a text string.",
"if",
"not",
"isinstance",
"(",
"input_str",
",",
"(",
"six",
".",
"binary_type",
",",
"six",
".",
"text_type",
")",
")",
":",
"input_str",
"=",
"six",
".",
"text_type",
"(",
"input_str",
")",
"# If it's not bytes, make it bytes by UTF-8 encoding it.",
"if",
"not",
"isinstance",
"(",
"input_str",
",",
"six",
".",
"binary_type",
")",
":",
"input_str",
"=",
"input_str",
".",
"encode",
"(",
"'utf-8'",
")",
"return",
"quote",
"(",
"input_str",
",",
"safe",
"=",
"safe",
")"
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/utils.py#L572-L590
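A few worked examples, assuming botocore's default SAFE_CHARS is the RFC 3986 unreserved punctuation '-._~':

percent_encode('a b')      # -> 'a%20b'
percent_encode('key=1/2')  # -> 'key%3D1%2F2'  ('=' and '/' are not in SAFE_CHARS)
percent_encode(u'\u00e9')  # -> '%C3%A9'  (text is UTF-8 encoded first)
percent_encode(123)        # -> '123'  (non-strings are stringified first)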
panda3d/panda3d | 833ad89ebad58395d0af0b7ec08538e5e4308265 | direct/src/directtools/DirectSession.py | python | DirectSession.getAndSetName | (self, nodePath)
"Prompt",
"user",
"for",
"new",
"node",
"path",
"name"
def getAndSetName(self, nodePath):
""" Prompt user for new node path name """
from tkinter.simpledialog import askstring
newName = askstring('Node Path: ' + nodePath.getName(),
'Enter new name:')
if newName:
nodePath.setName(newName)
            messenger.send('DIRECT_nodePathSetName', [nodePath, newName])
"def",
"getAndSetName",
"(",
"self",
",",
"nodePath",
")",
":",
"from",
"tkinter",
".",
"simpledialog",
"import",
"askstring",
"newName",
"=",
"askstring",
"(",
"'Node Path: '",
"+",
"nodePath",
".",
"getName",
"(",
")",
",",
"'Enter new name:'",
")",
"if",
"newName",
":",
"nodePath",
".",
"setName",
"(",
"newName",
")",
"messenger",
".",
"send",
"(",
"'DIRECT_nodePathSetName'",
",",
"[",
"nodePath",
",",
"newName",
"]",
")"
https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/direct/src/directtools/DirectSession.py#L942-L949
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/intrinsic_wrapper.py | python | shfl_down_sync | (mask, value, delta)
"Shuffles",
"value",
"across",
"the",
"masked",
"warp",
"and",
"returns",
"the",
"value",
"from",
"(",
"laneid",
"+",
"delta",
")",
".",
"If",
"this",
"is",
"outside",
"the",
"warp",
"then",
"the",
"given",
"value",
"is",
"returned",
"."
def shfl_down_sync(mask, value, delta):
"""
Shuffles value across the masked warp and returns the value
from (laneid + delta). If this is outside the warp, then the
given value is returned.
"""
    return numba.cuda.shfl_sync_intrinsic(mask, 2, value, delta, 0x1f)[0]
"def",
"shfl_down_sync",
"(",
"mask",
",",
"value",
",",
"delta",
")",
":",
"return",
"numba",
".",
"cuda",
".",
"shfl_sync_intrinsic",
"(",
"mask",
",",
"2",
",",
"value",
",",
"delta",
",",
"0x1f",
")",
"[",
"0",
"]"
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/intrinsic_wrapper.py#L63-L69
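The classic use is a warp-level tree reduction. A hedged CUDA-kernel sketch with Numba (assumes 32-lane warps, one block per warp, and a CUDA-capable GPU at run time):

from numba import cuda

@cuda.jit
def warp_sum(arr):
    i = cuda.grid(1)
    v = arr[i]
    # Halve the stride each step; after five steps lane 0 holds the warp's sum.
    delta = 16
    while delta > 0:
        v += cuda.shfl_down_sync(0xFFFFFFFF, v, delta)
        delta //= 2
    if cuda.laneid == 0:
        arr[0] = v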
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/decimal.py | python | Context.shift | (self, a, b)
"Returns",
"a",
"shifted",
"copy",
"of",
"a",
"b",
"times",
"."
def shift(self, a, b):
"""Returns a shifted copy of a, b times.
The coefficient of the result is a shifted copy of the digits
in the coefficient of the first operand. The number of places
to shift is taken from the absolute value of the second operand,
with the shift being to the left if the second operand is
positive or to the right otherwise. Digits shifted into the
coefficient are zeros.
>>> ExtendedContext.shift(Decimal('34'), Decimal('8'))
Decimal('400000000')
>>> ExtendedContext.shift(Decimal('12'), Decimal('9'))
Decimal('0')
>>> ExtendedContext.shift(Decimal('123456789'), Decimal('-2'))
Decimal('1234567')
>>> ExtendedContext.shift(Decimal('123456789'), Decimal('0'))
Decimal('123456789')
>>> ExtendedContext.shift(Decimal('123456789'), Decimal('+2'))
Decimal('345678900')
>>> ExtendedContext.shift(88888888, 2)
Decimal('888888800')
>>> ExtendedContext.shift(Decimal(88888888), 2)
Decimal('888888800')
>>> ExtendedContext.shift(88888888, Decimal(2))
Decimal('888888800')
"""
a = _convert_other(a, raiseit=True)
        return a.shift(b, context=self)
"def",
"shift",
"(",
"self",
",",
"a",
",",
"b",
")",
":",
"a",
"=",
"_convert_other",
"(",
"a",
",",
"raiseit",
"=",
"True",
")",
"return",
"a",
".",
"shift",
"(",
"b",
",",
"context",
"=",
"self",
")"
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/decimal.py#L5255-L5283
Tencent/TNN | 7acca99f54c55747b415a4c57677403eebc7b706 | third_party/flatbuffers/python/flatbuffers/flexbuffers.py | python | BitWidth.I | (value)
"Returns",
"the",
"minimum",
"BitWidth",
"to",
"encode",
"signed",
"integer",
"value",
"."
def I(value):
"""Returns the minimum `BitWidth` to encode signed integer value."""
# -2^(n-1) <= value < 2^(n-1)
# -2^n <= 2 * value < 2^n
# 2 * value < 2^n, when value >= 0 or 2 * (-value) <= 2^n, when value < 0
# 2 * value < 2^n, when value >= 0 or 2 * (-value) - 1 < 2^n, when value < 0
#
# if value >= 0:
# return BitWidth.U(2 * value)
# else:
# return BitWidth.U(2 * (-value) - 1) # ~x = -x - 1
value *= 2
    return BitWidth.U(value if value >= 0 else ~value)
"def",
"I",
"(",
"value",
")",
":",
"# -2^(n-1) <= value < 2^(n-1)",
"# -2^n <= 2 * value < 2^n",
"# 2 * value < 2^n, when value >= 0 or 2 * (-value) <= 2^n, when value < 0",
"# 2 * value < 2^n, when value >= 0 or 2 * (-value) - 1 < 2^n, when value < 0",
"#",
"# if value >= 0:",
"# return BitWidth.U(2 * value)",
"# else:",
"# return BitWidth.U(2 * (-value) - 1) # ~x = -x - 1",
"value",
"*=",
"2",
"return",
"BitWidth",
".",
"U",
"(",
"value",
"if",
"value",
">=",
"0",
"else",
"~",
"value",
")"
https://github.com/Tencent/TNN/blob/7acca99f54c55747b415a4c57677403eebc7b706/third_party/flatbuffers/python/flatbuffers/flexbuffers.py#L61-L73
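Worked examples of the doubling trick, assuming BitWidth.U buckets an unsigned value into the smallest of 8/16/32/64 bits:

# I(127):  2*127  = 254               -> 8 bits  (int8 holds -128..127)
# I(128):  2*128  = 256               -> 16 bits (128 overflows int8)
# I(-128): 2*-128 = -256, ~-256 = 255 -> 8 bits  (int8 holds -128)
# I(-129): 2*-129 = -258, ~-258 = 257 -> 16 bits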
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/Tkinter.py | python | Menu.invoke | (self, index)
"Invoke",
"a",
"menu",
"item",
"identified",
"by",
"INDEX",
"and",
"execute",
"the",
"associated",
"command",
"."
def invoke(self, index):
"""Invoke a menu item identified by INDEX and execute
the associated command."""
        return self.tk.call(self._w, 'invoke', index)
"def",
"invoke",
"(",
"self",
",",
"index",
")",
":",
"return",
"self",
".",
"tk",
".",
"call",
"(",
"self",
".",
"_w",
",",
"'invoke'",
",",
"index",
")"
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/Tkinter.py#L2736-L2739
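A Python 3 equivalent sketch (tkinter rather than this Python 2 Tkinter):

import tkinter as tk

root = tk.Tk()
menu = tk.Menu(root, tearoff=0)
menu.add_command(label='Beep', command=root.bell)
menu.invoke(0)  # runs root.bell as if the 'Beep' entry were clicked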
LiquidPlayer/LiquidCore | 9405979363f2353ac9a71ad8ab59685dd7f919c9 | deps/node-10.15.3/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py | python | CalculateVariables | (default_variables, params)
"Calculate",
"additional",
"variables",
"for",
"use",
"in",
"the",
"build",
"(",
"called",
"by",
"gyp",
")",
"."
def CalculateVariables(default_variables, params):
"""Calculate additional variables for use in the build (called by gyp)."""
flavor = gyp.common.GetFlavor(params)
if flavor == 'mac':
default_variables.setdefault('OS', 'mac')
default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
default_variables.setdefault('SHARED_LIB_DIR',
generator_default_variables['PRODUCT_DIR'])
default_variables.setdefault('LIB_DIR',
generator_default_variables['PRODUCT_DIR'])
# Copy additional generator configuration data from Xcode, which is shared
# by the Mac Make generator.
import gyp.generator.xcode as xcode_generator
global generator_additional_non_configuration_keys
generator_additional_non_configuration_keys = getattr(xcode_generator,
'generator_additional_non_configuration_keys', [])
global generator_additional_path_sections
generator_additional_path_sections = getattr(xcode_generator,
'generator_additional_path_sections', [])
global generator_extra_sources_for_rules
generator_extra_sources_for_rules = getattr(xcode_generator,
'generator_extra_sources_for_rules', [])
COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'})
else:
operating_system = flavor
if flavor == 'android':
operating_system = 'linux' # Keep this legacy behavior for now.
default_variables.setdefault('OS', operating_system)
if flavor == 'aix':
default_variables.setdefault('SHARED_LIB_SUFFIX', '.a')
else:
default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
default_variables.setdefault('SHARED_LIB_DIR','$(builddir)/lib.$(TOOLSET)')
    default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)')
"def",
"CalculateVariables",
"(",
"default_variables",
",",
"params",
")",
":",
"flavor",
"=",
"gyp",
".",
"common",
".",
"GetFlavor",
"(",
"params",
")",
"if",
"flavor",
"==",
"'mac'",
":",
"default_variables",
".",
"setdefault",
"(",
"'OS'",
",",
"'mac'",
")",
"default_variables",
".",
"setdefault",
"(",
"'SHARED_LIB_SUFFIX'",
",",
"'.dylib'",
")",
"default_variables",
".",
"setdefault",
"(",
"'SHARED_LIB_DIR'",
",",
"generator_default_variables",
"[",
"'PRODUCT_DIR'",
"]",
")",
"default_variables",
".",
"setdefault",
"(",
"'LIB_DIR'",
",",
"generator_default_variables",
"[",
"'PRODUCT_DIR'",
"]",
")",
"# Copy additional generator configuration data from Xcode, which is shared",
"# by the Mac Make generator.",
"import",
"gyp",
".",
"generator",
".",
"xcode",
"as",
"xcode_generator",
"global",
"generator_additional_non_configuration_keys",
"generator_additional_non_configuration_keys",
"=",
"getattr",
"(",
"xcode_generator",
",",
"'generator_additional_non_configuration_keys'",
",",
"[",
"]",
")",
"global",
"generator_additional_path_sections",
"generator_additional_path_sections",
"=",
"getattr",
"(",
"xcode_generator",
",",
"'generator_additional_path_sections'",
",",
"[",
"]",
")",
"global",
"generator_extra_sources_for_rules",
"generator_extra_sources_for_rules",
"=",
"getattr",
"(",
"xcode_generator",
",",
"'generator_extra_sources_for_rules'",
",",
"[",
"]",
")",
"COMPILABLE_EXTENSIONS",
".",
"update",
"(",
"{",
"'.m'",
":",
"'objc'",
",",
"'.mm'",
":",
"'objcxx'",
"}",
")",
"else",
":",
"operating_system",
"=",
"flavor",
"if",
"flavor",
"==",
"'android'",
":",
"operating_system",
"=",
"'linux'",
"# Keep this legacy behavior for now.",
"default_variables",
".",
"setdefault",
"(",
"'OS'",
",",
"operating_system",
")",
"if",
"flavor",
"==",
"'aix'",
":",
"default_variables",
".",
"setdefault",
"(",
"'SHARED_LIB_SUFFIX'",
",",
"'.a'",
")",
"else",
":",
"default_variables",
".",
"setdefault",
"(",
"'SHARED_LIB_SUFFIX'",
",",
"'.so'",
")",
"default_variables",
".",
"setdefault",
"(",
"'SHARED_LIB_DIR'",
",",
"'$(builddir)/lib.$(TOOLSET)'",
")",
"default_variables",
".",
"setdefault",
"(",
"'LIB_DIR'",
",",
"'$(obj).$(TOOLSET)'",
")"
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py#L66-L100
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py | python | uCSIsCatL | (code)
"Check",
"whether",
"the",
"character",
"is",
"part",
"of",
"L",
"UCS",
"Category"
def uCSIsCatL(code):
"""Check whether the character is part of L UCS Category """
ret = libxml2mod.xmlUCSIsCatL(code)
    return ret
"def",
"uCSIsCatL",
"(",
"code",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlUCSIsCatL",
"(",
"code",
")",
"return",
"ret"
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py#L2271-L2274
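A quick usage sketch (requires the libxml2 Python bindings; the return value is a C-style 1/0):

import libxml2

libxml2.uCSIsCatL(ord('A'))  # -> 1: 'A' is in Unicode category L (Letter)
libxml2.uCSIsCatL(ord('3'))  # -> 0: digits are category N, not L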
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/Maya_AnimationRiggingTools/ARTv1/MayaTools/General/Scripts/ART_skeletonBuilder_UI.py | python | SkeletonBuilder_UI.buildFinalSkeleton | (self)
"Here",
"we",
"build",
"the",
"joints",
"I",
"have",
"forked",
"it",
"to",
"treat",
"the",
"facial",
"joint",
"movers",
"a",
"little",
"differently",
"As",
"the",
"facial",
"rigging",
"has",
"already",
"come",
"in",
"I",
"will",
"parent",
"con"
def buildFinalSkeleton(self):
'''
Here we build the joints, I have forked it to treat the facial joint movers a little differently
As the facial rigging has already come in, I will parent con
'''
#hide and detach the facial mask from the joint movers
for f in utils.getUType('faceModule'):
currentFace = face.FaceModule(faceNode=f)
if currentFace.faceMask.active:
self.deactivateMasks()
currentFace.faceMask.deleteSdkParentConstraints()
#TODO: Replace these when going back
# Turn off aim so that it bakes down any changes we may have done with it.
self.jointMover_aimModeOff()
cmds.lockNode("JointMover", lock = False)
cmds.rename("root", "joint_mover_root")
#build the root
cmds.select(clear = True)
joint = cmds.joint(name = "root")
constraint = cmds.parentConstraint("root_lra", joint)[0]
cmds.delete(constraint)
cmds.select(clear = True)
#is filled in the next block and then used to run a parenting loop after
parentInfo = []
#remove any removed facial joint movers
faceModules = [face.FaceModule(faceNode=f) for f in utils.getUType('faceModule')]
for fm in faceModules:
currentActiveMovers = fm.activeJointMovers
for mover in self.geoMovers:
if utils.attrExists(mover + '.lra'):
if mover not in currentActiveMovers:
self.geoMovers.pop(self.geoMovers.index(mover))
for mover in self.geoMovers:
mover = mover.partition("|")[0]
children = cmds.listRelatives(mover, children = True, type = "transform")
if children != None or mover.find("ball") == 0:
jointName = None
moverParent = None
lra = None
jointParent = None
joint = None
## FACIAL
#branch the code and deal with facial joint movers differently
if utils.attrExists(mover + '.lra'):
moverGrp = mover + '_grp'
moverParent = cmds.listRelatives(moverGrp, parent = True)[0]
lra = cmds.listConnections(mover + '.lra')[0]
jointName = mover.replace('_mover','')
jointParent = moverParent.partition("_mover")[0]
cmds.select(clear = True)
joint = cmds.joint(name = jointName)
#snap it to the position/orientation
sdk = cmds.listConnections(mover + '.sdk')[0]
cmds.delete(cmds.parentConstraint(sdk, joint))
cmds.makeIdentity(joint, r = 1, apply = True)
cmds.select(clear = True)
parentInfo.append([joint, jointParent])
## END FACIAL
#treat as normal (jeremy's code below)
else:
print mover
prefix = mover.partition("_geo_mover")[0]
suffix = mover.partition("_geo_mover")[2]
lra = prefix + suffix + "_lra"
jointName = prefix + suffix
moverGrp = prefix + "_mover" + suffix + "_grp"
moverParent = cmds.listRelatives(moverGrp, parent = True)[0]
jpPrefix = moverParent.partition("_mover")[0]
jpSuffix = moverParent.partition("_mover")[2]
jointParent = jpPrefix + jpSuffix
#create and parent the joints
cmds.select(clear = True)
joint = cmds.joint(name = jointName)
#snap into place
cmds.delete(cmds.parentConstraint(lra, joint)[0])
cmds.select(clear = True)
parentInfo.append([joint, jointParent])
cmds.makeIdentity(joint, r = 1, apply = True)
#validate that all these joints exist as we just concatenated their names
for node in [jointName, moverParent, lra]:
if not cmds.objExists(node):
cmds.error('ART_skeletonBuilder_UI:buildFinalSkeleton: Cannot find node with name: ' + node)
#TODO: long names will be invalidated here
for item in parentInfo:
joint = item[0]
parent = item[1]
if parent != "":
cmds.parent(joint, parent)
#duplicate the root, delete the old root, and rename the dupe root
newRoot = cmds.duplicate("root")[0]
cmds.delete("root")
cmds.rename(newRoot, "root") | [
"def",
"buildFinalSkeleton",
"(",
"self",
")",
":",
"#hide and detach the facial mask from the joint movers",
"for",
"f",
"in",
"utils",
".",
"getUType",
"(",
"'faceModule'",
")",
":",
"currentFace",
"=",
"face",
".",
"FaceModule",
"(",
"faceNode",
"=",
"f",
")",
"if",
"currentFace",
".",
"faceMask",
".",
"active",
":",
"self",
".",
"deactivateMasks",
"(",
")",
"currentFace",
".",
"faceMask",
".",
"deleteSdkParentConstraints",
"(",
")",
"#TODO: Replace these when going back",
"# Turn off aim so that it bakes down any changes we may have done with it.",
"self",
".",
"jointMover_aimModeOff",
"(",
")",
"cmds",
".",
"lockNode",
"(",
"\"JointMover\"",
",",
"lock",
"=",
"False",
")",
"cmds",
".",
"rename",
"(",
"\"root\"",
",",
"\"joint_mover_root\"",
")",
"#build the root",
"cmds",
".",
"select",
"(",
"clear",
"=",
"True",
")",
"joint",
"=",
"cmds",
".",
"joint",
"(",
"name",
"=",
"\"root\"",
")",
"constraint",
"=",
"cmds",
".",
"parentConstraint",
"(",
"\"root_lra\"",
",",
"joint",
")",
"[",
"0",
"]",
"cmds",
".",
"delete",
"(",
"constraint",
")",
"cmds",
".",
"select",
"(",
"clear",
"=",
"True",
")",
"#is filled in the next block and then used to run a parenting loop after",
"parentInfo",
"=",
"[",
"]",
"#remove any removed facial joint movers",
"faceModules",
"=",
"[",
"face",
".",
"FaceModule",
"(",
"faceNode",
"=",
"f",
")",
"for",
"f",
"in",
"utils",
".",
"getUType",
"(",
"'faceModule'",
")",
"]",
"for",
"fm",
"in",
"faceModules",
":",
"currentActiveMovers",
"=",
"fm",
".",
"activeJointMovers",
"for",
"mover",
"in",
"self",
".",
"geoMovers",
":",
"if",
"utils",
".",
"attrExists",
"(",
"mover",
"+",
"'.lra'",
")",
":",
"if",
"mover",
"not",
"in",
"currentActiveMovers",
":",
"self",
".",
"geoMovers",
".",
"pop",
"(",
"self",
".",
"geoMovers",
".",
"index",
"(",
"mover",
")",
")",
"for",
"mover",
"in",
"self",
".",
"geoMovers",
":",
"mover",
"=",
"mover",
".",
"partition",
"(",
"\"|\"",
")",
"[",
"0",
"]",
"children",
"=",
"cmds",
".",
"listRelatives",
"(",
"mover",
",",
"children",
"=",
"True",
",",
"type",
"=",
"\"transform\"",
")",
"if",
"children",
"!=",
"None",
"or",
"mover",
".",
"find",
"(",
"\"ball\"",
")",
"==",
"0",
":",
"jointName",
"=",
"None",
"moverParent",
"=",
"None",
"lra",
"=",
"None",
"jointParent",
"=",
"None",
"joint",
"=",
"None",
"## FACIAL",
"#branch the code and deal with facial joint movers differently",
"if",
"utils",
".",
"attrExists",
"(",
"mover",
"+",
"'.lra'",
")",
":",
"moverGrp",
"=",
"mover",
"+",
"'_grp'",
"moverParent",
"=",
"cmds",
".",
"listRelatives",
"(",
"moverGrp",
",",
"parent",
"=",
"True",
")",
"[",
"0",
"]",
"lra",
"=",
"cmds",
".",
"listConnections",
"(",
"mover",
"+",
"'.lra'",
")",
"[",
"0",
"]",
"jointName",
"=",
"mover",
".",
"replace",
"(",
"'_mover'",
",",
"''",
")",
"jointParent",
"=",
"moverParent",
".",
"partition",
"(",
"\"_mover\"",
")",
"[",
"0",
"]",
"cmds",
".",
"select",
"(",
"clear",
"=",
"True",
")",
"joint",
"=",
"cmds",
".",
"joint",
"(",
"name",
"=",
"jointName",
")",
"#snap it to the position/orientation",
"sdk",
"=",
"cmds",
".",
"listConnections",
"(",
"mover",
"+",
"'.sdk'",
")",
"[",
"0",
"]",
"cmds",
".",
"delete",
"(",
"cmds",
".",
"parentConstraint",
"(",
"sdk",
",",
"joint",
")",
")",
"cmds",
".",
"makeIdentity",
"(",
"joint",
",",
"r",
"=",
"1",
",",
"apply",
"=",
"True",
")",
"cmds",
".",
"select",
"(",
"clear",
"=",
"True",
")",
"parentInfo",
".",
"append",
"(",
"[",
"joint",
",",
"jointParent",
"]",
")",
"## END FACIAL",
"#treat as normal (jeremy's code below)",
"else",
":",
"print",
"mover",
"prefix",
"=",
"mover",
".",
"partition",
"(",
"\"_geo_mover\"",
")",
"[",
"0",
"]",
"suffix",
"=",
"mover",
".",
"partition",
"(",
"\"_geo_mover\"",
")",
"[",
"2",
"]",
"lra",
"=",
"prefix",
"+",
"suffix",
"+",
"\"_lra\"",
"jointName",
"=",
"prefix",
"+",
"suffix",
"moverGrp",
"=",
"prefix",
"+",
"\"_mover\"",
"+",
"suffix",
"+",
"\"_grp\"",
"moverParent",
"=",
"cmds",
".",
"listRelatives",
"(",
"moverGrp",
",",
"parent",
"=",
"True",
")",
"[",
"0",
"]",
"jpPrefix",
"=",
"moverParent",
".",
"partition",
"(",
"\"_mover\"",
")",
"[",
"0",
"]",
"jpSuffix",
"=",
"moverParent",
".",
"partition",
"(",
"\"_mover\"",
")",
"[",
"2",
"]",
"jointParent",
"=",
"jpPrefix",
"+",
"jpSuffix",
"#create and parent the joints",
"cmds",
".",
"select",
"(",
"clear",
"=",
"True",
")",
"joint",
"=",
"cmds",
".",
"joint",
"(",
"name",
"=",
"jointName",
")",
"#snap into place",
"cmds",
".",
"delete",
"(",
"cmds",
".",
"parentConstraint",
"(",
"lra",
",",
"joint",
")",
"[",
"0",
"]",
")",
"cmds",
".",
"select",
"(",
"clear",
"=",
"True",
")",
"parentInfo",
".",
"append",
"(",
"[",
"joint",
",",
"jointParent",
"]",
")",
"cmds",
".",
"makeIdentity",
"(",
"joint",
",",
"r",
"=",
"1",
",",
"apply",
"=",
"True",
")",
"#validate that all these joints exist as we just concatenated their names",
"for",
"node",
"in",
"[",
"jointName",
",",
"moverParent",
",",
"lra",
"]",
":",
"if",
"not",
"cmds",
".",
"objExists",
"(",
"node",
")",
":",
"cmds",
".",
"error",
"(",
"'ART_skeletonBuilder_UI:buildFinalSkeleton: Cannot find node with name: '",
"+",
"node",
")",
"#TODO: long names will be invalidated here",
"for",
"item",
"in",
"parentInfo",
":",
"joint",
"=",
"item",
"[",
"0",
"]",
"parent",
"=",
"item",
"[",
"1",
"]",
"if",
"parent",
"!=",
"\"\"",
":",
"cmds",
".",
"parent",
"(",
"joint",
",",
"parent",
")",
"#duplicate the root, delete the old root, and rename the dupe root",
"newRoot",
"=",
"cmds",
".",
"duplicate",
"(",
"\"root\"",
")",
"[",
"0",
"]",
"cmds",
".",
"delete",
"(",
"\"root\"",
")",
"cmds",
".",
"rename",
"(",
"newRoot",
",",
"\"root\"",
")"
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/Maya_AnimationRiggingTools/ARTv1/MayaTools/General/Scripts/ART_skeletonBuilder_UI.py#L9115-L9229
BlzFans/wke | b0fa21158312e40c5fbd84682d643022b6c34a93 | cygwin/lib/python2.6/decimal.py | python | _div_nearest | (a, b)
"Closest",
"integer",
"to",
"a",
"/",
"b",
"a",
"and",
"b",
"positive",
"integers",
";",
"rounds",
"to",
"even",
"in",
"the",
"case",
"of",
"a",
"tie",
"."
] | def _div_nearest(a, b):
"""Closest integer to a/b, a and b positive integers; rounds to even
in the case of a tie.
"""
q, r = divmod(a, b)
    return q + (2*r + (q&1) > b)
"def",
"_div_nearest",
"(",
"a",
",",
"b",
")",
":",
"q",
",",
"r",
"=",
"divmod",
"(",
"a",
",",
"b",
")",
"return",
"q",
"+",
"(",
"2",
"*",
"r",
"+",
"(",
"q",
"&",
"1",
")",
">",
"b",
")"
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/decimal.py#L5024-L5030
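Worked examples of the round-half-to-even behaviour:

_div_nearest(7, 2)  # 3.5  -> 4  (q=3, r=1: 2*1 + (3&1) = 3 > 2)
_div_nearest(5, 2)  # 2.5  -> 2  (tie rounds to the even neighbour)
_div_nearest(7, 3)  # 2.33 -> 2  (plain nearest-integer case)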
cyberbotics/webots | af7fa7d68dcf7b4550f1f2e132092b41e83698fc | resources/osm_importer/webots_objects/speed_limit.py | python | SpeedLimit.__init__ | (self, country)
"Constructor",
":",
"display",
"a",
"warning",
"if",
"the",
"country",
"is",
"not",
"supported",
"."
] | def __init__(self, country):
"""Constructor: display a warning if the country is not supported."""
self.country = country # Expected: 'CH', 'US', etc.
if country is not None and country not in ['FR', 'DE', 'JP', 'CH', 'GB', 'US']:
print('Warning: The exported Road.speedLimit may be wrong because the maxspeed rules are not'
' yet implemented for this country.')
print(' Supported countries:')
print(' - France')
print(' - Germany (default)')
print(' - Japan')
print(' - Switzerland')
print(' - United Kingdom')
print(' - United States')
print('Please contact [email protected] to support your country (you may also safely ignore this warning).') | [
"def",
"__init__",
"(",
"self",
",",
"country",
")",
":",
"self",
".",
"country",
"=",
"country",
"# Expected: 'CH', 'US', etc.",
"if",
"country",
"is",
"not",
"None",
"and",
"country",
"not",
"in",
"[",
"'FR'",
",",
"'DE'",
",",
"'JP'",
",",
"'CH'",
",",
"'GB'",
",",
"'US'",
"]",
":",
"print",
"(",
"'Warning: The exported Road.speedLimit may be wrong because the maxspeed rules is not'",
"' yet implemented for this country.'",
")",
"print",
"(",
"' Supported countries:'",
")",
"print",
"(",
"' - France'",
")",
"print",
"(",
"' - Germany(default)'",
")",
"print",
"(",
"' - Japan'",
")",
"print",
"(",
"' - Switzerland'",
")",
"print",
"(",
"' - United Kingdom'",
")",
"print",
"(",
"' - United States'",
")",
"print",
"(",
"'Please contact [email protected] to support your country (you may also safely ignore this warning).'",
")"
] | https://github.com/cyberbotics/webots/blob/af7fa7d68dcf7b4550f1f2e132092b41e83698fc/resources/osm_importer/webots_objects/speed_limit.py#L22-L36 |
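A hedged usage sketch for the constructor above; the import path is an assumption inferred from the record's `path` field, not confirmed by the source:

```python
# Hypothetical import path based on resources/osm_importer/webots_objects/speed_limit.py
from webots_objects.speed_limit import SpeedLimit

SpeedLimit('CH')   # supported country code: constructs silently
SpeedLimit(None)   # None is explicitly exempted from the check
SpeedLimit('ES')   # unsupported code: prints the warning block shown above
```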
||
cvxpy/cvxpy | 5165b4fb750dfd237de8659383ef24b4b2e33aaf | cvxpy/atoms/elementwise/minimum.py | python | minimum.is_atom_log_log_concave | (self) | return True | Is the atom log-log concave? | Is the atom log-log concave? | [
"Is",
"the",
"atom",
"log",
"-",
"log",
"concave?"
] | def is_atom_log_log_concave(self) -> bool:
"""Is the atom log-log concave?
"""
return True | [
"def",
"is_atom_log_log_concave",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"True"
] | https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/atoms/elementwise/minimum.py#L64-L67 |
|
echronos/echronos | c996f1d2c8af6c6536205eb319c1bf1d4d84569c | external_tools/pystache/renderer.py | python | Renderer.register_formatter | (self, specifier, function) | Register a specific function as the formatter for a given specifier. | Register a specific function as the formatter for a given specifier. | [
"Register",
"a",
"specific",
"function",
"as",
"the",
"formatter",
"for",
"a",
"given",
"specifier",
"."
] | def register_formatter(self, specifier, function):
"""Register a specific function as the formatter for a given specifier."""
assert callable(function)
self.formatters[specifier] = function | [
"def",
"register_formatter",
"(",
"self",
",",
"specifier",
",",
"function",
")",
":",
"assert",
"callable",
"(",
"function",
")",
"self",
".",
"formatters",
"[",
"specifier",
"]",
"=",
"function"
] | https://github.com/echronos/echronos/blob/c996f1d2c8af6c6536205eb319c1bf1d4d84569c/external_tools/pystache/renderer.py#L373-L376 |
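A minimal sketch of `register_formatter`; it assumes this fork's `Renderer` initializes the `formatters` mapping that the method body writes into (upstream pystache has no formatter support, so treat the feature as fork-specific):

```python
from pystache.renderer import Renderer  # this repo's bundled pystache fork

renderer = Renderer()
renderer.register_formatter('upper', lambda s: s.upper())

# register_formatter stores the callable keyed by its specifier:
assert renderer.formatters['upper']('abc') == 'ABC'
```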
||
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/compiler/xla/xla.py | python | XLACompileContext.AddValue | (self, val) | return result | Add `val` to the current context and its outer context recursively. | Add `val` to the current context and its outer context recursively. | [
"Add",
"val",
"to",
"the",
"current",
"context",
"and",
"its",
"outer",
"context",
"recursively",
"."
] | def AddValue(self, val):
"""Add `val` to the current context and its outer context recursively."""
if val.name in self._values:
# Use the real value if it comes from outer context.
result = self._external_values.get(val.name)
return val if result is None else result
result = val
self._values.add(val.name)
if self._outer_context:
result = self._outer_context.AddValue(val)
self._values.add(result.name)
self._external_values[val.name] = result
return result | [
"def",
"AddValue",
"(",
"self",
",",
"val",
")",
":",
"if",
"val",
".",
"name",
"in",
"self",
".",
"_values",
":",
"# Use the real value if it comes from outer context.",
"result",
"=",
"self",
".",
"_external_values",
".",
"get",
"(",
"val",
".",
"name",
")",
"return",
"val",
"if",
"result",
"is",
"None",
"else",
"result",
"result",
"=",
"val",
"self",
".",
"_values",
".",
"add",
"(",
"val",
".",
"name",
")",
"if",
"self",
".",
"_outer_context",
":",
"result",
"=",
"self",
".",
"_outer_context",
".",
"AddValue",
"(",
"val",
")",
"self",
".",
"_values",
".",
"add",
"(",
"result",
".",
"name",
")",
"self",
".",
"_external_values",
"[",
"val",
".",
"name",
"]",
"=",
"result",
"return",
"result"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/compiler/xla/xla.py#L268-L283 |
|
Smorodov/Multitarget-tracker | bee300e8bfd660c86cbeb6892c65a5b7195c9381 | thirdparty/pybind11/tools/clang/cindex.py | python | Config.set_compatibility_check | (check_status) | Perform compatibility check when loading libclang
The python bindings are only tested and evaluated with the version of
libclang they are provided with. To ensure correct behavior a (limited)
compatibility check is performed when loading the bindings. This check
will throw an exception, as soon as it fails.
In case these bindings are used with an older version of libclang, parts
that have been stable between releases may still work. Users of the
python bindings can disable the compatibility check. This will cause
the python bindings to load, even though they are written for a newer
version of libclang. Failures now arise if unsupported or incompatible
features are accessed. The user is required to test themselves if the
features they are using are available and compatible between different
libclang versions. | Perform compatibility check when loading libclang | [
"Perform",
"compatibility",
"check",
"when",
"loading",
"libclang"
] | def set_compatibility_check(check_status):
""" Perform compatibility check when loading libclang
The python bindings are only tested and evaluated with the version of
libclang they are provided with. To ensure correct behavior a (limited)
compatibility check is performed when loading the bindings. This check
will throw an exception, as soon as it fails.
In case these bindings are used with an older version of libclang, parts
that have been stable between releases may still work. Users of the
python bindings can disable the compatibility check. This will cause
the python bindings to load, even though they are written for a newer
version of libclang. Failures now arise if unsupported or incompatible
features are accessed. The user is required to test themselves if the
features they are using are available and compatible between different
libclang versions.
"""
if Config.loaded:
raise Exception("compatibility_check must be set before before " \
"using any other functionalities in libclang.")
Config.compatibility_check = check_status | [
"def",
"set_compatibility_check",
"(",
"check_status",
")",
":",
"if",
"Config",
".",
"loaded",
":",
"raise",
"Exception",
"(",
"\"compatibility_check must be set before before \"",
"\"using any other functionalities in libclang.\"",
")",
"Config",
".",
"compatibility_check",
"=",
"check_status"
] | https://github.com/Smorodov/Multitarget-tracker/blob/bee300e8bfd660c86cbeb6892c65a5b7195c9381/thirdparty/pybind11/tools/clang/cindex.py#L3789-L3810 |
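Usage sketch: the compatibility check must be toggled before any other cindex call, otherwise the guard above raises. The import below assumes the standard `clang.cindex` layout that this bundled copy mirrors:

```python
from clang.cindex import Config, Index  # assumed module layout

# Let the bindings load against a mismatched libclang; must run before
# any other cindex functionality is touched.
Config.set_compatibility_check(False)

index = Index.create()  # later calls proceed without the version check
```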
||
okex/V3-Open-API-SDK | c5abb0db7e2287718e0055e17e57672ce0ec7fd9 | okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/html5lib/serializer.py | python | serialize | (input, tree="etree", encoding=None, **serializer_opts) | return s.render(walker(input), encoding) | Serializes the input token stream using the specified treewalker
:arg input: the token stream to serialize
:arg tree: the treewalker to use
:arg encoding: the encoding to use
:arg serializer_opts: any options to pass to the
:py:class:`html5lib.serializer.HTMLSerializer` that gets created
:returns: the tree serialized as a string
Example:
>>> from html5lib.html5parser import parse
>>> from html5lib.serializer import serialize
>>> token_stream = parse('<html><body><p>Hi!</p></body></html>')
>>> serialize(token_stream, omit_optional_tags=False)
'<html><head></head><body><p>Hi!</p></body></html>' | Serializes the input token stream using the specified treewalker | [
"Serializes",
"the",
"input",
"token",
"stream",
"using",
"the",
"specified",
"treewalker"
] | def serialize(input, tree="etree", encoding=None, **serializer_opts):
"""Serializes the input token stream using the specified treewalker
:arg input: the token stream to serialize
:arg tree: the treewalker to use
:arg encoding: the encoding to use
:arg serializer_opts: any options to pass to the
:py:class:`html5lib.serializer.HTMLSerializer` that gets created
:returns: the tree serialized as a string
Example:
>>> from html5lib.html5parser import parse
>>> from html5lib.serializer import serialize
>>> token_stream = parse('<html><body><p>Hi!</p></body></html>')
>>> serialize(token_stream, omit_optional_tags=False)
'<html><head></head><body><p>Hi!</p></body></html>'
"""
# XXX: Should we cache this?
walker = treewalkers.getTreeWalker(tree)
s = HTMLSerializer(**serializer_opts)
return s.render(walker(input), encoding) | [
"def",
"serialize",
"(",
"input",
",",
"tree",
"=",
"\"etree\"",
",",
"encoding",
"=",
"None",
",",
"*",
"*",
"serializer_opts",
")",
":",
"# XXX: Should we cache this?",
"walker",
"=",
"treewalkers",
".",
"getTreeWalker",
"(",
"tree",
")",
"s",
"=",
"HTMLSerializer",
"(",
"*",
"*",
"serializer_opts",
")",
"return",
"s",
".",
"render",
"(",
"walker",
"(",
"input",
")",
",",
"encoding",
")"
] | https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/html5lib/serializer.py#L75-L101 |
|
RobotLocomotion/drake | 0e18a34604c45ed65bc9018a54f7610f91cdad5b | bindings/pydrake/systems/system_sliders.py | python | SystemSliders.__init__ | (self, port_size, slider_names=None,
lower_limit=-10., upper_limit=10.,
resolution=-1, length=200, update_period_sec=0.0166,
window=None, title="System inputs") | Args:
port_size: Size of the input port that's being controlled. This
is the number of sliders that will show up.
slider_names: A list of strings describing the names of the sliders
that should be displayed.
lower_limit: The value(s) for the lower limits of each slider. See
class documentation for more details.
upper_limit: The value(s) for the upper limits of each slider. See
class documentation for more details.
resolution: A scalar or vector of length port_size
that specifies the discretization that the slider will
be rounded to. Use -1 (the default) to disable any
rounding. For example, a resolution of 0.1 will round
to the nearest 0.1. See class documentation for more
details.
length: The length of the sliders in pixels.
update_period_sec: Specifies how often the window update() method
gets called. Smaller values will theoretically make
GUI values available to the simulation more quickly,
but may require the simulation to take more steps than
necessary. The default value is suitable for most
applications.
window: Optionally pass in a tkinter.Tk() object to add these
widgets to. Default behavior is to create a new
window.
title: The string that appears as the title of the gui
window. Default title is "System inputs". This
parameter is only used if window is None. | Args:
port_size: Size of the input port that's being controlled. This
is the number of sliders that will show up.
slider_names: A list of strings describing the names of the sliders
that should be displayed.
lower_limit: The value(s) for the lower limits of each slider. See
class documentation for more details.
upper_limit: The value(s) for the upper limits of each slider. See
class documentation for more details.
resolution: A scalar or vector of length port_size
that specifies the discretization that the slider will
be rounded to. Use -1 (the default) to disable any
rounding. For example, a resolution of 0.1 will round
to the nearest 0.1. See class documentation for more
details.
length: The length of the sliders in pixels.
update_period_sec: Specifies how often the window update() method
gets called. Smaller values will theoretically make
GUI values available to the simulation more quickly,
but may require the simulation to take more steps than
necessary. The default value is suitable for most
applications.
window: Optionally pass in a tkinter.Tk() object to add these
widgets to. Default behavior is to create a new
window.
title: The string that appears as the title of the gui
window. Default title is "System inputs". This
parameter is only used if window is None. | [
"Args",
":",
"port_size",
":",
"Size",
"of",
"the",
"input",
"port",
"that",
"s",
"being",
"controlled",
".",
"This",
"is",
"the",
"number",
"of",
"sliders",
"that",
"will",
"show",
"up",
".",
"slider_names",
":",
"A",
"list",
"of",
"strings",
"describing",
"the",
"names",
"of",
"the",
"sliders",
"that",
"should",
"be",
"displayed",
".",
"lower_limit",
":",
"The",
"value",
"(",
"s",
")",
"for",
"the",
"lower",
"limits",
"of",
"each",
"slider",
".",
"See",
"class",
"documentation",
"for",
"more",
"details",
".",
"upper_limit",
":",
"The",
"value",
"(",
"s",
")",
"for",
"the",
"upper",
"limits",
"of",
"each",
"slider",
".",
"See",
"class",
"documentation",
"for",
"more",
"details",
".",
"resolution",
":",
"A",
"scalar",
"or",
"vector",
"of",
"length",
"port_size",
"that",
"specifies",
"the",
"discretization",
"that",
"the",
"slider",
"will",
"be",
"rounded",
"to",
".",
"Use",
"-",
"1",
"(",
"the",
"default",
")",
"to",
"disable",
"any",
"rounding",
".",
"For",
"example",
"a",
"resolution",
"of",
"0",
".",
"1",
"will",
"round",
"to",
"the",
"nearest",
"0",
".",
"1",
".",
"See",
"class",
"documentation",
"for",
"more",
"details",
".",
"length",
":",
"The",
"length",
"of",
"the",
"sliders",
"in",
"pixels",
".",
"update_period_sec",
":",
"Specifies",
"how",
"often",
"the",
"window",
"update",
"()",
"method",
"gets",
"called",
".",
"Smaller",
"values",
"will",
"theoretically",
"make",
"GUI",
"values",
"available",
"to",
"the",
"simulation",
"more",
"quickly",
"but",
"may",
"require",
"the",
"simulation",
"to",
"take",
"more",
"steps",
"than",
"necessary",
".",
"The",
"default",
"value",
"is",
"suitable",
"for",
"most",
"applications",
".",
"window",
":",
"Optionally",
"pass",
"in",
"a",
"tkinter",
".",
"Tk",
"()",
"object",
"to",
"add",
"these",
"widgets",
"to",
".",
"Default",
"behavior",
"is",
"to",
"create",
"a",
"new",
"window",
".",
"title",
":",
"The",
"string",
"that",
"appears",
"as",
"the",
"title",
"of",
"the",
"gui",
"window",
".",
"Default",
"title",
"is",
"System",
"sliders",
"This",
"parameter",
"is",
"only",
"used",
"if",
"window",
"is",
"None",
"."
] | def __init__(self, port_size, slider_names=None,
lower_limit=-10., upper_limit=10.,
resolution=-1, length=200, update_period_sec=0.0166,
window=None, title="System inputs"):
"""
Args:
port_size: Size of the input port that's being controlled. This
is the number of sliders that will show up.
slider_names: A list of strings describing the names of the sliders
that should be displayed.
lower_limit: The value(s) for the lower limits of each slider. See
class documentation for more details.
upper_limit: The value(s) for the upper limits of each slider. See
class documentation for more details.
resolution: A scalar or vector of length port_size
that specifies the discretization that the slider will
be rounded to. Use -1 (the default) to disable any
rounding. For example, a resolution of 0.1 will round
to the nearest 0.1. See class documentation for more
details.
length: The length of the sliders in pixels.
update_period_sec: Specifies how often the window update() method
gets called. Smaller values will theoretically make
GUI values available to the simulation more quickly,
but may require the simulation to take more steps than
necessary. The default value is suitable for most
applications.
window: Optionally pass in a tkinter.Tk() object to add these
widgets to. Default behavior is to create a new
window.
title: The string that appears as the title of the gui
window. Default title is "System inputs". This
parameter is only used if window is None.
"""
VectorSystem.__init__(self, 0, port_size)
if window is None:
self.window = tk.Tk()
self.window.title(title)
else:
self.window = window
self.port_size = port_size
if slider_names is None:
slider_names = ["Index " + str(i) for i in range(self.port_size)]
if len(slider_names) != self.port_size:
raise ValueError(
f"Slider names size ({len(slider_names)}) doesn't "
f"match port size ({self.port_size})")
def input_to_vector(x, desc):
"""
Turn scalar inputs into vector of size self.port_size.
Throws error if vector input is the wrong size,
otherwise returning the vector.
Args:
x: scalar or vector input.
desc: string describing the vector, used in error message.
"""
if np.isscalar(x):
return np.repeat(x, self.port_size)
if len(x) == self.port_size:
return x
raise ValueError(
f"Size of {desc} ({len(x)}) doesn't "
f"match port size ({self.port_size})"
)
lower_limit = input_to_vector(lower_limit, "lower_limit")
upper_limit = input_to_vector(upper_limit, "upper_limit")
resolution = input_to_vector(resolution, "resolution")
# Schedule window updates in either case (new or existing window):
self.DeclarePeriodicEvent(update_period_sec, 0.0,
PublishEvent(self._update_window))
self._sliders = []
# TODO: support a scroll bar for larger input sizes
for i in range(self.port_size):
slider = tk.Scale(self.window,
from_=lower_limit[i],
to=upper_limit[i],
resolution=resolution[i],
label=slider_names[i],
length=length,
orient=tk.HORIZONTAL)
slider.pack()
self._sliders.append(slider) | [
"def",
"__init__",
"(",
"self",
",",
"port_size",
",",
"slider_names",
"=",
"None",
",",
"lower_limit",
"=",
"-",
"10.",
",",
"upper_limit",
"=",
"10.",
",",
"resolution",
"=",
"-",
"1",
",",
"length",
"=",
"200",
",",
"update_period_sec",
"=",
"0.0166",
",",
"window",
"=",
"None",
",",
"title",
"=",
"\"System inputs\"",
")",
":",
"VectorSystem",
".",
"__init__",
"(",
"self",
",",
"0",
",",
"port_size",
")",
"if",
"window",
"is",
"None",
":",
"self",
".",
"window",
"=",
"tk",
".",
"Tk",
"(",
")",
"self",
".",
"window",
".",
"title",
"(",
"title",
")",
"else",
":",
"self",
".",
"window",
"=",
"window",
"self",
".",
"port_size",
"=",
"port_size",
"if",
"slider_names",
"is",
"None",
":",
"slider_names",
"=",
"[",
"\"Index \"",
"+",
"str",
"(",
"i",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"port_size",
")",
"]",
"if",
"len",
"(",
"slider_names",
")",
"!=",
"self",
".",
"port_size",
":",
"raise",
"ValueError",
"(",
"f\"Slider names size ({len(slider_names)}) doesn't \"",
"f\"match port size ({self.port_size})\"",
")",
"def",
"input_to_vector",
"(",
"x",
",",
"desc",
")",
":",
"\"\"\"\n Turn scalar inputs into vector of size self.port_size.\n Throws error if vector input is the wrong size,\n otherwise returning the vector.\n\n Args:\n x: scalar or vector input.\n desc: string describing the vector, used in error message.\n \"\"\"",
"if",
"np",
".",
"isscalar",
"(",
"x",
")",
":",
"return",
"np",
".",
"repeat",
"(",
"x",
",",
"self",
".",
"port_size",
")",
"if",
"len",
"(",
"x",
")",
"==",
"self",
".",
"port_size",
":",
"return",
"x",
"raise",
"ValueError",
"(",
"f\"Size of {desc} ({len(x)}) doesn't \"",
"f\"match port size ({self.port_size})\"",
")",
"lower_limit",
"=",
"input_to_vector",
"(",
"lower_limit",
",",
"\"lower_limit\"",
")",
"upper_limit",
"=",
"input_to_vector",
"(",
"upper_limit",
",",
"\"upper_limit\"",
")",
"resolution",
"=",
"input_to_vector",
"(",
"resolution",
",",
"\"resolution\"",
")",
"# Schedule window updates in either case (new or existing window):",
"self",
".",
"DeclarePeriodicEvent",
"(",
"update_period_sec",
",",
"0.0",
",",
"PublishEvent",
"(",
"self",
".",
"_update_window",
")",
")",
"self",
".",
"_sliders",
"=",
"[",
"]",
"# TODO: support a scroll bar for larger input sizes",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"port_size",
")",
":",
"slider",
"=",
"tk",
".",
"Scale",
"(",
"self",
".",
"window",
",",
"from_",
"=",
"lower_limit",
"[",
"i",
"]",
",",
"to",
"=",
"upper_limit",
"[",
"i",
"]",
",",
"resolution",
"=",
"resolution",
"[",
"i",
"]",
",",
"label",
"=",
"slider_names",
"[",
"i",
"]",
",",
"length",
"=",
"length",
",",
"orient",
"=",
"tk",
".",
"HORIZONTAL",
")",
"slider",
".",
"pack",
"(",
")",
"self",
".",
"_sliders",
".",
"append",
"(",
"slider",
")"
] | https://github.com/RobotLocomotion/drake/blob/0e18a34604c45ed65bc9018a54f7610f91cdad5b/bindings/pydrake/systems/system_sliders.py#L31-L123 |
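A construction sketch using only the parameters documented above; it assumes a pydrake build exposing this module (path inferred from the record) and a working tkinter display:

```python
# Hypothetical import path based on bindings/pydrake/systems/system_sliders.py
from pydrake.systems.system_sliders import SystemSliders

sliders = SystemSliders(port_size=2,
                        slider_names=["x", "y"],
                        lower_limit=-1.0,   # scalar: broadcast to both sliders
                        upper_limit=1.0,
                        resolution=0.05,    # snap slider values to 0.05 steps
                        title="Demo inputs")
# `sliders` is a VectorSystem with a 2-wide output port; add it to a
# DiagramBuilder and wire it to whatever system it should drive.
```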
||
CRYTEK/CRYENGINE | 232227c59a220cbbd311576f0fbeba7bb53b2a8c | Code/Tools/waf-1.7.13/crywaflib/compile_settings_linux_x64.py | python | load_performance_linux_x64_settings | (conf) | Setup all compiler and linker settings shared over all linux_x64 configurations for
the 'performance' configuration | Setup all compiler and linker settings shared over all linux_x64 configurations for
the 'performance' configuration | [
"Setup",
"all",
"compiler",
"and",
"linker",
"settings",
"shared",
"over",
"all",
"linux_x64",
"configurations",
"for",
"the",
"performance",
"configuration"
] | def load_performance_linux_x64_settings(conf):
"""
Setup all compiler and linker settings shared over all linux_x64 configurations for
the 'performance' configuration
"""
v = conf.env
load_linux_x64_common_settings(v) | [
"def",
"load_performance_linux_x64_settings",
"(",
"conf",
")",
":",
"v",
"=",
"conf",
".",
"env",
"load_linux_x64_common_settings",
"(",
"v",
")"
] | https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Code/Tools/waf-1.7.13/crywaflib/compile_settings_linux_x64.py#L31-L37 |
||
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/estimator/gc.py | python | _union | (lf, rf) | return keep | Creates a filter that keeps the union of two filters.
Args:
lf: first filter
rf: second filter
Returns:
A filter function that keeps the union of the paths kept by the two filters. | Creates a filter that keeps the union of two filters. | [
"Creates",
"a",
"filter",
"that",
"keeps",
"the",
"union",
"of",
"two",
"filters",
"."
] | def _union(lf, rf):
"""Creates a filter that keeps the union of two filters.
Args:
lf: first filter
rf: second filter
Returns:
A filter function that keeps the union of the paths kept by the two filters.
"""
def keep(paths):
l = set(lf(paths))
r = set(rf(paths))
return sorted(list(l|r))
return keep | [
"def",
"_union",
"(",
"lf",
",",
"rf",
")",
":",
"def",
"keep",
"(",
"paths",
")",
":",
"l",
"=",
"set",
"(",
"lf",
"(",
"paths",
")",
")",
"r",
"=",
"set",
"(",
"rf",
"(",
"paths",
")",
")",
"return",
"sorted",
"(",
"list",
"(",
"l",
"|",
"r",
")",
")",
"return",
"keep"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/estimator/gc.py#L149-L163 |
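The set-union semantics are easy to see with toy filters; this sketch assumes `_union` from the record above is in scope, and uses plain ints where the real gc module passes `Path` tuples:

```python
lf = lambda paths: [p for p in paths if p % 2 == 0]   # keep even values
rf = lambda paths: [p for p in paths if p > 3]        # keep large values

keep = _union(lf, rf)
assert keep([1, 2, 3, 4, 5]) == [2, 4, 5]   # sorted union of both keeps
```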
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/inspect.py | python | getargvalues | (frame) | return ArgInfo(args, varargs, varkw, frame.f_locals) | Get information about arguments passed into a particular frame.
A tuple of four things is returned: (args, varargs, varkw, locals).
'args' is a list of the argument names (it may contain nested lists).
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'locals' is the locals dictionary of the given frame. | Get information about arguments passed into a particular frame. | [
"Get",
"information",
"about",
"arguments",
"passed",
"into",
"a",
"particular",
"frame",
"."
] | def getargvalues(frame):
"""Get information about arguments passed into a particular frame.
A tuple of four things is returned: (args, varargs, varkw, locals).
'args' is a list of the argument names (it may contain nested lists).
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'locals' is the locals dictionary of the given frame."""
args, varargs, varkw = getargs(frame.f_code)
return ArgInfo(args, varargs, varkw, frame.f_locals) | [
"def",
"getargvalues",
"(",
"frame",
")",
":",
"args",
",",
"varargs",
",",
"varkw",
"=",
"getargs",
"(",
"frame",
".",
"f_code",
")",
"return",
"ArgInfo",
"(",
"args",
",",
"varargs",
",",
"varkw",
",",
"frame",
".",
"f_locals",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/inspect.py#L831-L839 |
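A self-contained example of `getargvalues` (the call works the same on modern Python, though this record is from a Python 2 stdlib copy):

```python
import inspect

def demo(a, b=2, *rest, **opts):
    return inspect.getargvalues(inspect.currentframe())

args, varargs, varkw, values = demo(1, 3, 4, flag=True)
assert args == ['a', 'b']
assert varargs == 'rest' and varkw == 'opts'
assert values['a'] == 1 and values['rest'] == (4,)
```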
|
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | deps/src/libxml2-2.9.1/python/libxml2class.py | python | uCSIsCatPd | (code) | return ret | Check whether the character is part of Pd UCS Category | Check whether the character is part of Pd UCS Category | [
"Check",
"whether",
"the",
"character",
"is",
"part",
"of",
"Pd",
"UCS",
"Category"
] | def uCSIsCatPd(code):
"""Check whether the character is part of Pd UCS Category """
ret = libxml2mod.xmlUCSIsCatPd(code)
return ret | [
"def",
"uCSIsCatPd",
"(",
"code",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlUCSIsCatPd",
"(",
"code",
")",
"return",
"ret"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L1563-L1566 |
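A quick check against known Unicode dash characters; assumes the libxml2 Python bindings that this file belongs to are installed:

```python
import libxml2

assert libxml2.uCSIsCatPd(0x2D)          # U+002D HYPHEN-MINUS is Pd
assert libxml2.uCSIsCatPd(0x2013)        # U+2013 EN DASH is Pd
assert not libxml2.uCSIsCatPd(ord('A'))  # letters are not in Pd
```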
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/tools/Editra/src/eclib/panelbox.py | python | PanelBoxItemBase.OnKeyUp | (self, evt) | Handle key navigation events | Handle key navigation events | [
"Handle",
"key",
"navigation",
"events"
] | def OnKeyUp(self, evt):
"""Handle key navigation events"""
self.GetParent().OnNavigate(evt)
evt.Skip() | [
"def",
"OnKeyUp",
"(",
"self",
",",
"evt",
")",
":",
"self",
".",
"GetParent",
"(",
")",
".",
"OnNavigate",
"(",
"evt",
")",
"evt",
".",
"Skip",
"(",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/eclib/panelbox.py#L297-L300 |
||
goldeneye-source/ges-code | 2630cd8ef3d015af53c72ec2e19fc1f7e7fe8d9d | thirdparty/protobuf-2.3.0/python/google/protobuf/internal/containers.py | python | RepeatedScalarFieldContainer.__delslice__ | (self, start, stop) | Deletes the subset of items from between the specified indices. | Deletes the subset of items from between the specified indices. | [
"Deletes",
"the",
"subset",
"of",
"items",
"from",
"between",
"the",
"specified",
"indices",
"."
] | def __delslice__(self, start, stop):
"""Deletes the subset of items from between the specified indices."""
del self._values[start:stop]
self._message_listener.Modified() | [
"def",
"__delslice__",
"(",
"self",
",",
"start",
",",
"stop",
")",
":",
"del",
"self",
".",
"_values",
"[",
"start",
":",
"stop",
"]",
"self",
".",
"_message_listener",
".",
"Modified",
"(",
")"
] | https://github.com/goldeneye-source/ges-code/blob/2630cd8ef3d015af53c72ec2e19fc1f7e7fe8d9d/thirdparty/protobuf-2.3.0/python/google/protobuf/internal/containers.py#L160-L163 |
||
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/distribute/multi_process_lib.py | python | _set_spawn_exe_path | () | Set the path to the executable for spawned processes.
This utility searches for the binary the parent process is using, and sets
the executable of multiprocessing's context accordingly.
Raises:
RuntimeError: If the binary path cannot be determined. | Set the path to the executable for spawned processes. | [
"Set",
"the",
"path",
"to",
"the",
"executable",
"for",
"spawned",
"processes",
"."
] | def _set_spawn_exe_path():
"""Set the path to the executable for spawned processes.
This utility searches for the binary the parent process is using, and sets
the executable of multiprocessing's context accordingly.
Raises:
RuntimeError: If the binary path cannot be determined.
"""
# TODO(b/150264776): This does not work with Windows. Find a solution.
if sys.argv[0].endswith('.py'):
def guess_path(package_root):
# If all we have is a python module path, we'll need to make a guess for
# the actual executable path.
if 'bazel-out' in sys.argv[0] and package_root in sys.argv[0]:
# Guess the binary path under bazel. For target
# //tensorflow/python/distribute:input_lib_test_multiworker_gpu, the
# argv[0] is in the form of
# /.../tensorflow/python/distribute/input_lib_test.py
# and the binary is
# /.../tensorflow/python/distribute/input_lib_test_multiworker_gpu
package_root_base = sys.argv[0][:sys.argv[0].rfind(package_root)]
binary = os.environ['TEST_TARGET'][2:].replace(':', '/', 1)
possible_path = os.path.join(package_root_base, package_root,
binary)
logging.info('Guessed test binary path: %s', possible_path)
if os.access(possible_path, os.X_OK):
return possible_path
return None
path = guess_path('org_tensorflow')
if not path:
path = guess_path('org_keras')
if path is None:
logging.error(
'Cannot determine binary path. sys.argv[0]=%s os.environ=%s',
sys.argv[0], os.environ)
raise RuntimeError('Cannot determine binary path')
sys.argv[0] = path
# Note that this sets the executable for *all* contexts.
multiprocessing.get_context().set_executable(sys.argv[0]) | [
"def",
"_set_spawn_exe_path",
"(",
")",
":",
"# TODO(b/150264776): This does not work with Windows. Find a solution.",
"if",
"sys",
".",
"argv",
"[",
"0",
"]",
".",
"endswith",
"(",
"'.py'",
")",
":",
"def",
"guess_path",
"(",
"package_root",
")",
":",
"# If all we have is a python module path, we'll need to make a guess for",
"# the actual executable path.",
"if",
"'bazel-out'",
"in",
"sys",
".",
"argv",
"[",
"0",
"]",
"and",
"package_root",
"in",
"sys",
".",
"argv",
"[",
"0",
"]",
":",
"# Guess the binary path under bazel. For target",
"# //tensorflow/python/distribute:input_lib_test_multiworker_gpu, the",
"# argv[0] is in the form of",
"# /.../tensorflow/python/distribute/input_lib_test.py",
"# and the binary is",
"# /.../tensorflow/python/distribute/input_lib_test_multiworker_gpu",
"package_root_base",
"=",
"sys",
".",
"argv",
"[",
"0",
"]",
"[",
":",
"sys",
".",
"argv",
"[",
"0",
"]",
".",
"rfind",
"(",
"package_root",
")",
"]",
"binary",
"=",
"os",
".",
"environ",
"[",
"'TEST_TARGET'",
"]",
"[",
"2",
":",
"]",
".",
"replace",
"(",
"':'",
",",
"'/'",
",",
"1",
")",
"possible_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"package_root_base",
",",
"package_root",
",",
"binary",
")",
"logging",
".",
"info",
"(",
"'Guessed test binary path: %s'",
",",
"possible_path",
")",
"if",
"os",
".",
"access",
"(",
"possible_path",
",",
"os",
".",
"X_OK",
")",
":",
"return",
"possible_path",
"return",
"None",
"path",
"=",
"guess_path",
"(",
"'org_tensorflow'",
")",
"if",
"not",
"path",
":",
"path",
"=",
"guess_path",
"(",
"'org_keras'",
")",
"if",
"path",
"is",
"None",
":",
"logging",
".",
"error",
"(",
"'Cannot determine binary path. sys.argv[0]=%s os.environ=%s'",
",",
"sys",
".",
"argv",
"[",
"0",
"]",
",",
"os",
".",
"environ",
")",
"raise",
"RuntimeError",
"(",
"'Cannot determine binary path'",
")",
"sys",
".",
"argv",
"[",
"0",
"]",
"=",
"path",
"# Note that this sets the executable for *all* contexts.",
"multiprocessing",
".",
"get_context",
"(",
")",
".",
"set_executable",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/multi_process_lib.py#L87-L126 |
||
apache/qpid-proton | 6bcdfebb55ea3554bc29b1901422532db331a591 | python/proton/_handlers.py | python | IncomingMessageHandler.on_message | (self, event: Event) | Called when a message is received. The message itself can be
obtained as a property on the event. For the purpose of
referring to this message in further actions (e.g. if
explicitly accepting it), the ``delivery`` should be used, also
obtainable via a property on the event.
:param event: The underlying event object. Use this to obtain further
information on the event. | Called when a message is received. The message itself can be
obtained as a property on the event. For the purpose of
referring to this message in further actions (e.g. if
explicitly accepting it), the ``delivery`` should be used, also
obtainable via a property on the event. | [
"Called",
"when",
"a",
"message",
"is",
"received",
".",
"The",
"message",
"itself",
"can",
"be",
"obtained",
"as",
"a",
"property",
"on",
"the",
"event",
".",
"For",
"the",
"purpose",
"of",
"referring",
"to",
"this",
"message",
"in",
"further",
"actions",
"(",
"e",
".",
"g",
".",
"if",
"explicitly",
"accepting",
"it",
"the",
"delivery",
"should",
"be",
"used",
"also",
"obtainable",
"via",
"a",
"property",
"on",
"the",
"event",
"."
] | def on_message(self, event: Event):
"""
Called when a message is received. The message itself can be
obtained as a property on the event. For the purpose of
referring to this message in further actions (e.g. if
explicitly accepting it), the ``delivery`` should be used, also
obtainable via a property on the event.
:param event: The underlying event object. Use this to obtain further
information on the event.
"""
if self.delegate is not None:
_dispatch(self.delegate, 'on_message', event) | [
"def",
"on_message",
"(",
"self",
",",
"event",
":",
"Event",
")",
":",
"if",
"self",
".",
"delegate",
"is",
"not",
"None",
":",
"_dispatch",
"(",
"self",
".",
"delegate",
",",
"'on_message'",
",",
"event",
")"
] | https://github.com/apache/qpid-proton/blob/6bcdfebb55ea3554bc29b1901422532db331a591/python/proton/_handlers.py#L267-L279 |
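In practice user code rarely touches `IncomingMessageHandler` directly: `MessagingHandler` wires one up internally and forwards to an overridden `on_message`. A minimal receiver sketch, assuming a reachable AMQP endpoint:

```python
from proton.handlers import MessagingHandler
from proton.reactor import Container

class Recv(MessagingHandler):
    def __init__(self, url):
        super(Recv, self).__init__(auto_accept=False)
        self.url = url

    def on_start(self, event):
        event.container.create_receiver(self.url)

    def on_message(self, event):
        print(event.message.body)
        self.accept(event.delivery)  # explicit disposition via the delivery

# Container(Recv("localhost:5672/examples")).run()  # needs a live broker
```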
||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py3/scipy/sparse/compressed.py | python | _cs_matrix.sorted_indices | (self) | return A | Return a copy of this matrix with sorted indices | Return a copy of this matrix with sorted indices | [
"Return",
"a",
"copy",
"of",
"this",
"matrix",
"with",
"sorted",
"indices"
] | def sorted_indices(self):
"""Return a copy of this matrix with sorted indices
"""
A = self.copy()
A.sort_indices()
return A | [
"def",
"sorted_indices",
"(",
"self",
")",
":",
"A",
"=",
"self",
".",
"copy",
"(",
")",
"A",
".",
"sort_indices",
"(",
")",
"return",
"A"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/sparse/compressed.py#L1058-L1063 |
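A short demonstration: build a CSR matrix whose row indices are deliberately out of order, then get a sorted copy while leaving the original untouched:

```python
import numpy as np
from scipy.sparse import csr_matrix

indptr = np.array([0, 2, 3])
indices = np.array([2, 0, 1])   # row 0 lists column 2 before column 0
data = np.array([1.0, 2.0, 3.0])
m = csr_matrix((data, indices, indptr), shape=(2, 3))

s = m.sorted_indices()          # copy; `m` itself is not modified
assert s.has_sorted_indices
assert (s.toarray() == m.toarray()).all()
```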
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/fpformat.py | python | unexpo | (intpart, fraction, expo) | return intpart, fraction | Remove the exponent by changing intpart and fraction. | Remove the exponent by changing intpart and fraction. | [
"Remove",
"the",
"exponent",
"by",
"changing",
"intpart",
"and",
"fraction",
"."
] | def unexpo(intpart, fraction, expo):
"""Remove the exponent by changing intpart and fraction."""
if expo > 0: # Move the point left
f = len(fraction)
intpart, fraction = intpart + fraction[:expo], fraction[expo:]
if expo > f:
intpart = intpart + '0'*(expo-f)
elif expo < 0: # Move the point right
i = len(intpart)
intpart, fraction = intpart[:expo], intpart[expo:] + fraction
if expo < -i:
fraction = '0'*(-expo-i) + fraction
return intpart, fraction | [
"def",
"unexpo",
"(",
"intpart",
",",
"fraction",
",",
"expo",
")",
":",
"if",
"expo",
">",
"0",
":",
"# Move the point left",
"f",
"=",
"len",
"(",
"fraction",
")",
"intpart",
",",
"fraction",
"=",
"intpart",
"+",
"fraction",
"[",
":",
"expo",
"]",
",",
"fraction",
"[",
"expo",
":",
"]",
"if",
"expo",
">",
"f",
":",
"intpart",
"=",
"intpart",
"+",
"'0'",
"*",
"(",
"expo",
"-",
"f",
")",
"elif",
"expo",
"<",
"0",
":",
"# Move the point right",
"i",
"=",
"len",
"(",
"intpart",
")",
"intpart",
",",
"fraction",
"=",
"intpart",
"[",
":",
"expo",
"]",
",",
"intpart",
"[",
"expo",
":",
"]",
"+",
"fraction",
"if",
"expo",
"<",
"-",
"i",
":",
"fraction",
"=",
"'0'",
"*",
"(",
"-",
"expo",
"-",
"i",
")",
"+",
"fraction",
"return",
"intpart",
",",
"fraction"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/fpformat.py#L50-L62 |
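Worked values for `unexpo`, assuming the function from the record above is in scope; each case shifts the decimal point to absorb the exponent:

```python
assert unexpo('1', '234', 2) == ('123', '4')   # 1.234e2  -> 123.4
assert unexpo('12', '3', -3) == ('', '0123')   # 12.3e-3  -> 0.0123
assert unexpo('5', '0', 0) == ('5', '0')       # zero exponent: unchanged
```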
|
bairdzhang/smallhardface | 76fa1d87a9602d9b13d7a7fe693fc7aec91cab80 | caffe/scripts/cpp_lint.py | python | _SetOutputFormat | (output_format) | Sets the module's output format. | Sets the module's output format. | [
"Sets",
"the",
"module",
"s",
"output",
"format",
"."
] | def _SetOutputFormat(output_format):
"""Sets the module's output format."""
_cpplint_state.SetOutputFormat(output_format) | [
"def",
"_SetOutputFormat",
"(",
"output_format",
")",
":",
"_cpplint_state",
".",
"SetOutputFormat",
"(",
"output_format",
")"
] | https://github.com/bairdzhang/smallhardface/blob/76fa1d87a9602d9b13d7a7fe693fc7aec91cab80/caffe/scripts/cpp_lint.py#L776-L778 |
||
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | scripts/SANS/isis_reduction_steps.py | python | UnitsConvert.get_rebin | (self) | return self._get_rebin(self.wav_low, self.wav_step, self.wav_high) | Get the string that is passed as the "param" property to Rebin
@return the string that is passed to Rebin | Get the string that is passed as the "param" property to Rebin | [
"Get",
"the",
"string",
"that",
"is",
"passed",
"as",
"the",
"param",
"property",
"to",
"Rebin"
] | def get_rebin(self):
"""
Get the string that is passed as the "param" property to Rebin
@return the string that is passed to Rebin
"""
return self._get_rebin(self.wav_low, self.wav_step, self.wav_high) | [
"def",
"get_rebin",
"(",
"self",
")",
":",
"return",
"self",
".",
"_get_rebin",
"(",
"self",
".",
"wav_low",
",",
"self",
".",
"wav_step",
",",
"self",
".",
"wav_high",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/SANS/isis_reduction_steps.py#L3082-L3087 |
|
chuckcho/video-caffe | fc232b3e3a90ea22dd041b9fc5c542f170581f20 | python/caffe/draw.py | python | get_layer_label | (layer, rankdir, display_lrm=False) | return node_label | Define node label based on layer type.
Parameters
----------
layer : caffe_pb2.LayerParameter
rankdir : {'LR', 'TB', 'BT'}
Direction of graph layout.
display_lrm : boolean, optional
If True include the learning rate multipliers in the label (default is
False).
Returns
-------
node_label : string
A label for the current layer | Define node label based on layer type. | [
"Define",
"node",
"label",
"based",
"on",
"layer",
"type",
"."
] | def get_layer_label(layer, rankdir, display_lrm=False):
"""Define node label based on layer type.
Parameters
----------
layer : caffe_pb2.LayerParameter
rankdir : {'LR', 'TB', 'BT'}
Direction of graph layout.
display_lrm : boolean, optional
If True include the learning rate multipliers in the label (default is
False).
Returns
-------
node_label : string
A label for the current layer
"""
if rankdir in ('TB', 'BT'):
# If graph orientation is vertical, horizontal space is free and
# vertical space is not; separate words with spaces
separator = ' '
else:
# If graph orientation is horizontal, vertical space is free and
# horizontal space is not; separate words with newlines
separator = r'\n'
# Initializes a list of descriptors that will be concatenated into the
# `node_label`
descriptors_list = []
# Add the layer's name
descriptors_list.append(layer.name)
# Add layer's type
if layer.type == 'Pooling':
pooling_types_dict = get_pooling_types_dict()
layer_type = '(%s %s)' % (layer.type,
pooling_types_dict[layer.pooling_param.pool])
else:
layer_type = '(%s)' % layer.type
descriptors_list.append(layer_type)
# Describe parameters for spatial operation layers
if layer.type in ['Convolution', 'Deconvolution', 'Pooling']:
if layer.type == 'Pooling':
kernel_size = layer.pooling_param.kernel_size
stride = layer.pooling_param.stride
padding = layer.pooling_param.pad
else:
kernel_size = layer.convolution_param.kernel_size[0] if \
len(layer.convolution_param.kernel_size) else 1
stride = layer.convolution_param.stride[0] if \
len(layer.convolution_param.stride) else 1
padding = layer.convolution_param.pad[0] if \
len(layer.convolution_param.pad) else 0
spatial_descriptor = separator.join([
"kernel size: %d" % kernel_size,
"stride: %d" % stride,
"pad: %d" % padding,
])
descriptors_list.append(spatial_descriptor)
# Add LR multiplier for learning layers
if display_lrm and layer.type in ['Convolution', 'Deconvolution', 'InnerProduct']:
lrm0, lrm1 = get_layer_lr_mult(layer)
if any([lrm0, lrm1]):
lr_mult = "lr mult: %.1f, %.1f" % (lrm0, lrm1)
descriptors_list.append(lr_mult)
# Concatenate the descriptors into one label
node_label = separator.join(descriptors_list)
# Outer double quotes needed or else colon characters don't parse
# properly
node_label = '"%s"' % node_label
return node_label | [
"def",
"get_layer_label",
"(",
"layer",
",",
"rankdir",
",",
"display_lrm",
"=",
"False",
")",
":",
"if",
"rankdir",
"in",
"(",
"'TB'",
",",
"'BT'",
")",
":",
"# If graph orientation is vertical, horizontal space is free and",
"# vertical space is not; separate words with spaces",
"separator",
"=",
"' '",
"else",
":",
"# If graph orientation is horizontal, vertical space is free and",
"# horizontal space is not; separate words with newlines",
"separator",
"=",
"r'\\n'",
"# Initializes a list of descriptors that will be concatenated into the",
"# `node_label`",
"descriptors_list",
"=",
"[",
"]",
"# Add the layer's name",
"descriptors_list",
".",
"append",
"(",
"layer",
".",
"name",
")",
"# Add layer's type",
"if",
"layer",
".",
"type",
"==",
"'Pooling'",
":",
"pooling_types_dict",
"=",
"get_pooling_types_dict",
"(",
")",
"layer_type",
"=",
"'(%s %s)'",
"%",
"(",
"layer",
".",
"type",
",",
"pooling_types_dict",
"[",
"layer",
".",
"pooling_param",
".",
"pool",
"]",
")",
"else",
":",
"layer_type",
"=",
"'(%s)'",
"%",
"layer",
".",
"type",
"descriptors_list",
".",
"append",
"(",
"layer_type",
")",
"# Describe parameters for spatial operation layers",
"if",
"layer",
".",
"type",
"in",
"[",
"'Convolution'",
",",
"'Deconvolution'",
",",
"'Pooling'",
"]",
":",
"if",
"layer",
".",
"type",
"==",
"'Pooling'",
":",
"kernel_size",
"=",
"layer",
".",
"pooling_param",
".",
"kernel_size",
"stride",
"=",
"layer",
".",
"pooling_param",
".",
"stride",
"padding",
"=",
"layer",
".",
"pooling_param",
".",
"pad",
"else",
":",
"kernel_size",
"=",
"layer",
".",
"convolution_param",
".",
"kernel_size",
"[",
"0",
"]",
"if",
"len",
"(",
"layer",
".",
"convolution_param",
".",
"kernel_size",
")",
"else",
"1",
"stride",
"=",
"layer",
".",
"convolution_param",
".",
"stride",
"[",
"0",
"]",
"if",
"len",
"(",
"layer",
".",
"convolution_param",
".",
"stride",
")",
"else",
"1",
"padding",
"=",
"layer",
".",
"convolution_param",
".",
"pad",
"[",
"0",
"]",
"if",
"len",
"(",
"layer",
".",
"convolution_param",
".",
"pad",
")",
"else",
"0",
"spatial_descriptor",
"=",
"separator",
".",
"join",
"(",
"[",
"\"kernel size: %d\"",
"%",
"kernel_size",
",",
"\"stride: %d\"",
"%",
"stride",
",",
"\"pad: %d\"",
"%",
"padding",
",",
"]",
")",
"descriptors_list",
".",
"append",
"(",
"spatial_descriptor",
")",
"# Add LR multiplier for learning layers",
"if",
"display_lrm",
"and",
"layer",
".",
"type",
"in",
"[",
"'Convolution'",
",",
"'Deconvolution'",
",",
"'InnerProduct'",
"]",
":",
"lrm0",
",",
"lrm1",
"=",
"get_layer_lr_mult",
"(",
"layer",
")",
"if",
"any",
"(",
"[",
"lrm0",
",",
"lrm1",
"]",
")",
":",
"lr_mult",
"=",
"\"lr mult: %.1f, %.1f\"",
"%",
"(",
"lrm0",
",",
"lrm1",
")",
"descriptors_list",
".",
"append",
"(",
"lr_mult",
")",
"# Concatenate the descriptors into one label",
"node_label",
"=",
"separator",
".",
"join",
"(",
"descriptors_list",
")",
"# Outer double quotes needed or else colon characters don't parse",
"# properly",
"node_label",
"=",
"'\"%s\"'",
"%",
"node_label",
"return",
"node_label"
] | https://github.com/chuckcho/video-caffe/blob/fc232b3e3a90ea22dd041b9fc5c542f170581f20/python/caffe/draw.py#L101-L174 |
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/Tix.py | python | Grid.move_row | (self, from_, to, offset) | Moves the range of rows from position FROM through TO by
the distance indicated by OFFSET.
For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5. | Moves the range of rows from position FROM through TO by
the distance indicated by OFFSET.
For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5. | [
"Moves",
"the",
"range",
"of",
"rows",
"from",
"position",
"FROM",
"through",
"TO",
"by",
"the",
"distance",
"indicated",
"by",
"OFFSET",
".",
"For",
"example",
"move_row",
"(",
"2",
"4",
"1",
")",
"moves",
"the",
"rows",
"2",
"3",
"4",
"to",
"rows",
"3",
"4",
"5",
"."
] | def move_row(self, from_, to, offset):
"""Moves the range of rows from position FROM through TO by
the distance indicated by OFFSET.
For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5."""
self.tk.call(self, 'move', 'row', from_, to, offset) | [
"def",
"move_row",
"(",
"self",
",",
"from_",
",",
"to",
",",
"offset",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"self",
",",
"'move'",
",",
"'row'",
",",
"from_",
",",
"to",
",",
"offset",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/Tix.py#L1882-L1886 |
||
IntelRealSense/librealsense | c94410a420b74e5fb6a414bd12215c05ddd82b69 | wrappers/python/examples/box_dimensioner_multicam/calculate_rmsd_kabsch.py | python | get_coordinates_pdb | (filename) | return atoms, V | Get coordinates from the first chain in a pdb file
and return a vectorset with all the coordinates.
Parameters
----------
filename : string
Filename to read
Returns
-------
atoms : list
List of atomic types
V : array
(N,3) where N is number of atoms | Get coordinates from the first chain in a pdb file
and return a vectorset with all the coordinates. | [
"Get",
"coordinates",
"from",
"the",
"first",
"chain",
"in",
"a",
"pdb",
"file",
"and",
"return",
"a",
"vectorset",
"with",
"all",
"the",
"coordinates",
"."
] | def get_coordinates_pdb(filename):
"""
Get coordinates from the first chain in a pdb file
and return a vectorset with all the coordinates.
Parameters
----------
filename : string
Filename to read
Returns
-------
atoms : list
List of atomic types
V : array
(N,3) where N is number of atoms
"""
# PDB files tend to be a bit of a mess. The x, y and z coordinates
# are supposed to be in column 31-38, 39-46 and 47-54, but this is
# not always the case.
# Because of this, the first three columns containing a decimal are used.
# Since the format doesn't require a space between columns, we use the
# above column indices as a fallback.
x_column = None
V = list()
# Same with atoms and atom naming.
# The most robust way to do this is probably
# to assume that the atomtype is given in column 3.
atoms = list()
with open(filename, 'r') as f:
lines = f.readlines()
for line in lines:
if line.startswith("TER") or line.startswith("END"):
break
if line.startswith("ATOM"):
tokens = line.split()
# Try to get the atomtype
try:
atom = tokens[2][0]
if atom in ("H", "C", "N", "O", "S", "P"):
atoms.append(atom)
else:
# e.g. 1HD1
atom = tokens[2][1]
if atom == "H":
atoms.append(atom)
else:
raise Exception
except:
exit("Error parsing atomtype for the following line: \n{0:s}".format(line))
if x_column == None:
try:
# look for x column
for i, x in enumerate(tokens):
if "." in x and "." in tokens[i + 1] and "." in tokens[i + 2]:
x_column = i
break
except IndexError:
exit("Error parsing coordinates for the following line: \n{0:s}".format(line))
# Try to read the coordinates
try:
V.append(np.asarray(tokens[x_column:x_column + 3], dtype=float))
except:
# If that doesn't work, use hardcoded indices
try:
x = line[30:38]
y = line[38:46]
z = line[46:54]
V.append(np.asarray([x, y ,z], dtype=float))
except:
exit("Error parsing input for the following line: \n{0:s}".format(line))
V = np.asarray(V)
atoms = np.asarray(atoms)
assert(V.shape[0] == atoms.size)
return atoms, V | [
"def",
"get_coordinates_pdb",
"(",
"filename",
")",
":",
"# PDB files tend to be a bit of a mess. The x, y and z coordinates",
"# are supposed to be in column 31-38, 39-46 and 47-54, but this is",
"# not always the case.",
"# Because of this the three first columns containing a decimal is used.",
"# Since the format doesn't require a space between columns, we use the",
"# above column indices as a fallback.",
"x_column",
"=",
"None",
"V",
"=",
"list",
"(",
")",
"# Same with atoms and atom naming.",
"# The most robust way to do this is probably",
"# to assume that the atomtype is given in column 3.",
"atoms",
"=",
"list",
"(",
")",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"f",
":",
"lines",
"=",
"f",
".",
"readlines",
"(",
")",
"for",
"line",
"in",
"lines",
":",
"if",
"line",
".",
"startswith",
"(",
"\"TER\"",
")",
"or",
"line",
".",
"startswith",
"(",
"\"END\"",
")",
":",
"break",
"if",
"line",
".",
"startswith",
"(",
"\"ATOM\"",
")",
":",
"tokens",
"=",
"line",
".",
"split",
"(",
")",
"# Try to get the atomtype",
"try",
":",
"atom",
"=",
"tokens",
"[",
"2",
"]",
"[",
"0",
"]",
"if",
"atom",
"in",
"(",
"\"H\"",
",",
"\"C\"",
",",
"\"N\"",
",",
"\"O\"",
",",
"\"S\"",
",",
"\"P\"",
")",
":",
"atoms",
".",
"append",
"(",
"atom",
")",
"else",
":",
"# e.g. 1HD1",
"atom",
"=",
"tokens",
"[",
"2",
"]",
"[",
"1",
"]",
"if",
"atom",
"==",
"\"H\"",
":",
"atoms",
".",
"append",
"(",
"atom",
")",
"else",
":",
"raise",
"Exception",
"except",
":",
"exit",
"(",
"\"Error parsing atomtype for the following line: \\n{0:s}\"",
".",
"format",
"(",
"line",
")",
")",
"if",
"x_column",
"==",
"None",
":",
"try",
":",
"# look for x column",
"for",
"i",
",",
"x",
"in",
"enumerate",
"(",
"tokens",
")",
":",
"if",
"\".\"",
"in",
"x",
"and",
"\".\"",
"in",
"tokens",
"[",
"i",
"+",
"1",
"]",
"and",
"\".\"",
"in",
"tokens",
"[",
"i",
"+",
"2",
"]",
":",
"x_column",
"=",
"i",
"break",
"except",
"IndexError",
":",
"exit",
"(",
"\"Error parsing coordinates for the following line: \\n{0:s}\"",
".",
"format",
"(",
"line",
")",
")",
"# Try to read the coordinates",
"try",
":",
"V",
".",
"append",
"(",
"np",
".",
"asarray",
"(",
"tokens",
"[",
"x_column",
":",
"x_column",
"+",
"3",
"]",
",",
"dtype",
"=",
"float",
")",
")",
"except",
":",
"# If that doesn't work, use hardcoded indices",
"try",
":",
"x",
"=",
"line",
"[",
"30",
":",
"38",
"]",
"y",
"=",
"line",
"[",
"38",
":",
"46",
"]",
"z",
"=",
"line",
"[",
"46",
":",
"54",
"]",
"V",
".",
"append",
"(",
"np",
".",
"asarray",
"(",
"[",
"x",
",",
"y",
",",
"z",
"]",
",",
"dtype",
"=",
"float",
")",
")",
"except",
":",
"exit",
"(",
"\"Error parsing input for the following line: \\n{0:s}\"",
".",
"format",
"(",
"line",
")",
")",
"V",
"=",
"np",
".",
"asarray",
"(",
"V",
")",
"atoms",
"=",
"np",
".",
"asarray",
"(",
"atoms",
")",
"assert",
"(",
"V",
".",
"shape",
"[",
"0",
"]",
"==",
"atoms",
".",
"size",
")",
"return",
"atoms",
",",
"V"
] | https://github.com/IntelRealSense/librealsense/blob/c94410a420b74e5fb6a414bd12215c05ddd82b69/wrappers/python/examples/box_dimensioner_multicam/calculate_rmsd_kabsch.py#L323-L402 |
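A round-trip sketch: write a single ATOM record plus END to a file, then parse it back. Assumes `get_coordinates_pdb` (with its `numpy as np` import) is in scope:

```python
pdb_text = (
    "ATOM      1  N   ALA A   1      11.104   6.134  -6.504"
    "  1.00  0.00           N\n"
    "END\n"
)
with open("tiny.pdb", "w") as fh:
    fh.write(pdb_text)

atoms, coords = get_coordinates_pdb("tiny.pdb")
assert list(atoms) == ['N']
assert coords.shape == (1, 3)   # -> [[11.104, 6.134, -6.504]]
```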
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/targets/base.py | python | BaseContext.get_function | (self, fn, sig, _firstcall=True) | Return the implementation of function *fn* for signature *sig*.
The return value is a callable with the signature (builder, args). | Return the implementation of function *fn* for signature *sig*.
The return value is a callable with the signature (builder, args). | [
"Return",
"the",
"implementation",
"of",
"function",
"*",
"fn",
"*",
"for",
"signature",
"*",
"sig",
"*",
".",
"The",
"return",
"value",
"is",
"a",
"callable",
"with",
"the",
"signature",
"(",
"builder",
"args",
")",
"."
] | def get_function(self, fn, sig, _firstcall=True):
"""
Return the implementation of function *fn* for signature *sig*.
The return value is a callable with the signature (builder, args).
"""
assert sig is not None
sig = sig.as_function()
if isinstance(fn, (types.Function, types.BoundFunction,
types.Dispatcher)):
key = fn.get_impl_key(sig)
overloads = self._defns[key]
else:
key = fn
overloads = self._defns[key]
try:
return _wrap_impl(overloads.find(sig.args), self, sig)
except NotImplementedError:
pass
if isinstance(fn, types.Type):
# It's a type instance => try to find a definition for the type class
try:
return self.get_function(type(fn), sig)
except NotImplementedError:
# Raise exception for the type instance, for a better error message
pass
# Automatically refresh the context to load new registries if we are
# calling the first time.
if _firstcall:
self.refresh()
return self.get_function(fn, sig, _firstcall=False)
raise NotImplementedError("No definition for lowering %s%s" % (key, sig)) | [
"def",
"get_function",
"(",
"self",
",",
"fn",
",",
"sig",
",",
"_firstcall",
"=",
"True",
")",
":",
"assert",
"sig",
"is",
"not",
"None",
"sig",
"=",
"sig",
".",
"as_function",
"(",
")",
"if",
"isinstance",
"(",
"fn",
",",
"(",
"types",
".",
"Function",
",",
"types",
".",
"BoundFunction",
",",
"types",
".",
"Dispatcher",
")",
")",
":",
"key",
"=",
"fn",
".",
"get_impl_key",
"(",
"sig",
")",
"overloads",
"=",
"self",
".",
"_defns",
"[",
"key",
"]",
"else",
":",
"key",
"=",
"fn",
"overloads",
"=",
"self",
".",
"_defns",
"[",
"key",
"]",
"try",
":",
"return",
"_wrap_impl",
"(",
"overloads",
".",
"find",
"(",
"sig",
".",
"args",
")",
",",
"self",
",",
"sig",
")",
"except",
"NotImplementedError",
":",
"pass",
"if",
"isinstance",
"(",
"fn",
",",
"types",
".",
"Type",
")",
":",
"# It's a type instance => try to find a definition for the type class",
"try",
":",
"return",
"self",
".",
"get_function",
"(",
"type",
"(",
"fn",
")",
",",
"sig",
")",
"except",
"NotImplementedError",
":",
"# Raise exception for the type instance, for a better error message",
"pass",
"# Automatically refresh the context to load new registries if we are",
"# calling the first time.",
"if",
"_firstcall",
":",
"self",
".",
"refresh",
"(",
")",
"return",
"self",
".",
"get_function",
"(",
"fn",
",",
"sig",
",",
"_firstcall",
"=",
"False",
")",
"raise",
"NotImplementedError",
"(",
"\"No definition for lowering %s%s\"",
"%",
"(",
"key",
",",
"sig",
")",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/targets/base.py#L530-L563 |
||
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/general_fitting/general_fitting_model.py | python | GeneralFittingModel._copy_logs | (self, input_workspaces, output_workspace: str) | Copy the logs from the input workspace(s) to the output workspaces. | Copy the logs from the input workspace(s) to the output workspaces. | [
"Copy",
"the",
"logs",
"from",
"the",
"input",
"workspace",
"(",
"s",
")",
"to",
"the",
"output",
"workspaces",
"."
] | def _copy_logs(self, input_workspaces, output_workspace: str) -> None:
"""Copy the logs from the input workspace(s) to the output workspaces."""
if self.fitting_context.number_of_datasets == 1:
CopyLogs(InputWorkspace=input_workspaces[0], OutputWorkspace=output_workspace, StoreInADS=False)
else:
self._copy_logs_for_all_datsets(input_workspaces, output_workspace) | [
"def",
"_copy_logs",
"(",
"self",
",",
"input_workspaces",
",",
"output_workspace",
":",
"str",
")",
"->",
"None",
":",
"if",
"self",
".",
"fitting_context",
".",
"number_of_datasets",
"==",
"1",
":",
"CopyLogs",
"(",
"InputWorkspace",
"=",
"input_workspaces",
"[",
"0",
"]",
",",
"OutputWorkspace",
"=",
"output_workspace",
",",
"StoreInADS",
"=",
"False",
")",
"else",
":",
"self",
".",
"_copy_logs_for_all_datsets",
"(",
"input_workspaces",
",",
"output_workspace",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/general_fitting/general_fitting_model.py#L337-L342 |
||
gimli-org/gimli | 17aa2160de9b15ababd9ef99e89b1bc3277bbb23 | pygimli/physics/gravimetry/gravMagModelling.py | python | GravimetryModelling.setSensorPositions | (self, pnts) | Set measurement locations. [[x,y,z],...]. | Set measurement locations. [[x,y,z],...]. | [
"Set",
"measurement",
"locations",
".",
"[[",
"x",
"y",
"z",
"]",
"...",
"]",
"."
] | def setSensorPositions(self, pnts):
"""Set measurement locations. [[x,y,z],...]."""
self.sensorPositions = pnts | [
"def",
"setSensorPositions",
"(",
"self",
",",
"pnts",
")",
":",
"self",
".",
"sensorPositions",
"=",
"pnts"
] | https://github.com/gimli-org/gimli/blob/17aa2160de9b15ababd9ef99e89b1bc3277bbb23/pygimli/physics/gravimetry/gravMagModelling.py#L765-L767 |
||
emscripten-core/emscripten | 0d413d3c5af8b28349682496edc14656f5700c2f | third_party/ply/example/ansic/cparse.py | python | p_specifier_qualifier_list_3 | (t) | specifier_qualifier_list : type_qualifier specifier_qualifier_list | specifier_qualifier_list : type_qualifier specifier_qualifier_list | [
"specifier_qualifier_list",
":",
"type_qualifier",
"specifier_qualifier_list"
] | def p_specifier_qualifier_list_3(t):
'specifier_qualifier_list : type_qualifier specifier_qualifier_list'
pass | [
"def",
"p_specifier_qualifier_list_3",
"(",
"t",
")",
":",
"pass"
] | https://github.com/emscripten-core/emscripten/blob/0d413d3c5af8b28349682496edc14656f5700c2f/third_party/ply/example/ansic/cparse.py#L197-L199 |
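In PLY, the docstring of each p_* rule function is the grammar production itself, which is why the entry above carries the bare rule text as its docstring. A minimal self-contained sketch of that convention, using a toy grammar rather than the ANSI C one:

import ply.lex as lex
import ply.yacc as yacc

tokens = ('NAME',)
t_NAME = r'[a-zA-Z_]\w*'
t_ignore = ' \t'

def t_error(t):
    t.lexer.skip(1)

def p_list_single(p):
    'list : NAME'          # the docstring *is* the production
    p[0] = [p[1]]

def p_list_multi(p):
    'list : list NAME'
    p[0] = p[1] + [p[2]]

def p_error(p):
    pass

lexer = lex.lex()
parser = yacc.yacc()
print(parser.parse('alpha beta gamma'))  # ['alpha', 'beta', 'gamma']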
||
PixarAnimationStudios/USD | faed18ce62c8736b02413635b584a2f637156bad | pxr/usdImaging/usdviewq/adjustClipping.py | python | AdjustClipping.paintEvent | (self, paintEvent) | Overridden from base class so we can perform JIT updating
of editors to limit the number of redraws we perform | Overridden from base class so we can perform JIT updating
of editors to limit the number of redraws we perform | [
"Overridden",
"from",
"base",
"class",
"so",
"we",
"can",
"perform",
"JIT",
"updating",
"of",
"editors",
"to",
"limit",
"the",
"number",
"of",
"redraws",
"we",
"perform"
] | def paintEvent(self, paintEvent):
"""Overridden from base class so we can perform JIT updating
of editors to limit the number of redraws we perform"""
self._updateEditorsFromDataModel()
super(AdjustClipping, self).paintEvent(paintEvent) | [
"def",
"paintEvent",
"(",
"self",
",",
"paintEvent",
")",
":",
"self",
".",
"_updateEditorsFromDataModel",
"(",
")",
"super",
"(",
"AdjustClipping",
",",
"self",
")",
".",
"paintEvent",
"(",
"paintEvent",
")"
] | https://github.com/PixarAnimationStudios/USD/blob/faed18ce62c8736b02413635b584a2f637156bad/pxr/usdImaging/usdviewq/adjustClipping.py#L105-L109 |
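The deferred-refresh pattern above (sync editors from the data model inside paintEvent, then call the base implementation) generalizes to any Qt dialog. A hedged PyQt5 sketch; usdview uses its own UI bindings, and the model attribute here is assumed:

from PyQt5 import QtWidgets

class LazySyncDialog(QtWidgets.QDialog):
    def __init__(self, model, parent=None):
        super().__init__(parent)
        self._model = model                      # hypothetical data model
        self._near = QtWidgets.QDoubleSpinBox(self)

    def paintEvent(self, event):
        # Sync editors just-in-time: many model changes between paints
        # collapse into a single editor update and redraw.
        self._near.setValue(self._model.near)    # assumed model field
        super().paintEvent(event)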
||
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/Draft/DraftGui.py | python | DraftToolBar.checkEnterText | (self) | this function checks if the entered text ends with two blank lines | this function checks if the entered text ends with two blank lines | [
"this",
"function",
"checks",
"if",
"the",
"entered",
"text",
"ends",
"with",
"two",
"blank",
"lines"
] | def checkEnterText(self):
"""this function checks if the entered text ends with two blank lines"""
t = self.textValue.toPlainText()
if t.endswith("\n\n"):
self.sendText() | [
"def",
"checkEnterText",
"(",
"self",
")",
":",
"t",
"=",
"self",
".",
"textValue",
".",
"toPlainText",
"(",
")",
"if",
"t",
".",
"endswith",
"(",
"\"\\n\\n\"",
")",
":",
"self",
".",
"sendText",
"(",
")"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/DraftGui.py#L1740-L1744 |
||
y123456yz/reading-and-annotate-mongodb-3.6 | 93280293672ca7586dc24af18132aa61e4ed7fcf | mongo/site_scons/site_tools/idl_tool.py | python | idlc_emitter | (target, source, env) | return [target_source, target_header], source | For each input IDL file, the tool produces a .cpp and .h file. | For each input IDL file, the tool produces a .cpp and .h file. | [
"For",
"each",
"input",
"IDL",
"file",
"the",
"tool",
"produces",
"a",
".",
"cpp",
"and",
".",
"h",
"file",
"."
] | def idlc_emitter(target, source, env):
"""For each input IDL file, the tool produces a .cpp and .h file."""
first_source = str(source[0])
if not first_source.endswith(".idl"):
raise ValueError("Bad idl file name '%s', it must end with '.idl' " % (first_source))
base_file_name, _ = SCons.Util.splitext(str(target[0]))
target_source = base_file_name + "_gen.cpp"
target_header = base_file_name + "_gen.h"
env.Alias('generated-sources', [target_source, target_header])
return [target_source, target_header], source | [
"def",
"idlc_emitter",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"first_source",
"=",
"str",
"(",
"source",
"[",
"0",
"]",
")",
"if",
"not",
"first_source",
".",
"endswith",
"(",
"\".idl\"",
")",
":",
"raise",
"ValueError",
"(",
"\"Bad idl file name '%s', it must end with '.idl' \"",
"%",
"(",
"first_source",
")",
")",
"base_file_name",
",",
"_",
"=",
"SCons",
".",
"Util",
".",
"splitext",
"(",
"str",
"(",
"target",
"[",
"0",
"]",
")",
")",
"target_source",
"=",
"base_file_name",
"+",
"\"_gen.cpp\"",
"target_header",
"=",
"base_file_name",
"+",
"\"_gen.h\"",
"env",
".",
"Alias",
"(",
"'generated-sources'",
",",
"[",
"target_source",
",",
"target_header",
"]",
")",
"return",
"[",
"target_source",
",",
"target_header",
"]",
",",
"source"
] | https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/site_scons/site_tools/idl_tool.py#L24-L37 |
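An emitter like the one above only takes effect once attached to a Builder; SCons then knows that one .idl source yields both generated files. A hedged sketch (the command line and construction variable are assumptions, not MongoDB's real tool invocation):

import SCons.Builder

idlc_builder = SCons.Builder.Builder(
    action='$IDLC $SOURCE',        # placeholder command
    emitter=idlc_emitter,
    src_suffix='.idl')

# env.Append(BUILDERS={'Idlc': idlc_builder})
# env.Idlc('foo.idl')  ->  targets foo_gen.cpp and foo_gen.h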
|
natanielruiz/android-yolo | 1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f | jni-build/jni/include/tensorflow/contrib/distributions/python/ops/beta.py | python | Beta.allow_nan_stats | (self) | return self._allow_nan_stats | Boolean describing behavior when a stat is undefined for batch member. | Boolean describing behavior when a stat is undefined for batch member. | [
"Boolean",
"describing",
"behavior",
"when",
"a",
"stat",
"is",
"undefined",
"for",
"batch",
"member",
"."
] | def allow_nan_stats(self):
"""Boolean describing behavior when a stat is undefined for batch member."""
return self._allow_nan_stats | [
"def",
"allow_nan_stats",
"(",
"self",
")",
":",
"return",
"self",
".",
"_allow_nan_stats"
] | https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/distributions/python/ops/beta.py#L174-L176 |
|
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/multiprocessing/util.py | python | get_logger | () | return _logger | Returns logger used by multiprocessing | Returns logger used by multiprocessing | [
"Returns",
"logger",
"used",
"by",
"multiprocessing"
] | def get_logger():
'''
Returns logger used by multiprocessing
'''
global _logger
import logging, atexit
logging._acquireLock()
try:
if not _logger:
_logger = logging.getLogger(LOGGER_NAME)
_logger.propagate = 0
logging.addLevelName(SUBDEBUG, 'SUBDEBUG')
logging.addLevelName(SUBWARNING, 'SUBWARNING')
# XXX multiprocessing should cleanup before logging
if hasattr(atexit, 'unregister'):
atexit.unregister(_exit_function)
atexit.register(_exit_function)
else:
atexit._exithandlers.remove((_exit_function, (), {}))
atexit._exithandlers.append((_exit_function, (), {}))
finally:
logging._releaseLock()
return _logger | [
"def",
"get_logger",
"(",
")",
":",
"global",
"_logger",
"import",
"logging",
",",
"atexit",
"logging",
".",
"_acquireLock",
"(",
")",
"try",
":",
"if",
"not",
"_logger",
":",
"_logger",
"=",
"logging",
".",
"getLogger",
"(",
"LOGGER_NAME",
")",
"_logger",
".",
"propagate",
"=",
"0",
"logging",
".",
"addLevelName",
"(",
"SUBDEBUG",
",",
"'SUBDEBUG'",
")",
"logging",
".",
"addLevelName",
"(",
"SUBWARNING",
",",
"'SUBWARNING'",
")",
"# XXX multiprocessing should cleanup before logging",
"if",
"hasattr",
"(",
"atexit",
",",
"'unregister'",
")",
":",
"atexit",
".",
"unregister",
"(",
"_exit_function",
")",
"atexit",
".",
"register",
"(",
"_exit_function",
")",
"else",
":",
"atexit",
".",
"_exithandlers",
".",
"remove",
"(",
"(",
"_exit_function",
",",
"(",
")",
",",
"{",
"}",
")",
")",
"atexit",
".",
"_exithandlers",
".",
"append",
"(",
"(",
"_exit_function",
",",
"(",
")",
",",
"{",
"}",
")",
")",
"finally",
":",
"logging",
".",
"_releaseLock",
"(",
")",
"return",
"_logger"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/multiprocessing/util.py#L83-L110 |
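Typical use of this helper from application code: since get_logger() attaches no handler itself, log_to_stderr() (which wraps the same logger) is the usual companion.

import logging
import multiprocessing

logger = multiprocessing.get_logger()
logger.setLevel(logging.INFO)
multiprocessing.log_to_stderr()   # adds a StreamHandler to the same logger
logger.info('visible from worker processes too')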
|
lmb-freiburg/ogn | 974f72ef4bf840d6f6693d22d1843a79223e77ce | scripts/cpp_lint.py | python | ReplaceAll | (pattern, rep, s) | return _regexp_compile_cache[pattern].sub(rep, s) | Replaces instances of pattern in a string with a replacement.
The compiled regex is kept in a cache shared by Match and Search.
Args:
pattern: regex pattern
rep: replacement text
s: search string
Returns:
string with replacements made (or original string if no replacements) | Replaces instances of pattern in a string with a replacement. | [
"Replaces",
"instances",
"of",
"pattern",
"in",
"a",
"string",
"with",
"a",
"replacement",
"."
] | def ReplaceAll(pattern, rep, s):
"""Replaces instances of pattern in a string with a replacement.
The compiled regex is kept in a cache shared by Match and Search.
Args:
pattern: regex pattern
rep: replacement text
s: search string
Returns:
string with replacements made (or original string if no replacements)
"""
if pattern not in _regexp_compile_cache:
_regexp_compile_cache[pattern] = sre_compile.compile(pattern)
return _regexp_compile_cache[pattern].sub(rep, s) | [
"def",
"ReplaceAll",
"(",
"pattern",
",",
"rep",
",",
"s",
")",
":",
"if",
"pattern",
"not",
"in",
"_regexp_compile_cache",
":",
"_regexp_compile_cache",
"[",
"pattern",
"]",
"=",
"sre_compile",
".",
"compile",
"(",
"pattern",
")",
"return",
"_regexp_compile_cache",
"[",
"pattern",
"]",
".",
"sub",
"(",
"rep",
",",
"s",
")"
] | https://github.com/lmb-freiburg/ogn/blob/974f72ef4bf840d6f6693d22d1843a79223e77ce/scripts/cpp_lint.py#L525-L540 |
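A quick demonstration of the cached-regex helper above (`_regexp_compile_cache` and the `sre_compile` import are module-level state in cpp_lint.py); repeated calls with the same pattern reuse one compiled regex:

print(ReplaceAll(r'//.*$', '', 'x = 1;  // trailing comment'))  # 'x = 1;  '
print(ReplaceAll(r'\s+', ' ', 'int     x ;'))                   # 'int x ;'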
|
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/saved_model/builder_impl.py | python | _maybe_save_assets | (write_fn, assets_to_add=None) | return asset_filename_map | Saves assets to the meta graph.
Args:
write_fn: A function callback that writes assets into meta graph.
assets_to_add: The list where the asset paths are set up.
Returns:
A dict of asset basenames for saving to the original full path to the asset.
Raises:
ValueError: Indicating an invalid filepath tensor. | Saves assets to the meta graph. | [
"Saves",
"assets",
"to",
"the",
"meta",
"graph",
"."
] | def _maybe_save_assets(write_fn, assets_to_add=None):
"""Saves assets to the meta graph.
Args:
write_fn: A function callback that writes assets into meta graph.
assets_to_add: The list where the asset paths are set up.
Returns:
A dict of asset basenames for saving to the original full path to the asset.
Raises:
ValueError: Indicating an invalid filepath tensor.
"""
# Map of target file names to original filenames
asset_filename_map = {}
if assets_to_add is None:
tf_logging.info("No assets to save.")
return asset_filename_map
# Iterate over the supplied assets, build the `AssetFile` proto and add them
# to the meta graph.
for asset_tensor in assets_to_add:
asset_source_filepath = _asset_path_from_tensor(asset_tensor)
if not asset_source_filepath:
raise ValueError(f"Asset filepath tensor {asset_tensor} in is invalid.")
asset_filename = get_asset_filename_to_add(
asset_source_filepath, asset_filename_map)
# Call the passed-in function that builds AssetFileDef proto and adds it
# to either the collection or asset_file_def field of the meta graph.
# Note that this should be done even when the file is a duplicate of an
# already-added file, as the tensor reference should still exist.
write_fn(asset_filename, asset_tensor)
# In the cases where we are adding a duplicate, this will result in the
# last of the filepaths being the one used for copying the file to the
# SavedModel. Since the files in question are the same, it doesn't matter
# either way.
asset_filename_map[asset_filename] = asset_source_filepath
tf_logging.info("Assets added to graph.")
return asset_filename_map | [
"def",
"_maybe_save_assets",
"(",
"write_fn",
",",
"assets_to_add",
"=",
"None",
")",
":",
"# Map of target file names to original filenames",
"asset_filename_map",
"=",
"{",
"}",
"if",
"assets_to_add",
"is",
"None",
":",
"tf_logging",
".",
"info",
"(",
"\"No assets to save.\"",
")",
"return",
"asset_filename_map",
"# Iterate over the supplied assets, build the `AssetFile` proto and add them",
"# to the meta graph.",
"for",
"asset_tensor",
"in",
"assets_to_add",
":",
"asset_source_filepath",
"=",
"_asset_path_from_tensor",
"(",
"asset_tensor",
")",
"if",
"not",
"asset_source_filepath",
":",
"raise",
"ValueError",
"(",
"f\"Asset filepath tensor {asset_tensor} in is invalid.\"",
")",
"asset_filename",
"=",
"get_asset_filename_to_add",
"(",
"asset_source_filepath",
",",
"asset_filename_map",
")",
"# Call the passed-in function that builds AssetFileDef proto and adds it",
"# to either the collection or asset_file_def field of the meta graph.",
"# Note that this should be done even when the file is a duplicate of an",
"# already-added file, as the tensor reference should still exist.",
"write_fn",
"(",
"asset_filename",
",",
"asset_tensor",
")",
"# In the cases where we are adding a duplicate, this will result in the",
"# last of the filepaths being the one used for copying the file to the",
"# SavedModel. Since the files in question are the same, it doesn't matter",
"# either way.",
"asset_filename_map",
"[",
"asset_filename",
"]",
"=",
"asset_source_filepath",
"tf_logging",
".",
"info",
"(",
"\"Assets added to graph.\"",
")",
"return",
"asset_filename_map"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/saved_model/builder_impl.py#L627-L670 |
|
mozilla/DeepSpeech | aa1d28530d531d0d92289bf5f11a49fe516fdc86 | native_client/python/__init__.py | python | TokenMetadata.start_time | (self) | Position of the token in seconds | Position of the token in seconds | [
"Position",
"of",
"the",
"token",
"in",
"seconds"
] | def start_time(self):
"""
Position of the token in seconds
""" | [
"def",
"start_time",
"(",
"self",
")",
":"
] | https://github.com/mozilla/DeepSpeech/blob/aa1d28530d531d0d92289bf5f11a49fe516fdc86/native_client/python/__init__.py#L321-L324 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/venv/__init__.py | python | EnvBuilder._setup_pip | (self, context) | Installs or upgrades pip in a virtual environment | Installs or upgrades pip in a virtual environment | [
"Installs",
"or",
"upgrades",
"pip",
"in",
"a",
"virtual",
"environment"
] | def _setup_pip(self, context):
"""Installs or upgrades pip in a virtual environment"""
# We run ensurepip in isolated mode to avoid side effects from
# environment vars, the current directory and anything else
# intended for the global Python environment
cmd = [context.env_exe, '-Im', 'ensurepip', '--upgrade',
'--default-pip']
subprocess.check_output(cmd, stderr=subprocess.STDOUT) | [
"def",
"_setup_pip",
"(",
"self",
",",
"context",
")",
":",
"# We run ensurepip in isolated mode to avoid side effects from",
"# environment vars, the current directory and anything else",
"# intended for the global Python environment",
"cmd",
"=",
"[",
"context",
".",
"env_exe",
",",
"'-Im'",
",",
"'ensurepip'",
",",
"'--upgrade'",
",",
"'--default-pip'",
"]",
"subprocess",
".",
"check_output",
"(",
"cmd",
",",
"stderr",
"=",
"subprocess",
".",
"STDOUT",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/venv/__init__.py#L281-L288 |
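The same bootstrap can be reproduced by hand; `-Im` combines isolated mode with run-module so ensurepip ignores PYTHONPATH, user site-packages and the current directory. The interpreter path below is hypothetical:

import subprocess

env_exe = '/tmp/venv/bin/python'   # assumed venv interpreter
subprocess.check_output(
    [env_exe, '-Im', 'ensurepip', '--upgrade', '--default-pip'],
    stderr=subprocess.STDOUT)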
||
SpaceNetChallenge/BuildingDetectors | 3def3c44b5847c744cd2f3356182892d92496579 | qinhaifang/src/caffe-mnc/scripts/cpp_lint.py | python | CheckForMultilineCommentsAndStrings | (filename, clean_lines, linenum, error) | Logs an error if we see /* ... */ or "..." that extend past one line.
/* ... */ comments are legit inside macros, for one line.
Otherwise, we prefer // comments, so it's ok to warn about the
other. Likewise, it's ok for strings to extend across multiple
lines, as long as a line continuation character (backslash)
terminates each line. Although not currently prohibited by the C++
style guide, it's ugly and unnecessary. We don't do well with either
in this lint program, so we warn about both.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found. | Logs an error if we see /* ... */ or "..." that extend past one line. | [
"Logs",
"an",
"error",
"if",
"we",
"see",
"/",
"*",
"...",
"*",
"/",
"or",
"...",
"that",
"extend",
"past",
"one",
"line",
"."
] | def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
"""Logs an error if we see /* ... */ or "..." that extend past one line.
/* ... */ comments are legit inside macros, for one line.
Otherwise, we prefer // comments, so it's ok to warn about the
other. Likewise, it's ok for strings to extend across multiple
lines, as long as a line continuation character (backslash)
terminates each line. Although not currently prohibited by the C++
style guide, it's ugly and unnecessary. We don't do well with either
in this lint program, so we warn about both.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
# Remove all \\ (escaped backslashes) from the line. They are OK, and the
# second (escaped) slash may trigger later \" detection erroneously.
line = line.replace('\\\\', '')
if line.count('/*') > line.count('*/'):
error(filename, linenum, 'readability/multiline_comment', 5,
'Complex multi-line /*...*/-style comment found. '
'Lint may give bogus warnings. '
'Consider replacing these with //-style comments, '
'with #if 0...#endif, '
'or with more clearly structured multi-line comments.')
if (line.count('"') - line.count('\\"')) % 2:
error(filename, linenum, 'readability/multiline_string', 5,
'Multi-line string ("...") found. This lint script doesn\'t '
'do well with such strings, and may give bogus warnings. '
'Use C++11 raw strings or concatenation instead.') | [
"def",
"CheckForMultilineCommentsAndStrings",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"# Remove all \\\\ (escaped backslashes) from the line. They are OK, and the",
"# second (escaped) slash may trigger later \\\" detection erroneously.",
"line",
"=",
"line",
".",
"replace",
"(",
"'\\\\\\\\'",
",",
"''",
")",
"if",
"line",
".",
"count",
"(",
"'/*'",
")",
">",
"line",
".",
"count",
"(",
"'*/'",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/multiline_comment'",
",",
"5",
",",
"'Complex multi-line /*...*/-style comment found. '",
"'Lint may give bogus warnings. '",
"'Consider replacing these with //-style comments, '",
"'with #if 0...#endif, '",
"'or with more clearly structured multi-line comments.'",
")",
"if",
"(",
"line",
".",
"count",
"(",
"'\"'",
")",
"-",
"line",
".",
"count",
"(",
"'\\\\\"'",
")",
")",
"%",
"2",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/multiline_string'",
",",
"5",
",",
"'Multi-line string (\"...\") found. This lint script doesn\\'t '",
"'do well with such strings, and may give bogus warnings. '",
"'Use C++11 raw strings or concatenation instead.'",
")"
] | https://github.com/SpaceNetChallenge/BuildingDetectors/blob/3def3c44b5847c744cd2f3356182892d92496579/qinhaifang/src/caffe-mnc/scripts/cpp_lint.py#L1526-L1561 |
||
Z3Prover/z3 | d745d03afdfdf638d66093e2bfbacaf87187f35b | src/api/python/z3/z3.py | python | Probe.__lt__ | (self, other) | return Probe(Z3_probe_lt(self.ctx.ref(), self.probe, _to_probe(other, self.ctx).probe), self.ctx) | Return a probe that evaluates to "true" when the value returned by `self`
is less than the value returned by `other`.
>>> p = Probe('size') < 10
>>> x = Int('x')
>>> g = Goal()
>>> g.add(x > 0)
>>> g.add(x < 10)
>>> p(g)
1.0 | Return a probe that evaluates to "true" when the value returned by `self`
is less than the value returned by `other`. | [
"Return",
"a",
"probe",
"that",
"evaluates",
"to",
"true",
"when",
"the",
"value",
"returned",
"by",
"self",
"is",
"less",
"than",
"the",
"value",
"returned",
"by",
"other",
"."
] | def __lt__(self, other):
"""Return a probe that evaluates to "true" when the value returned by `self`
is less than the value returned by `other`.
>>> p = Probe('size') < 10
>>> x = Int('x')
>>> g = Goal()
>>> g.add(x > 0)
>>> g.add(x < 10)
>>> p(g)
1.0
"""
return Probe(Z3_probe_lt(self.ctx.ref(), self.probe, _to_probe(other, self.ctx).probe), self.ctx) | [
"def",
"__lt__",
"(",
"self",
",",
"other",
")",
":",
"return",
"Probe",
"(",
"Z3_probe_lt",
"(",
"self",
".",
"ctx",
".",
"ref",
"(",
")",
",",
"self",
".",
"probe",
",",
"_to_probe",
"(",
"other",
",",
"self",
".",
"ctx",
")",
".",
"probe",
")",
",",
"self",
".",
"ctx",
")"
] | https://github.com/Z3Prover/z3/blob/d745d03afdfdf638d66093e2bfbacaf87187f35b/src/api/python/z3/z3.py#L8437-L8449 |
|
neopenx/Dragon | 0e639a7319035ddc81918bd3df059230436ee0a1 | Dragon/python/dragon/vm/caffe/coord_map.py | python | compose | (base_map, next_map) | return ax, a1 * a2, a1 * b2 + b1 | Compose a base coord map with scale a1, shift b1 with a further coord map
with scale a2, shift b2. The scales multiply and the further shift, b2,
is scaled by base coord scale a1. | Compose a base coord map with scale a1, shift b1 with a further coord map
with scale a2, shift b2. The scales multiply and the further shift, b2,
is scaled by base coord scale a1. | [
"Compose",
"a",
"base",
"coord",
"map",
"with",
"scale",
"a1",
"shift",
"b1",
"with",
"a",
"further",
"coord",
"map",
"with",
"scale",
"a2",
"shift",
"b2",
".",
"The",
"scales",
"multiply",
"and",
"the",
"further",
"shift",
"b2",
"is",
"scaled",
"by",
"base",
"coord",
"scale",
"a1",
"."
] | def compose(base_map, next_map):
"""
Compose a base coord map with scale a1, shift b1 with a further coord map
with scale a2, shift b2. The scales multiply and the further shift, b2,
is scaled by base coord scale a1.
"""
ax1, a1, b1 = base_map
ax2, a2, b2 = next_map
if ax1 is None:
ax = ax2
elif ax2 is None or ax1 == ax2:
ax = ax1
else:
raise AxisMismatchException
return ax, a1 * a2, a1 * b2 + b1 | [
"def",
"compose",
"(",
"base_map",
",",
"next_map",
")",
":",
"ax1",
",",
"a1",
",",
"b1",
"=",
"base_map",
"ax2",
",",
"a2",
",",
"b2",
"=",
"next_map",
"if",
"ax1",
"is",
"None",
":",
"ax",
"=",
"ax2",
"elif",
"ax2",
"is",
"None",
"or",
"ax1",
"==",
"ax2",
":",
"ax",
"=",
"ax1",
"else",
":",
"raise",
"AxisMismatchException",
"return",
"ax",
",",
"a1",
"*",
"a2",
",",
"a1",
"*",
"b2",
"+",
"b1"
] | https://github.com/neopenx/Dragon/blob/0e639a7319035ddc81918bd3df059230436ee0a1/Dragon/python/dragon/vm/caffe/coord_map.py#L89-L103 |
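A worked check of the affine rule above: substituting x -> a2*x + b2 into x -> a1*x + b1 gives x -> (a1*a2)*x + (a1*b2 + b1), so a scale-2 map followed by a scale-2, shift-1 map composes to scale 4, shift 2.

print(compose((None, 2, 0), (None, 2, 1)))   # (None, 4, 2)
print(compose((None, 1, 0), (None, 3, 5)))   # identity base map: (None, 3, 5)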
|
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/tools/saved_model_cli.py | python | run_saved_model_with_feed_dict | (saved_model_dir, tag_set, signature_def_key,
input_tensor_key_feed_dict, outdir,
overwrite_flag, tf_debug=False) | | Runs SavedModel and fetches all outputs.
Runs the input dictionary through the MetaGraphDef within a SavedModel
specified by the given tag_set and SignatureDef. Also saves the outputs to file
if outdir is not None.
Args:
saved_model_dir: Directory containing the SavedModel to execute.
tag_set: Group of tag(s) of the MetaGraphDef with the SignatureDef map, in
string format, separated by ','. If the tag-set contains multiple tags, all
tags must be passed in.
signature_def_key: A SignatureDef key string.
input_tensor_key_feed_dict: A dictionary mapping input keys to numpy ndarrays.
outdir: A directory to save the outputs to. If the directory doesn't exist,
it will be created.
overwrite_flag: A boolean flag to allow overwrite output file if file with
the same name exists.
tf_debug: A boolean flag to use TensorFlow Debugger (TFDBG) to observe the
intermediate Tensor values and runtime GraphDefs while running the
SavedModel.
Raises:
ValueError: When any of the input tensor keys is not valid.
RuntimeError: An error when output file already exists and overwrite is not
enabled. | Runs SavedModel and fetches all outputs. | [
"Runs",
"SavedModel",
"and",
"fetch",
"all",
"outputs",
"."
] | def run_saved_model_with_feed_dict(saved_model_dir, tag_set, signature_def_key,
input_tensor_key_feed_dict, outdir,
overwrite_flag, tf_debug=False):
"""Runs SavedModel and fetch all outputs.
Runs the input dictionary through the MetaGraphDef within a SavedModel
specified by the given tag_set and SignatureDef. Also saves the outputs to file
if outdir is not None.
Args:
saved_model_dir: Directory containing the SavedModel to execute.
tag_set: Group of tag(s) of the MetaGraphDef with the SignatureDef map, in
string format, separated by ','. If the tag-set contains multiple tags, all
tags must be passed in.
signature_def_key: A SignatureDef key string.
input_tensor_key_feed_dict: A dictionary mapping input keys to numpy ndarrays.
outdir: A directory to save the outputs to. If the directory doesn't exist,
it will be created.
overwrite_flag: A boolean flag to allow overwrite output file if file with
the same name exists.
tf_debug: A boolean flag to use TensorFlow Debugger (TFDBG) to observe the
intermediate Tensor values and runtime GraphDefs while running the
SavedModel.
Raises:
ValueError: When any of the input tensor keys is not valid.
RuntimeError: An error when output file already exists and overwrite is not
enabled.
"""
# Get a list of output tensor names.
meta_graph_def = get_meta_graph_def(saved_model_dir, tag_set)
# Re-create feed_dict based on input tensor name instead of key as session.run
# uses tensor name.
inputs_tensor_info = _get_inputs_tensor_info_from_meta_graph_def(
meta_graph_def, signature_def_key)
# Check if input tensor keys are valid.
for input_key_name in input_tensor_key_feed_dict.keys():
if input_key_name not in inputs_tensor_info.keys():
raise ValueError(
'"%s" is not a valid input key. Please choose from %s, or use '
'--show option.' %
(input_key_name, '"' + '", "'.join(inputs_tensor_info.keys()) + '"'))
inputs_feed_dict = {
inputs_tensor_info[key].name: tensor
for key, tensor in input_tensor_key_feed_dict.items()
}
# Get outputs
outputs_tensor_info = _get_outputs_tensor_info_from_meta_graph_def(
meta_graph_def, signature_def_key)
# Sort to preserve order because we need to go from value to key later.
output_tensor_keys_sorted = sorted(outputs_tensor_info.keys())
output_tensor_names_sorted = [
outputs_tensor_info[tensor_key].name
for tensor_key in output_tensor_keys_sorted
]
with session.Session(graph=ops_lib.Graph()) as sess:
loader.load(sess, tag_set.split(','), saved_model_dir)
if tf_debug:
sess = local_cli_wrapper.LocalCLIDebugWrapperSession(sess)
outputs = sess.run(output_tensor_names_sorted, feed_dict=inputs_feed_dict)
for i, output in enumerate(outputs):
output_tensor_key = output_tensor_keys_sorted[i]
print('Result for output key %s:\n%s' % (output_tensor_key, output))
# Only save if outdir is specified.
if outdir:
# Create directory if outdir does not exist
if not os.path.isdir(outdir):
os.makedirs(outdir)
output_full_path = os.path.join(outdir, output_tensor_key + '.npy')
# If overwrite not enabled and file already exist, error out
if not overwrite_flag and os.path.exists(output_full_path):
raise RuntimeError(
'Output file %s already exists. Add \"--overwrite\" to overwrite'
' the existing output files.' % output_full_path)
np.save(output_full_path, output)
print('Output %s is saved to %s' % (output_tensor_key,
output_full_path)) | [
"def",
"run_saved_model_with_feed_dict",
"(",
"saved_model_dir",
",",
"tag_set",
",",
"signature_def_key",
",",
"input_tensor_key_feed_dict",
",",
"outdir",
",",
"overwrite_flag",
",",
"tf_debug",
"=",
"False",
")",
":",
"# Get a list of output tensor names.",
"meta_graph_def",
"=",
"get_meta_graph_def",
"(",
"saved_model_dir",
",",
"tag_set",
")",
"# Re-create feed_dict based on input tensor name instead of key as session.run",
"# uses tensor name.",
"inputs_tensor_info",
"=",
"_get_inputs_tensor_info_from_meta_graph_def",
"(",
"meta_graph_def",
",",
"signature_def_key",
")",
"# Check if input tensor keys are valid.",
"for",
"input_key_name",
"in",
"input_tensor_key_feed_dict",
".",
"keys",
"(",
")",
":",
"if",
"input_key_name",
"not",
"in",
"inputs_tensor_info",
".",
"keys",
"(",
")",
":",
"raise",
"ValueError",
"(",
"'\"%s\" is not a valid input key. Please choose from %s, or use '",
"'--show option.'",
"%",
"(",
"input_key_name",
",",
"'\"'",
"+",
"'\", \"'",
".",
"join",
"(",
"inputs_tensor_info",
".",
"keys",
"(",
")",
")",
"+",
"'\"'",
")",
")",
"inputs_feed_dict",
"=",
"{",
"inputs_tensor_info",
"[",
"key",
"]",
".",
"name",
":",
"tensor",
"for",
"key",
",",
"tensor",
"in",
"input_tensor_key_feed_dict",
".",
"items",
"(",
")",
"}",
"# Get outputs",
"outputs_tensor_info",
"=",
"_get_outputs_tensor_info_from_meta_graph_def",
"(",
"meta_graph_def",
",",
"signature_def_key",
")",
"# Sort to preserve order because we need to go from value to key later.",
"output_tensor_keys_sorted",
"=",
"sorted",
"(",
"outputs_tensor_info",
".",
"keys",
"(",
")",
")",
"output_tensor_names_sorted",
"=",
"[",
"outputs_tensor_info",
"[",
"tensor_key",
"]",
".",
"name",
"for",
"tensor_key",
"in",
"output_tensor_keys_sorted",
"]",
"with",
"session",
".",
"Session",
"(",
"graph",
"=",
"ops_lib",
".",
"Graph",
"(",
")",
")",
"as",
"sess",
":",
"loader",
".",
"load",
"(",
"sess",
",",
"tag_set",
".",
"split",
"(",
"','",
")",
",",
"saved_model_dir",
")",
"if",
"tf_debug",
":",
"sess",
"=",
"local_cli_wrapper",
".",
"LocalCLIDebugWrapperSession",
"(",
"sess",
")",
"outputs",
"=",
"sess",
".",
"run",
"(",
"output_tensor_names_sorted",
",",
"feed_dict",
"=",
"inputs_feed_dict",
")",
"for",
"i",
",",
"output",
"in",
"enumerate",
"(",
"outputs",
")",
":",
"output_tensor_key",
"=",
"output_tensor_keys_sorted",
"[",
"i",
"]",
"print",
"(",
"'Result for output key %s:\\n%s'",
"%",
"(",
"output_tensor_key",
",",
"output",
")",
")",
"# Only save if outdir is specified.",
"if",
"outdir",
":",
"# Create directory if outdir does not exist",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"outdir",
")",
":",
"os",
".",
"makedirs",
"(",
"outdir",
")",
"output_full_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"outdir",
",",
"output_tensor_key",
"+",
"'.npy'",
")",
"# If overwrite not enabled and file already exist, error out",
"if",
"not",
"overwrite_flag",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"output_full_path",
")",
":",
"raise",
"RuntimeError",
"(",
"'Output file %s already exists. Add \\\"--overwrite\\\" to overwrite'",
"' the existing output files.'",
"%",
"output_full_path",
")",
"np",
".",
"save",
"(",
"output_full_path",
",",
"output",
")",
"print",
"(",
"'Output %s is saved to %s'",
"%",
"(",
"output_tensor_key",
",",
"output_full_path",
")",
")"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/tools/saved_model_cli.py#L233-L319 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/targets/setobj.py | python | is_hash_deleted | (context, builder, h) | return builder.icmp_unsigned('==', h, deleted) | Whether the hash value denotes a deleted entry. | Whether the hash value denotes a deleted entry. | [
"Whether",
"the",
"hash",
"value",
"denotes",
"a",
"deleted",
"entry",
"."
] | def is_hash_deleted(context, builder, h):
"""
Whether the hash value denotes a deleted entry.
"""
deleted = ir.Constant(h.type, DELETED)
return builder.icmp_unsigned('==', h, deleted) | [
"def",
"is_hash_deleted",
"(",
"context",
",",
"builder",
",",
"h",
")",
":",
"deleted",
"=",
"ir",
".",
"Constant",
"(",
"h",
".",
"type",
",",
"DELETED",
")",
"return",
"builder",
".",
"icmp_unsigned",
"(",
"'=='",
",",
"h",
",",
"deleted",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/targets/setobj.py#L78-L83 |
|
apiaryio/drafter | 4634ebd07f6c6f257cc656598ccd535492fdfb55 | tools/gyp/pylib/gyp/msvs_emulation.py | python | MsvsSettings.AdjustMidlIncludeDirs | (self, midl_include_dirs, config) | return [self.ConvertVSMacros(p, config=config) for p in includes] | Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar. | Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar. | [
"Updates",
"midl_include_dirs",
"to",
"expand",
"VS",
"specific",
"paths",
"and",
"adds",
"the",
"system",
"include",
"dirs",
"used",
"for",
"platform",
"SDK",
"and",
"similar",
"."
] | def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes] | [
"def",
"AdjustMidlIncludeDirs",
"(",
"self",
",",
"midl_include_dirs",
",",
"config",
")",
":",
"config",
"=",
"self",
".",
"_TargetConfig",
"(",
"config",
")",
"includes",
"=",
"midl_include_dirs",
"+",
"self",
".",
"msvs_system_include_dirs",
"[",
"config",
"]",
"includes",
".",
"extend",
"(",
"self",
".",
"_Setting",
"(",
"(",
"'VCMIDLTool'",
",",
"'AdditionalIncludeDirectories'",
")",
",",
"config",
",",
"default",
"=",
"[",
"]",
")",
")",
"return",
"[",
"self",
".",
"ConvertVSMacros",
"(",
"p",
",",
"config",
"=",
"config",
")",
"for",
"p",
"in",
"includes",
"]"
] | https://github.com/apiaryio/drafter/blob/4634ebd07f6c6f257cc656598ccd535492fdfb55/tools/gyp/pylib/gyp/msvs_emulation.py#L341-L348 |
|
happynear/caffe-windows | 967eedf25009e334b7f6f933bb5e17aaaff5bef6 | examples/pycaffe/tools.py | python | SimpleTransformer.preprocess | (self, im) | | return im | preprocess() emulates the pre-processing occurring in the vgg16 caffe
prototxt. | preprocess() emulates the pre-processing occurring in the vgg16 caffe
prototxt. | [
"preprocess",
"()",
"emulate",
"the",
"pre",
"-",
"processing",
"occurring",
"in",
"the",
"vgg16",
"caffe",
"prototxt",
"."
] | def preprocess(self, im):
"""
preprocess() emulates the pre-processing occurring in the vgg16 caffe
prototxt.
"""
im = np.float32(im)
im = im[:, :, ::-1] # change to BGR
im -= self.mean
im *= self.scale
im = im.transpose((2, 0, 1))
return im | [
"def",
"preprocess",
"(",
"self",
",",
"im",
")",
":",
"im",
"=",
"np",
".",
"float32",
"(",
"im",
")",
"im",
"=",
"im",
"[",
":",
",",
":",
",",
":",
":",
"-",
"1",
"]",
"# change to BGR",
"im",
"-=",
"self",
".",
"mean",
"im",
"*=",
"self",
".",
"scale",
"im",
"=",
"im",
".",
"transpose",
"(",
"(",
"2",
",",
"0",
",",
"1",
")",
")",
"return",
"im"
] | https://github.com/happynear/caffe-windows/blob/967eedf25009e334b7f6f933bb5e17aaaff5bef6/examples/pycaffe/tools.py#L27-L39 |
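For visualization the transform is usually undone in reverse order; a hedged inverse sketch (the mean/scale argument names are assumed rather than read from SimpleTransformer's constructor):

import numpy as np

def deprocess(blob, mean, scale):
    im = blob.transpose((1, 2, 0))     # CHW -> HWC
    im = im / scale                    # undo scaling
    im = im + mean                     # undo mean subtraction
    return np.uint8(im[:, :, ::-1])    # BGR -> RGB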
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/ipython/py3/IPython/core/completer.py | python | rectify_completions | (text: str, completions: _IC, *, _debug=False) | Rectify a set of completions to all have the same ``start`` and ``end``
.. warning::
Unstable
This function is unstable, API may change without warning.
It will also raise unless used in the proper context manager.
Parameters
----------
text: str
text that should be completed.
completions: Iterator[Completion]
iterator over the completions to rectify
:any:`jedi.api.classes.Completion` s returned by Jedi may not have the same start and end, though
the Jupyter Protocol requires them to behave like so. This will readjust
the completion to have the same ``start`` and ``end`` by padding both
extremities with surrounding text.
During stabilisation this should support a ``_debug`` option to log which
completions are returned by the IPython completer but not found in Jedi, in
order to make upstream bug reports.
"Rectify",
"a",
"set",
"of",
"completions",
"to",
"all",
"have",
"the",
"same",
"start",
"and",
"end"
] | def rectify_completions(text: str, completions: _IC, *, _debug=False)->_IC:
"""
Rectify a set of completions to all have the same ``start`` and ``end``
.. warning::
Unstable
This function is unstable, API may change without warning.
It will also raise unless used in the proper context manager.
Parameters
----------
text: str
text that should be completed.
completions: Iterator[Completion]
iterator over the completions to rectify
:any:`jedi.api.classes.Completion` s returned by Jedi may not have the same start and end, though
the Jupyter Protocol requires them to behave like so. This will readjust
the completion to have the same ``start`` and ``end`` by padding both
extremities with surrounding text.
During stabilisation this should support a ``_debug`` option to log which
completions are returned by the IPython completer but not found in Jedi, in
order to make upstream bug reports.
"""
warnings.warn("`rectify_completions` is a provisional API (as of IPython 6.0). "
"It may change without warnings. "
"Use in corresponding context manager.",
category=ProvisionalCompleterWarning, stacklevel=2)
completions = list(completions)
if not completions:
return
starts = (c.start for c in completions)
ends = (c.end for c in completions)
new_start = min(starts)
new_end = max(ends)
seen_jedi = set()
seen_python_matches = set()
for c in completions:
new_text = text[new_start:c.start] + c.text + text[c.end:new_end]
if c._origin == 'jedi':
seen_jedi.add(new_text)
elif c._origin == 'IPCompleter.python_matches':
seen_python_matches.add(new_text)
yield Completion(new_start, new_end, new_text, type=c.type, _origin=c._origin, signature=c.signature)
diff = seen_python_matches.difference(seen_jedi)
if diff and _debug:
print('IPython.python matches have extras:', diff) | [
"def",
"rectify_completions",
"(",
"text",
":",
"str",
",",
"completions",
":",
"_IC",
",",
"*",
",",
"_debug",
"=",
"False",
")",
"->",
"_IC",
":",
"warnings",
".",
"warn",
"(",
"\"`rectify_completions` is a provisional API (as of IPython 6.0). \"",
"\"It may change without warnings. \"",
"\"Use in corresponding context manager.\"",
",",
"category",
"=",
"ProvisionalCompleterWarning",
",",
"stacklevel",
"=",
"2",
")",
"completions",
"=",
"list",
"(",
"completions",
")",
"if",
"not",
"completions",
":",
"return",
"starts",
"=",
"(",
"c",
".",
"start",
"for",
"c",
"in",
"completions",
")",
"ends",
"=",
"(",
"c",
".",
"end",
"for",
"c",
"in",
"completions",
")",
"new_start",
"=",
"min",
"(",
"starts",
")",
"new_end",
"=",
"max",
"(",
"ends",
")",
"seen_jedi",
"=",
"set",
"(",
")",
"seen_python_matches",
"=",
"set",
"(",
")",
"for",
"c",
"in",
"completions",
":",
"new_text",
"=",
"text",
"[",
"new_start",
":",
"c",
".",
"start",
"]",
"+",
"c",
".",
"text",
"+",
"text",
"[",
"c",
".",
"end",
":",
"new_end",
"]",
"if",
"c",
".",
"_origin",
"==",
"'jedi'",
":",
"seen_jedi",
".",
"add",
"(",
"new_text",
")",
"elif",
"c",
".",
"_origin",
"==",
"'IPCompleter.python_matches'",
":",
"seen_python_matches",
".",
"add",
"(",
"new_text",
")",
"yield",
"Completion",
"(",
"new_start",
",",
"new_end",
",",
"new_text",
",",
"type",
"=",
"c",
".",
"type",
",",
"_origin",
"=",
"c",
".",
"_origin",
",",
"signature",
"=",
"c",
".",
"signature",
")",
"diff",
"=",
"seen_python_matches",
".",
"difference",
"(",
"seen_jedi",
")",
"if",
"diff",
"and",
"_debug",
":",
"print",
"(",
"'IPython.python matches have extras:'",
",",
"diff",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py3/IPython/core/completer.py#L465-L518 |
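Driving the provisional API typically looks like the hedged sketch below; `provisionalcompleter()` is the context manager that silences the ProvisionalCompleterWarning raised above.

from IPython.core.completer import provisionalcompleter, rectify_completions

def completions_for(shell, text):
    # `shell` is an InteractiveShell instance, e.g. from get_ipython()
    with provisionalcompleter():
        raw = shell.Completer.completions(text, len(text))
        return list(rectify_completions(text, raw))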
||
hpi-xnor/BMXNet | ed0b201da6667887222b8e4b5f997c4f6b61943d | example/rcnn/rcnn/dataset/coco.py | python | coco.__init__ | (self, image_set, root_path, data_path) | fill basic information to initialize imdb
:param image_set: train2014, val2014, test2015
:param root_path: 'data', will write 'rpn_data', 'cache'
:param data_path: 'data/coco' | fill basic information to initialize imdb
:param image_set: train2014, val2014, test2015
:param root_path: 'data', will write 'rpn_data', 'cache'
:param data_path: 'data/coco' | [
"fill",
"basic",
"information",
"to",
"initialize",
"imdb",
":",
"param",
"image_set",
":",
"train2014",
"val2014",
"test2015",
":",
"param",
"root_path",
":",
"data",
"will",
"write",
"rpn_data",
"cache",
":",
"param",
"data_path",
":",
"data",
"/",
"coco"
] | def __init__(self, image_set, root_path, data_path):
"""
fill basic information to initialize imdb
:param image_set: train2014, val2014, test2015
:param root_path: 'data', will write 'rpn_data', 'cache'
:param data_path: 'data/coco'
"""
super(coco, self).__init__('COCO', image_set, root_path, data_path)
self.root_path = root_path
self.data_path = data_path
self.coco = COCO(self._get_ann_file())
# deal with class names
cats = [cat['name'] for cat in self.coco.loadCats(self.coco.getCatIds())]
self.classes = ['__background__'] + cats
self.num_classes = len(self.classes)
self._class_to_ind = dict(zip(self.classes, xrange(self.num_classes)))
self._class_to_coco_ind = dict(zip(cats, self.coco.getCatIds()))
self._coco_ind_to_class_ind = dict([(self._class_to_coco_ind[cls], self._class_to_ind[cls])
for cls in self.classes[1:]])
# load image file names
self.image_set_index = self._load_image_set_index()
self.num_images = len(self.image_set_index)
logger.info('%s num_images %d' % (self.name, self.num_images))
# deal with data name
view_map = {'minival2014': 'val2014',
'valminusminival2014': 'val2014'}
self.data_name = view_map[image_set] if image_set in view_map else image_set | [
"def",
"__init__",
"(",
"self",
",",
"image_set",
",",
"root_path",
",",
"data_path",
")",
":",
"super",
"(",
"coco",
",",
"self",
")",
".",
"__init__",
"(",
"'COCO'",
",",
"image_set",
",",
"root_path",
",",
"data_path",
")",
"self",
".",
"root_path",
"=",
"root_path",
"self",
".",
"data_path",
"=",
"data_path",
"self",
".",
"coco",
"=",
"COCO",
"(",
"self",
".",
"_get_ann_file",
"(",
")",
")",
"# deal with class names",
"cats",
"=",
"[",
"cat",
"[",
"'name'",
"]",
"for",
"cat",
"in",
"self",
".",
"coco",
".",
"loadCats",
"(",
"self",
".",
"coco",
".",
"getCatIds",
"(",
")",
")",
"]",
"self",
".",
"classes",
"=",
"[",
"'__background__'",
"]",
"+",
"cats",
"self",
".",
"num_classes",
"=",
"len",
"(",
"self",
".",
"classes",
")",
"self",
".",
"_class_to_ind",
"=",
"dict",
"(",
"zip",
"(",
"self",
".",
"classes",
",",
"xrange",
"(",
"self",
".",
"num_classes",
")",
")",
")",
"self",
".",
"_class_to_coco_ind",
"=",
"dict",
"(",
"zip",
"(",
"cats",
",",
"self",
".",
"coco",
".",
"getCatIds",
"(",
")",
")",
")",
"self",
".",
"_coco_ind_to_class_ind",
"=",
"dict",
"(",
"[",
"(",
"self",
".",
"_class_to_coco_ind",
"[",
"cls",
"]",
",",
"self",
".",
"_class_to_ind",
"[",
"cls",
"]",
")",
"for",
"cls",
"in",
"self",
".",
"classes",
"[",
"1",
":",
"]",
"]",
")",
"# load image file names",
"self",
".",
"image_set_index",
"=",
"self",
".",
"_load_image_set_index",
"(",
")",
"self",
".",
"num_images",
"=",
"len",
"(",
"self",
".",
"image_set_index",
")",
"logger",
".",
"info",
"(",
"'%s num_images %d'",
"%",
"(",
"self",
".",
"name",
",",
"self",
".",
"num_images",
")",
")",
"# deal with data name",
"view_map",
"=",
"{",
"'minival2014'",
":",
"'val2014'",
",",
"'valminusminival2014'",
":",
"'val2014'",
"}",
"self",
".",
"data_name",
"=",
"view_map",
"[",
"image_set",
"]",
"if",
"image_set",
"in",
"view_map",
"else",
"image_set"
] | https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/example/rcnn/rcnn/dataset/coco.py#L34-L63 |
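The category bookkeeping in __init__ maps straight onto the pycocotools API; a standalone sketch (the annotation path is hypothetical):

from pycocotools.coco import COCO

coco_api = COCO('data/coco/annotations/instances_val2014.json')
cats = [cat['name'] for cat in coco_api.loadCats(coco_api.getCatIds())]
classes = ['__background__'] + cats                 # index 0 = background
class_to_coco_ind = dict(zip(cats, coco_api.getCatIds()))
print(len(classes), len(coco_api.getImgIds()))      # 81 classes for COCO 2014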
||
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/imaplib.py | python | IMAP4.getquota | (self, root) | return self._untagged_response(typ, dat, 'QUOTA') | Get the quota root's resource usage and limits.
Part of the IMAP4 QUOTA extension defined in rfc2087.
(typ, [data]) = <instance>.getquota(root) | Get the quota root's resource usage and limits. | [
"Get",
"the",
"quota",
"root",
"s",
"resource",
"usage",
"and",
"limits",
"."
] | def getquota(self, root):
"""Get the quota root's resource usage and limits.
Part of the IMAP4 QUOTA extension defined in rfc2087.
(typ, [data]) = <instance>.getquota(root)
"""
typ, dat = self._simple_command('GETQUOTA', root)
return self._untagged_response(typ, dat, 'QUOTA') | [
"def",
"getquota",
"(",
"self",
",",
"root",
")",
":",
"typ",
",",
"dat",
"=",
"self",
".",
"_simple_command",
"(",
"'GETQUOTA'",
",",
"root",
")",
"return",
"self",
".",
"_untagged_response",
"(",
"typ",
",",
"dat",
",",
"'QUOTA'",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/imaplib.py#L464-L472 |
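A hedged end-to-end use of the QUOTA extension (host and credentials are placeholders; the server must advertise QUOTA in its capabilities):

import imaplib

M = imaplib.IMAP4_SSL('imap.example.com')   # hypothetical host
M.login('user', 'password')
if 'QUOTA' in M.capabilities:
    typ, quota = M.getquota('""')           # "" names the default quota root
    print(typ, quota)                       # e.g. ('OK', [b'"" (STORAGE 12 512)'])
M.logout()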
|
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/contrib/rnn/python/ops/rnn_cell.py | python | LayerNormBasicLSTMCell.__init__ | (self, num_units, forget_bias=1.0,
input_size=None, activation=math_ops.tanh,
layer_norm=True, norm_gain=1.0, norm_shift=0.0,
dropout_keep_prob=1.0, dropout_prob_seed=None) | Initializes the basic LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell.
forget_bias: float, The bias added to forget gates (see above).
input_size: Deprecated and unused.
activation: Activation function of the inner states.
layer_norm: If `True`, layer normalization will be applied.
norm_gain: float, The layer normalization gain initial value. If
`layer_norm` has been set to `False`, this argument will be ignored.
norm_shift: float, The layer normalization shift initial value. If
`layer_norm` has been set to `False`, this argument will be ignored.
dropout_keep_prob: unit Tensor or float between 0 and 1 representing the
recurrent dropout probability value. If float and 1.0, no dropout will
be applied.
dropout_prob_seed: (optional) integer, the randomness seed. | Initializes the basic LSTM cell. | [
"Initializes",
"the",
"basic",
"LSTM",
"cell",
"."
] | def __init__(self, num_units, forget_bias=1.0,
input_size=None, activation=math_ops.tanh,
layer_norm=True, norm_gain=1.0, norm_shift=0.0,
dropout_keep_prob=1.0, dropout_prob_seed=None):
"""Initializes the basic LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell.
forget_bias: float, The bias added to forget gates (see above).
input_size: Deprecated and unused.
activation: Activation function of the inner states.
layer_norm: If `True`, layer normalization will be applied.
norm_gain: float, The layer normalization gain initial value. If
`layer_norm` has been set to `False`, this argument will be ignored.
norm_shift: float, The layer normalization shift initial value. If
`layer_norm` has been set to `False`, this argument will be ignored.
dropout_keep_prob: unit Tensor or float between 0 and 1 representing the
recurrent dropout probability value. If float and 1.0, no dropout will
be applied.
dropout_prob_seed: (optional) integer, the randomness seed.
"""
if input_size is not None:
logging.warn("%s: The input_size parameter is deprecated.", self)
self._num_units = num_units
self._activation = activation
self._forget_bias = forget_bias
self._keep_prob = dropout_keep_prob
self._seed = dropout_prob_seed
self._layer_norm = layer_norm
self._g = norm_gain
self._b = norm_shift | [
"def",
"__init__",
"(",
"self",
",",
"num_units",
",",
"forget_bias",
"=",
"1.0",
",",
"input_size",
"=",
"None",
",",
"activation",
"=",
"math_ops",
".",
"tanh",
",",
"layer_norm",
"=",
"True",
",",
"norm_gain",
"=",
"1.0",
",",
"norm_shift",
"=",
"0.0",
",",
"dropout_keep_prob",
"=",
"1.0",
",",
"dropout_prob_seed",
"=",
"None",
")",
":",
"if",
"input_size",
"is",
"not",
"None",
":",
"logging",
".",
"warn",
"(",
"\"%s: The input_size parameter is deprecated.\"",
",",
"self",
")",
"self",
".",
"_num_units",
"=",
"num_units",
"self",
".",
"_activation",
"=",
"activation",
"self",
".",
"_forget_bias",
"=",
"forget_bias",
"self",
".",
"_keep_prob",
"=",
"dropout_keep_prob",
"self",
".",
"_seed",
"=",
"dropout_prob_seed",
"self",
".",
"_layer_norm",
"=",
"layer_norm",
"self",
".",
"_g",
"=",
"norm_gain",
"self",
".",
"_b",
"=",
"norm_shift"
] | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/rnn/python/ops/rnn_cell.py#L911-L943 |
||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/pandas/py2/pandas/core/indexes/multi.py | python | MultiIndex.append | (self, other) | Append a collection of Index options together
Parameters
----------
other : Index or list/tuple of indices
Returns
-------
appended : Index | Append a collection of Index options together | [
"Append",
"a",
"collection",
"of",
"Index",
"options",
"together"
] | def append(self, other):
"""
Append a collection of Index options together
Parameters
----------
other : Index or list/tuple of indices
Returns
-------
appended : Index
"""
if not isinstance(other, (list, tuple)):
other = [other]
if all((isinstance(o, MultiIndex) and o.nlevels >= self.nlevels)
for o in other):
arrays = []
for i in range(self.nlevels):
label = self._get_level_values(i)
appended = [o._get_level_values(i) for o in other]
arrays.append(label.append(appended))
return MultiIndex.from_arrays(arrays, names=self.names)
to_concat = (self.values, ) + tuple(k._values for k in other)
new_tuples = np.concatenate(to_concat)
# if all(isinstance(x, MultiIndex) for x in other):
try:
return MultiIndex.from_tuples(new_tuples, names=self.names)
except (TypeError, IndexError):
return Index(new_tuples) | [
"def",
"append",
"(",
"self",
",",
"other",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"other",
"=",
"[",
"other",
"]",
"if",
"all",
"(",
"(",
"isinstance",
"(",
"o",
",",
"MultiIndex",
")",
"and",
"o",
".",
"nlevels",
">=",
"self",
".",
"nlevels",
")",
"for",
"o",
"in",
"other",
")",
":",
"arrays",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"nlevels",
")",
":",
"label",
"=",
"self",
".",
"_get_level_values",
"(",
"i",
")",
"appended",
"=",
"[",
"o",
".",
"_get_level_values",
"(",
"i",
")",
"for",
"o",
"in",
"other",
"]",
"arrays",
".",
"append",
"(",
"label",
".",
"append",
"(",
"appended",
")",
")",
"return",
"MultiIndex",
".",
"from_arrays",
"(",
"arrays",
",",
"names",
"=",
"self",
".",
"names",
")",
"to_concat",
"=",
"(",
"self",
".",
"values",
",",
")",
"+",
"tuple",
"(",
"k",
".",
"_values",
"for",
"k",
"in",
"other",
")",
"new_tuples",
"=",
"np",
".",
"concatenate",
"(",
"to_concat",
")",
"# if all(isinstance(x, MultiIndex) for x in other):",
"try",
":",
"return",
"MultiIndex",
".",
"from_tuples",
"(",
"new_tuples",
",",
"names",
"=",
"self",
".",
"names",
")",
"except",
"(",
"TypeError",
",",
"IndexError",
")",
":",
"return",
"Index",
"(",
"new_tuples",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/indexes/multi.py#L1824-L1855 |
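When every appended index is itself a MultiIndex with at least as many levels, the fast per-level branch above is taken and the result stays a MultiIndex:

import pandas as pd

left = pd.MultiIndex.from_arrays([['a', 'a'], [1, 2]], names=['key', 'num'])
right = pd.MultiIndex.from_arrays([['b'], [3]], names=['key', 'num'])
print(left.append(right).tolist())   # [('a', 1), ('a', 2), ('b', 3)]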
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/packaging/tags.py | python | _glibc_version_string_ctypes | () | return version_str | Fallback implementation of glibc_version_string using ctypes. | Fallback implementation of glibc_version_string using ctypes. | [
"Fallback",
"implementation",
"of",
"glibc_version_string",
"using",
"ctypes",
"."
] | def _glibc_version_string_ctypes():
# type: () -> Optional[str]
"""
Fallback implementation of glibc_version_string using ctypes.
"""
try:
import ctypes
except ImportError:
return None
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
# manpage says, "If filename is NULL, then the returned handle is for the
# main program". This way we can let the linker do the work to figure out
# which libc our process is actually using.
#
# We must also handle the special case where the executable is not a
# dynamically linked executable. This can occur when using musl libc,
# for example. In this situation, dlopen() will error, leading to an
# OSError. Interestingly, at least in the case of musl, there is no
# errno set on the OSError. The single string argument used to construct
# OSError comes from libc itself and is therefore not portable to
# hard code here. In any case, failure to call dlopen() means we
# can proceed, so we bail on our attempt.
try:
# Note: typeshed is wrong here so we are ignoring this line.
process_namespace = ctypes.CDLL(None) # type: ignore
except OSError:
return None
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
# Symbol doesn't exist -> therefore, we are not linked to
# glibc.
return None
# Call gnu_get_libc_version, which returns a string like "2.5"
gnu_get_libc_version.restype = ctypes.c_char_p
version_str = gnu_get_libc_version() # type: str
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
return version_str | [
"def",
"_glibc_version_string_ctypes",
"(",
")",
":",
"# type: () -> Optional[str]",
"try",
":",
"import",
"ctypes",
"except",
"ImportError",
":",
"return",
"None",
"# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen",
"# manpage says, \"If filename is NULL, then the returned handle is for the",
"# main program\". This way we can let the linker do the work to figure out",
"# which libc our process is actually using.",
"#",
"# We must also handle the special case where the executable is not a",
"# dynamically linked executable. This can occur when using musl libc,",
"# for example. In this situation, dlopen() will error, leading to an",
"# OSError. Interestingly, at least in the case of musl, there is no",
"# errno set on the OSError. The single string argument used to construct",
"# OSError comes from libc itself and is therefore not portable to",
"# hard code here. In any case, failure to call dlopen() means we",
"# can proceed, so we bail on our attempt.",
"try",
":",
"# Note: typeshed is wrong here so we are ignoring this line.",
"process_namespace",
"=",
"ctypes",
".",
"CDLL",
"(",
"None",
")",
"# type: ignore",
"except",
"OSError",
":",
"return",
"None",
"try",
":",
"gnu_get_libc_version",
"=",
"process_namespace",
".",
"gnu_get_libc_version",
"except",
"AttributeError",
":",
"# Symbol doesn't exist -> therefore, we are not linked to",
"# glibc.",
"return",
"None",
"# Call gnu_get_libc_version, which returns a string like \"2.5\"",
"gnu_get_libc_version",
".",
"restype",
"=",
"ctypes",
".",
"c_char_p",
"version_str",
"=",
"gnu_get_libc_version",
"(",
")",
"# type: str",
"# py2 / py3 compatibility:",
"if",
"not",
"isinstance",
"(",
"version_str",
",",
"str",
")",
":",
"version_str",
"=",
"version_str",
".",
"decode",
"(",
"\"ascii\"",
")",
"return",
"version_str"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/packaging/tags.py#L549-L592 |
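The returned string is normally split into a comparable tuple before any manylinux decision is made; a minimal, hedged parser:

def _parse_glibc_version(version_str):
    major, minor = version_str.split('.')[:2]
    return int(major), int(minor)

version_str = _glibc_version_string_ctypes()
if version_str is not None:
    print(_parse_glibc_version(version_str) >= (2, 17))  # manylinux2014 floor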
|
PaddlePaddle/Paddle | 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c | python/paddle/fluid/dygraph/jit.py | python | _register_save_pre_hook | (hook) | return HookRemoveHelper(hook) | Register a save pre-hook for `paddle.jit.save`.
This hook will be executed before the `save` function is invoked.
hook(layer, input_spec, configs) -> None
- layer (Layer|function): This argument is corresponding to `layer` in `paddle.jit.save`.
- input_spec (list or tuple[InputSpec|Tensor|Python built-in variable]): This argument is corresponding to `input_spec` in `paddle.jit.save`.
- configs (dict): This argument is corresponding to `configs` in `paddle.jit.save`.
Args:
hook(function): a function registered as a save pre-hook
Returns:
HookRemoveHelper: a HookRemoveHelper object that can be used to remove the added hook by calling `hook_remove_helper.remove()`.
Examples:
.. code-block:: python
import numpy as np
import paddle
IMAGE_SIZE = 256
CLASS_NUM = 10
class LinearNet(paddle.nn.Layer):
def __init__(self):
super(LinearNet, self).__init__()
self._linear = paddle.nn.Linear(IMAGE_SIZE, CLASS_NUM)
def forward(self, x):
return self._linear(x)
saving_count = 0
def save_pre_hook(layer, input_spec, configs):
global saving_count
saving_count += 1
remove_handler = paddle.jit.register_save_pre_hook(save_pre_hook)
layer = LinearNet()
paddle.jit.save(layer, "/tmp", [paddle.static.InputSpec(shape=[-1, IMAGE_SIZE])])
# saving_count == 1
remove_handler.remove()
paddle.jit.save(layer, "/tmp", [paddle.static.InputSpec(shape=[-1, IMAGE_SIZE])])
# saving_count == 1 | Register a save pre-hook for `paddle.jit.save`.
This hook will be executed before the `save` function is invoked. | [
"Register",
"a",
"save",
"pre",
"-",
"hook",
"for",
"paddle",
".",
"jit",
".",
"save",
".",
"This",
"hook",
"will",
"be",
"executed",
"before",
"save",
"function",
"has",
"been",
"invoked",
"."
] | def _register_save_pre_hook(hook):
"""
Register a save pre-hook for `paddle.jit.save`.
This hook will be executed before the `save` function is invoked.
hook(layer, input_spec, configs) -> None
- layer (Layer|function): This argument corresponds to `layer` in `paddle.jit.save`.
- input_spec (list or tuple[InputSpec|Tensor|Python built-in variable]): This argument corresponds to `input_spec` in `paddle.jit.save`.
- configs (dict): This argument corresponds to `configs` in `paddle.jit.save`.
Args:
hook(function): a function registered as a save pre-hook
Returns:
HookRemoveHelper: a HookRemoveHelper object that can be used to remove the added hook by calling `hook_remove_helper.remove()`.
Examples:
.. code-block:: python
import numpy as np
import paddle
IMAGE_SIZE = 256
CLASS_NUM = 10
class LinearNet(paddle.nn.Layer):
def __init__(self):
super(LinearNet, self).__init__()
self._linear = paddle.nn.Linear(IMAGE_SIZE, CLASS_NUM)
def forward(self, x):
return self._linear(x)
saving_count = 0
def save_pre_hook(layer, input_spec, configs):
global saving_count
saving_count += 1
remove_handler = paddle.jit.register_save_pre_hook(save_pre_hook)
layer = LinearNet()
paddle.jit.save(layer, "/tmp", [paddle.static.InputSpec(shape=[-1, IMAGE_SIZE])])
# saving_count == 1
remove_handler.remove()
paddle.jit.save(layer, "/tmp", [paddle.static.InputSpec(shape=[-1, IMAGE_SIZE])])
# saving_count == 1
"""
global _save_pre_hooks_lock
global _save_pre_hooks
_save_pre_hooks_lock.acquire()
if hook not in _save_pre_hooks:
_save_pre_hooks.append(hook)
_save_pre_hooks_lock.release()
return HookRemoveHelper(hook) | [
"def",
"_register_save_pre_hook",
"(",
"hook",
")",
":",
"global",
"_save_pre_hooks_lock",
"global",
"_save_pre_hooks",
"_save_pre_hooks_lock",
".",
"acquire",
"(",
")",
"if",
"hook",
"not",
"in",
"_save_pre_hooks",
":",
"_save_pre_hooks",
".",
"append",
"(",
"hook",
")",
"_save_pre_hooks_lock",
".",
"release",
"(",
")",
"return",
"HookRemoveHelper",
"(",
"hook",
")"
] | https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/dygraph/jit.py#L544-L598 |
|
Kronuz/Xapiand | a71570859dcfc9f48090d845053f359b07f4f78c | contrib/python/xapiand-py/xapiand/client/indices.py | python | IndicesClient.ping | (self, params=None) | Returns True if the cluster is up, False otherwise. | Returns True if the cluster is up, False otherwise. | [
"Returns",
"True",
"if",
"the",
"cluster",
"is",
"up",
"False",
"otherwise",
"."
] | def ping(self, params=None):
"""
Returns True if the cluster is up, False otherwise.
"""
try:
return self.transport.perform_request('HEAD', '',
params=params)
except TransportError:
return False | [
"def",
"ping",
"(",
"self",
",",
"params",
"=",
"None",
")",
":",
"try",
":",
"return",
"self",
".",
"transport",
".",
"perform_request",
"(",
"'HEAD'",
",",
"''",
",",
"params",
"=",
"params",
")",
"except",
"TransportError",
":",
"return",
"False"
] | https://github.com/Kronuz/Xapiand/blob/a71570859dcfc9f48090d845053f359b07f4f78c/contrib/python/xapiand-py/xapiand/client/indices.py#L26-L34 |
||
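A short usage sketch for the `ping` liveness probe above. Only `indices.ping()` comes from the record; the client import and host are assumptions modeled on elasticsearch-py, after which this client is patterned:

```python
from xapiand import Xapiand  # assumed client entry point (hypothetical)

client = Xapiand('localhost:8880')   # hypothetical host/port
if not client.indices.ping():        # any TransportError is reported as False
    raise SystemExit("Xapiand cluster is unreachable")
```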
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/tensorflow/contrib/distributions/python/ops/operator_pd_identity.py | python | OperatorPDIdentity.verify_pd | (self) | return self._verify_pd | Whether to verify that this `Operator` is positive definite. | Whether to verify that this `Operator` is positive definite. | [
"Whether",
"to",
"verify",
"that",
"this",
"Operator",
"is",
"positive",
"definite",
"."
] | def verify_pd(self):
"""Whether to verify that this `Operator` is positive definite."""
return self._verify_pd | [
"def",
"verify_pd",
"(",
"self",
")",
":",
"return",
"self",
".",
"_verify_pd"
] | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/distributions/python/ops/operator_pd_identity.py#L129-L131 |
|
RobotLocomotion/drake | 0e18a34604c45ed65bc9018a54f7610f91cdad5b | bindings/pydrake/systems/jupyter_widgets.py | python | PoseSliders.SetRpy | (self, rpy) | Sets the current value of the sliders for roll, pitch, and yaw.
Args:
rpy: An instance of drake.math.RollPitchYaw | Sets the current value of the sliders for roll, pitch, and yaw.
Args:
rpy: An instance of drake.math.RollPitchYaw | [
"Sets",
"the",
"current",
"value",
"of",
"the",
"sliders",
"for",
"roll",
"pitch",
"and",
"yaw",
".",
"Args",
":",
"rpy",
":",
"An",
"instance",
"of",
"drake",
".",
"math",
".",
"RollPitchYaw"
] | def SetRpy(self, rpy):
"""
Sets the current value of the sliders for roll, pitch, and yaw.
Args:
rpy: An instance of drake.math.RollPitchYaw
"""
self._roll.value = rpy.roll_angle()
self._pitch.value = rpy.pitch_angle()
self._yaw.value = rpy.yaw_angle() | [
"def",
"SetRpy",
"(",
"self",
",",
"rpy",
")",
":",
"self",
".",
"_roll",
".",
"value",
"=",
"rpy",
".",
"roll_angle",
"(",
")",
"self",
".",
"_pitch",
".",
"value",
"=",
"rpy",
".",
"pitch_angle",
"(",
")",
"self",
".",
"_yaw",
".",
"value",
"=",
"rpy",
".",
"yaw_angle",
"(",
")"
] | https://github.com/RobotLocomotion/drake/blob/0e18a34604c45ed65bc9018a54f7610f91cdad5b/bindings/pydrake/systems/jupyter_widgets.py#L141-L149 |
||
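A sketch of driving the widget programmatically. It assumes the default `PoseSliders()` construction and the standard pydrake imports; angles are in radians:

```python
from pydrake.math import RollPitchYaw
from pydrake.systems.jupyter_widgets import PoseSliders

sliders = PoseSliders()  # default-constructed widget
sliders.SetRpy(RollPitchYaw(roll=0.1, pitch=0.0, yaw=-0.5))
```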
Kitware/ParaView | f760af9124ff4634b23ebbeab95a4f56e0261955 | Plugins/pvblot/blotish.py | python | _require_plot_mode | (mode) | Check if the current tplot mode is the given mode. If there is a mismatch
raise a BlotishError. | Check if the current tplot mode is the given mode. If there is a mismatch
raise a BlotishError. | [
"Check",
"if",
"the",
"current",
"tplot",
"mode",
"is",
"the",
"given",
"mode",
".",
"If",
"there",
"is",
"a",
"mismatch",
"raise",
"a",
"BlotishError",
"."
] | def _require_plot_mode(mode):
"""Check if the current tplot mode is the given mode. If there is a mismatch
raise a BlotishError."""
current_mode = state.tplot.get_current_curve_mode()
if current_mode is not None:
if mode != current_mode:
raise BlotishError("Time curves and X-Y curves must be defined separately") | [
"def",
"_require_plot_mode",
"(",
"mode",
")",
":",
"current_mode",
"=",
"state",
".",
"tplot",
".",
"get_current_curve_mode",
"(",
")",
"if",
"current_mode",
"is",
"not",
"None",
":",
"if",
"mode",
"!=",
"current_mode",
":",
"raise",
"BlotishError",
"(",
"\"Time curves and X-Y curves must be defined separately\"",
")"
] | https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/Plugins/pvblot/blotish.py#L483-L489 |
||
pytorch/pytorch | 7176c92687d3cc847cc046bf002269c6949a21c2 | torch/distributed/rpc/api.py | python | rpc_sync | (to, func, args=None, kwargs=None, timeout=UNSET_RPC_TIMEOUT) | return fut.wait() | r"""
Make a blocking RPC call to run function ``func`` on worker ``to``. RPC
messages are sent and received in parallel to execution of Python code. This
method is thread-safe.
Args:
to (str or WorkerInfo or int): name/rank/``WorkerInfo`` of the destination worker.
func (callable): a callable function, such as Python callables, builtin
operators (e.g. :meth:`~torch.add`) and annotated
TorchScript functions.
args (tuple): the argument tuple for the ``func`` invocation.
kwargs (dict): is a dictionary of keyword arguments for the ``func``
invocation.
timeout (float, optional): timeout in seconds to use for this RPC. If
the RPC does not complete in this amount of
time, an exception indicating it has
timed out will be raised. A value of 0
indicates an infinite timeout, i.e. a timeout
error will never be raised. If not provided,
the default value set during initialization
or with ``_set_rpc_timeout`` is used.
Returns:
Returns the result of running ``func`` with ``args`` and ``kwargs``.
Example::
Make sure that ``MASTER_ADDR`` and ``MASTER_PORT`` are set properly
on both workers. Refer to :meth:`~torch.distributed.init_process_group`
API for more details. For example,
>>> export MASTER_ADDR=localhost
>>> export MASTER_PORT=5678
Then run the following code in two different processes:
>>> # On worker 0:
>>> import torch
>>> import torch.distributed.rpc as rpc
>>> rpc.init_rpc("worker0", rank=0, world_size=2)
>>> ret = rpc.rpc_sync("worker1", torch.add, args=(torch.ones(2), 3))
>>> rpc.shutdown()
>>> # On worker 1:
>>> import torch.distributed.rpc as rpc
>>> rpc.init_rpc("worker1", rank=1, world_size=2)
>>> rpc.shutdown()
Below is an example of running a TorchScript function using RPC.
>>> # On both workers:
>>> @torch.jit.script
>>> def my_script_add(t1, t2):
>>> return torch.add(t1, t2)
>>> # On worker 0:
>>> import torch.distributed.rpc as rpc
>>> rpc.init_rpc("worker0", rank=0, world_size=2)
>>> ret = rpc.rpc_sync("worker1", my_script_add, args=(torch.ones(2), 3))
>>> rpc.shutdown()
>>> # On worker 1:
>>> import torch.distributed.rpc as rpc
>>> rpc.init_rpc("worker1", rank=1, world_size=2)
>>> rpc.shutdown() | r"""
Make a blocking RPC call to run function ``func`` on worker ``to``. RPC
messages are sent and received in parallel to execution of Python code. This
method is thread-safe. | [
"r",
"Make",
"a",
"blocking",
"RPC",
"call",
"to",
"run",
"function",
"func",
"on",
"worker",
"to",
".",
"RPC",
"messages",
"are",
"sent",
"and",
"received",
"in",
"parallel",
"to",
"execution",
"of",
"Python",
"code",
".",
"This",
"method",
"is",
"thread",
"-",
"safe",
"."
] | def rpc_sync(to, func, args=None, kwargs=None, timeout=UNSET_RPC_TIMEOUT):
r"""
Make a blocking RPC call to run function ``func`` on worker ``to``. RPC
messages are sent and received in parallel to execution of Python code. This
method is thread-safe.
Args:
to (str or WorkerInfo or int): name/rank/``WorkerInfo`` of the destination worker.
func (callable): a callable function, such as Python callables, builtin
operators (e.g. :meth:`~torch.add`) and annotated
TorchScript functions.
args (tuple): the argument tuple for the ``func`` invocation.
kwargs (dict): is a dictionary of keyword arguments for the ``func``
invocation.
timeout (float, optional): timeout in seconds to use for this RPC. If
the RPC does not complete in this amount of
time, an exception indicating it has
timed out will be raised. A value of 0
indicates an infinite timeout, i.e. a timeout
error will never be raised. If not provided,
the default value set during initialization
or with ``_set_rpc_timeout`` is used.
Returns:
Returns the result of running ``func`` with ``args`` and ``kwargs``.
Example::
Make sure that ``MASTER_ADDR`` and ``MASTER_PORT`` are set properly
on both workers. Refer to :meth:`~torch.distributed.init_process_group`
API for more details. For example,
>>> export MASTER_ADDR=localhost
>>> export MASTER_PORT=5678
Then run the following code in two different processes:
>>> # On worker 0:
>>> import torch
>>> import torch.distributed.rpc as rpc
>>> rpc.init_rpc("worker0", rank=0, world_size=2)
>>> ret = rpc.rpc_sync("worker1", torch.add, args=(torch.ones(2), 3))
>>> rpc.shutdown()
>>> # On worker 1:
>>> import torch.distributed.rpc as rpc
>>> rpc.init_rpc("worker1", rank=1, world_size=2)
>>> rpc.shutdown()
Below is an example of running a TorchScript function using RPC.
>>> # On both workers:
>>> @torch.jit.script
>>> def my_script_add(t1, t2):
>>> return torch.add(t1, t2)
>>> # On worker 0:
>>> import torch.distributed.rpc as rpc
>>> rpc.init_rpc("worker0", rank=0, world_size=2)
>>> ret = rpc.rpc_sync("worker1", my_script_add, args=(torch.ones(2), 3))
>>> rpc.shutdown()
>>> # On worker 1:
>>> import torch.distributed.rpc as rpc
>>> rpc.init_rpc("worker1", rank=1, world_size=2)
>>> rpc.shutdown()
"""
torch._C._log_api_usage_once("torch.distributed.rpc_sync")
fut = _invoke_rpc(to, func, RPCExecMode.SYNC, args, kwargs, timeout)
return fut.wait() | [
"def",
"rpc_sync",
"(",
"to",
",",
"func",
",",
"args",
"=",
"None",
",",
"kwargs",
"=",
"None",
",",
"timeout",
"=",
"UNSET_RPC_TIMEOUT",
")",
":",
"torch",
".",
"_C",
".",
"_log_api_usage_once",
"(",
"\"torch.distributed.rpc_sync\"",
")",
"fut",
"=",
"_invoke_rpc",
"(",
"to",
",",
"func",
",",
"RPCExecMode",
".",
"SYNC",
",",
"args",
",",
"kwargs",
",",
"timeout",
")",
"return",
"fut",
".",
"wait",
"(",
")"
] | https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/distributed/rpc/api.py#L698-L767 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/targets/callconv.py | python | BaseCallConv.get_return_type | (self, ty) | return restype.as_pointer() | Get the actual type of the return argument for Numba type *ty*. | Get the actual type of the return argument for Numba type *ty*. | [
"Get",
"the",
"actual",
"type",
"of",
"the",
"return",
"argument",
"for",
"Numba",
"type",
"*",
"ty",
"*",
"."
] | def get_return_type(self, ty):
"""
Get the actual type of the return argument for Numba type *ty*.
"""
restype = self.context.data_model_manager[ty].get_return_type()
return restype.as_pointer() | [
"def",
"get_return_type",
"(",
"self",
",",
"ty",
")",
":",
"restype",
"=",
"self",
".",
"context",
".",
"data_model_manager",
"[",
"ty",
"]",
".",
"get_return_type",
"(",
")",
"return",
"restype",
".",
"as_pointer",
"(",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/targets/callconv.py#L97-L102 |
|
SFTtech/openage | d6a08c53c48dc1e157807471df92197f6ca9e04d | openage/util/filelike/abstract.py | python | FileLikeObject.writable | (self) | Returns True if write() is allowed. | Returns True if write() is allowed. | [
"Returns",
"True",
"if",
"write",
"()",
"is",
"allowed",
"."
] | def writable(self):
"""
Returns True if write() is allowed.
""" | [
"def",
"writable",
"(",
"self",
")",
":"
] | https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/util/filelike/abstract.py#L50-L53 |
||
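One way a concrete subclass might honour the `writable()` contract above; `ReadOnlyView` is illustrative and the other abstract methods of `FileLikeObject` are omitted:

```python
import io

class ReadOnlyView(FileLikeObject):  # other abstract methods omitted
    def writable(self):
        return False

    def write(self, data):
        # Stdlib convention: non-writable streams raise UnsupportedOperation.
        raise io.UnsupportedOperation("write")
```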
Tencent/CMONGO | c40380caa14e05509f46993aa8b8da966b09b0b5 | src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Environment.py | python | Base.Append | (self, **kw) | Append values to existing construction variables
in an Environment. | Append values to existing construction variables
in an Environment. | [
"Append",
"values",
"to",
"existing",
"construction",
"variables",
"in",
"an",
"Environment",
"."
] | def Append(self, **kw):
"""Append values to existing construction variables
in an Environment.
"""
kw = copy_non_reserved_keywords(kw)
for key, val in kw.items():
# It would be easier on the eyes to write this using
# "continue" statements whenever we finish processing an item,
# but Python 1.5.2 apparently doesn't let you use "continue"
# within try:-except: blocks, so we have to nest our code.
try:
if key == 'CPPDEFINES' and SCons.Util.is_String(self._dict[key]):
self._dict[key] = [self._dict[key]]
orig = self._dict[key]
except KeyError:
# No existing variable in the environment, so just set
# it to the new value.
if key == 'CPPDEFINES' and SCons.Util.is_String(val):
self._dict[key] = [val]
else:
self._dict[key] = val
else:
try:
# Check if the original looks like a dictionary.
# If it is, we can't just try adding the value because
# dictionaries don't have __add__() methods, and
# things like UserList will incorrectly coerce the
# original dict to a list (which we don't want).
update_dict = orig.update
except AttributeError:
try:
# Most straightforward: just try to add them
# together. This will work in most cases, when the
# original and new values are of compatible types.
self._dict[key] = orig + val
except (KeyError, TypeError):
try:
# Check if the original is a list.
add_to_orig = orig.append
except AttributeError:
# The original isn't a list, but the new
# value is (by process of elimination),
# so insert the original in the new value
# (if there's one to insert) and replace
# the variable with it.
if orig:
val.insert(0, orig)
self._dict[key] = val
else:
# The original is a list, so append the new
# value to it (if there's a value to append).
if val:
add_to_orig(val)
else:
# The original looks like a dictionary, so update it
# based on what we think the value looks like.
if SCons.Util.is_List(val):
if key == 'CPPDEFINES':
tmp = []
for (k, v) in orig.iteritems():
if v is not None:
tmp.append((k, v))
else:
tmp.append((k,))
orig = tmp
orig += val
self._dict[key] = orig
else:
for v in val:
orig[v] = None
else:
try:
update_dict(val)
except (AttributeError, TypeError, ValueError):
if SCons.Util.is_Dict(val):
for k, v in val.items():
orig[k] = v
else:
orig[val] = None
self.scanner_map_delete(kw) | [
"def",
"Append",
"(",
"self",
",",
"*",
"*",
"kw",
")",
":",
"kw",
"=",
"copy_non_reserved_keywords",
"(",
"kw",
")",
"for",
"key",
",",
"val",
"in",
"kw",
".",
"items",
"(",
")",
":",
"# It would be easier on the eyes to write this using",
"# \"continue\" statements whenever we finish processing an item,",
"# but Python 1.5.2 apparently doesn't let you use \"continue\"",
"# within try:-except: blocks, so we have to nest our code.",
"try",
":",
"if",
"key",
"==",
"'CPPDEFINES'",
"and",
"SCons",
".",
"Util",
".",
"is_String",
"(",
"self",
".",
"_dict",
"[",
"key",
"]",
")",
":",
"self",
".",
"_dict",
"[",
"key",
"]",
"=",
"[",
"self",
".",
"_dict",
"[",
"key",
"]",
"]",
"orig",
"=",
"self",
".",
"_dict",
"[",
"key",
"]",
"except",
"KeyError",
":",
"# No existing variable in the environment, so just set",
"# it to the new value.",
"if",
"key",
"==",
"'CPPDEFINES'",
"and",
"SCons",
".",
"Util",
".",
"is_String",
"(",
"val",
")",
":",
"self",
".",
"_dict",
"[",
"key",
"]",
"=",
"[",
"val",
"]",
"else",
":",
"self",
".",
"_dict",
"[",
"key",
"]",
"=",
"val",
"else",
":",
"try",
":",
"# Check if the original looks like a dictionary.",
"# If it is, we can't just try adding the value because",
"# dictionaries don't have __add__() methods, and",
"# things like UserList will incorrectly coerce the",
"# original dict to a list (which we don't want).",
"update_dict",
"=",
"orig",
".",
"update",
"except",
"AttributeError",
":",
"try",
":",
"# Most straightforward: just try to add them",
"# together. This will work in most cases, when the",
"# original and new values are of compatible types.",
"self",
".",
"_dict",
"[",
"key",
"]",
"=",
"orig",
"+",
"val",
"except",
"(",
"KeyError",
",",
"TypeError",
")",
":",
"try",
":",
"# Check if the original is a list.",
"add_to_orig",
"=",
"orig",
".",
"append",
"except",
"AttributeError",
":",
"# The original isn't a list, but the new",
"# value is (by process of elimination),",
"# so insert the original in the new value",
"# (if there's one to insert) and replace",
"# the variable with it.",
"if",
"orig",
":",
"val",
".",
"insert",
"(",
"0",
",",
"orig",
")",
"self",
".",
"_dict",
"[",
"key",
"]",
"=",
"val",
"else",
":",
"# The original is a list, so append the new",
"# value to it (if there's a value to append).",
"if",
"val",
":",
"add_to_orig",
"(",
"val",
")",
"else",
":",
"# The original looks like a dictionary, so update it",
"# based on what we think the value looks like.",
"if",
"SCons",
".",
"Util",
".",
"is_List",
"(",
"val",
")",
":",
"if",
"key",
"==",
"'CPPDEFINES'",
":",
"tmp",
"=",
"[",
"]",
"for",
"(",
"k",
",",
"v",
")",
"in",
"orig",
".",
"iteritems",
"(",
")",
":",
"if",
"v",
"is",
"not",
"None",
":",
"tmp",
".",
"append",
"(",
"(",
"k",
",",
"v",
")",
")",
"else",
":",
"tmp",
".",
"append",
"(",
"(",
"k",
",",
")",
")",
"orig",
"=",
"tmp",
"orig",
"+=",
"val",
"self",
".",
"_dict",
"[",
"key",
"]",
"=",
"orig",
"else",
":",
"for",
"v",
"in",
"val",
":",
"orig",
"[",
"v",
"]",
"=",
"None",
"else",
":",
"try",
":",
"update_dict",
"(",
"val",
")",
"except",
"(",
"AttributeError",
",",
"TypeError",
",",
"ValueError",
")",
":",
"if",
"SCons",
".",
"Util",
".",
"is_Dict",
"(",
"val",
")",
":",
"for",
"k",
",",
"v",
"in",
"val",
".",
"items",
"(",
")",
":",
"orig",
"[",
"k",
"]",
"=",
"v",
"else",
":",
"orig",
"[",
"val",
"]",
"=",
"None",
"self",
".",
"scanner_map_delete",
"(",
"kw",
")"
] | https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Environment.py#L1129-L1208 |
||
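The branches in `Append` map to concrete merge behaviours. An SConstruct-style sketch (variable values illustrative), tracing each code path above:

```python
env = Environment(CCFLAGS=['-O2'], CPPDEFINES={'VERSION': 1})
env.Append(CCFLAGS=['-g'])        # list + list       -> ['-O2', '-g']
env.Append(CPPDEFINES=['EXTRA'])  # dict is flattened -> [('VERSION', 1), 'EXTRA']
env.Append(LIBS=['m'])            # unset variable    -> simply becomes ['m']
```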
Tencent/Pebble | 68315f176d9e328a233ace29b7579a829f89879f | tools/blade/src/blade/command_args.py | python | CmdArguments._add_clean_arguments | (self, parser) | Add clean arguments for parser. | Add clean arguments for parser. | [
"Add",
"clean",
"arguments",
"for",
"parser",
"."
] | def _add_clean_arguments(self, parser):
"""Add clean arguments for parser. """
self.__add_plat_profile_arguments(parser)
self.__add_generate_arguments(parser)
self.__add_color_arguments(parser) | [
"def",
"_add_clean_arguments",
"(",
"self",
",",
"parser",
")",
":",
"self",
".",
"__add_plat_profile_arguments",
"(",
"parser",
")",
"self",
".",
"__add_generate_arguments",
"(",
"parser",
")",
"self",
".",
"__add_color_arguments",
"(",
"parser",
")"
] | https://github.com/Tencent/Pebble/blob/68315f176d9e328a233ace29b7579a829f89879f/tools/blade/src/blade/command_args.py#L291-L295 |
||
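The record shows Blade composing a subcommand parser from per-concern helper methods. A generic argparse sketch of that pattern; the flags are illustrative, not Blade's real options:

```python
import argparse

def _add_color_arguments(parser):
    parser.add_argument('--color', choices=['yes', 'no', 'auto'],
                        default='auto', help='colored console output')

def _add_generate_arguments(parser):
    parser.add_argument('--generate-dynamic', action='store_true',
                        help='generate dynamic libraries')

def build_clean_parser():
    parser = argparse.ArgumentParser(prog='blade clean')
    # Each helper owns one group of flags, so every subcommand parser
    # reuses exactly the groups it needs.
    _add_color_arguments(parser)
    _add_generate_arguments(parser)
    return parser

args = build_clean_parser().parse_args(['--color', 'no'])
```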
BestSonny/SSTD | 174d452189f6bf9cf4b6957719392008bd974069 | python/caffe/net_spec.py | python | Top.to_proto | (self) | return to_proto(self) | Generate a NetParameter that contains all layers needed to compute
this top. | Generate a NetParameter that contains all layers needed to compute
this top. | [
"Generate",
"a",
"NetParameter",
"that",
"contains",
"all",
"layers",
"needed",
"to",
"compute",
"this",
"top",
"."
] | def to_proto(self):
"""Generate a NetParameter that contains all layers needed to compute
this top."""
return to_proto(self) | [
"def",
"to_proto",
"(",
"self",
")",
":",
"return",
"to_proto",
"(",
"self",
")"
] | https://github.com/BestSonny/SSTD/blob/174d452189f6bf9cf4b6957719392008bd974069/python/caffe/net_spec.py#L90-L94 |
|
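A short sketch of calling `to_proto()` on a `Top`; `InnerProduct` and its parameter are just one conventional choice from `caffe.layers`, and no validation of bottoms happens at serialization time:

```python
from caffe import layers as L

ip = L.InnerProduct(num_output=10)  # a Top with no bottoms
print(ip.to_proto())                # NetParameter holding every layer `ip` needs
```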
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/contexts/fitting_contexts/general_fitting_context.py | python | GeneralFittingContext.global_parameters | (self) | return self._global_parameters | Returns the global parameters stored in the model. | Returns the global parameters stored in the model. | [
"Returns",
"the",
"global",
"parameters",
"stored",
"in",
"the",
"model",
"."
] | def global_parameters(self) -> list:
"""Returns the global parameters stored in the model."""
return self._global_parameters | [
"def",
"global_parameters",
"(",
"self",
")",
"->",
"list",
":",
"return",
"self",
".",
"_global_parameters"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/contexts/fitting_contexts/general_fitting_context.py#L94-L96 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/agw/aui/auibar.py | python | AuiToolBar.GetToolSticky | (self, tool_id) | return item.sticky | Returns whether the toolbar item identified by `tool_id` has a sticky behaviour or not.
:param integer `tool_id`: the :class:`AuiToolBarItem` identifier. | Returns whether the toolbar item identified by `tool_id` has a sticky behaviour or not. | [
"Returns",
"whether",
"the",
"toolbar",
"item",
"identified",
"by",
"tool_id",
"has",
"a",
"sticky",
"behaviour",
"or",
"not",
"."
] | def GetToolSticky(self, tool_id):
"""
Returns whether the toolbar item identified by `tool_id` has a sticky behaviour or not.
:param integer `tool_id`: the :class:`AuiToolBarItem` identifier.
"""
item = self.FindTool(tool_id)
if not item:
return
return item.sticky | [
"def",
"GetToolSticky",
"(",
"self",
",",
"tool_id",
")",
":",
"item",
"=",
"self",
".",
"FindTool",
"(",
"tool_id",
")",
"if",
"not",
"item",
":",
"return",
"return",
"item",
".",
"sticky"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/auibar.py#L2337-L2348 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/agw/balloontip.py | python | BalloonTip.GetBalloonMessage | (self) | return self._message | Returns the :class:`BalloonTip` tip message.
:return: A string containing the main message. | Returns the :class:`BalloonTip` tip message. | [
"Returns",
"the",
":",
"class",
":",
"BalloonTip",
"tip",
"message",
"."
] | def GetBalloonMessage(self):
"""
Returns the :class:`BalloonTip` tip message.
:return: A string containing the main message.
"""
return self._message | [
"def",
"GetBalloonMessage",
"(",
"self",
")",
":",
"return",
"self",
".",
"_message"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/balloontip.py#L894-L901 |
|
apple/swift-lldb | d74be846ef3e62de946df343e8c234bde93a8912 | scripts/Python/static-binding/lldb.py | python | SBTarget.GetModuleAtIndex | (self, idx) | return _lldb.SBTarget_GetModuleAtIndex(self, idx) | GetModuleAtIndex(SBTarget self, uint32_t idx) -> SBModule | GetModuleAtIndex(SBTarget self, uint32_t idx) -> SBModule | [
"GetModuleAtIndex",
"(",
"SBTarget",
"self",
"uint32_t",
"idx",
")",
"-",
">",
"SBModule"
] | def GetModuleAtIndex(self, idx):
"""GetModuleAtIndex(SBTarget self, uint32_t idx) -> SBModule"""
return _lldb.SBTarget_GetModuleAtIndex(self, idx) | [
"def",
"GetModuleAtIndex",
"(",
"self",
",",
"idx",
")",
":",
"return",
"_lldb",
".",
"SBTarget_GetModuleAtIndex",
"(",
"self",
",",
"idx",
")"
] | https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L10628-L10630 |
|
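A typical loop over `GetModuleAtIndex` in an LLDB Python session; it assumes the bindings are importable and uses `/bin/ls` as an arbitrary target binary:

```python
import lldb  # assumes LLDB's Python bindings are on sys.path

debugger = lldb.SBDebugger.Create()
target = debugger.CreateTarget("/bin/ls")  # any binary on disk
for i in range(target.GetNumModules()):
    module = target.GetModuleAtIndex(i)
    print(module.GetFileSpec().GetFilename())
```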
leela-zero/leela-zero | e3ed6310d33d75078ba74c3adf887d18439fc2e3 | scripts/cpplint.py | python | NestingState.InNamespaceBody | (self) | return self.stack and isinstance(self.stack[-1], _NamespaceInfo) | Check if we are currently one level inside a namespace body.
Returns:
True if top of the stack is a namespace block, False otherwise. | Check if we are currently one level inside a namespace body. | [
"Check",
"if",
"we",
"are",
"currently",
"one",
"level",
"inside",
"a",
"namespace",
"body",
"."
] | def InNamespaceBody(self):
"""Check if we are currently one level inside a namespace body.
Returns:
True if top of the stack is a namespace block, False otherwise.
"""
return self.stack and isinstance(self.stack[-1], _NamespaceInfo) | [
"def",
"InNamespaceBody",
"(",
"self",
")",
":",
"return",
"self",
".",
"stack",
"and",
"isinstance",
"(",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
",",
"_NamespaceInfo",
")"
] | https://github.com/leela-zero/leela-zero/blob/e3ed6310d33d75078ba74c3adf887d18439fc2e3/scripts/cpplint.py#L2234-L2240 |
|
pristineio/webrtc-mirror | 7a5bcdffaab90a05bc1146b2b1ea71c004e54d71 | PRESUBMIT.py | python | _VerifyNativeApiHeadersListIsValid | (input_api, output_api) | return [] | Ensures the list of native API header directories is up to date. | Ensures the list of native API header directories is up to date. | [
"Ensures",
"the",
"list",
"of",
"native",
"API",
"header",
"directories",
"is",
"up",
"to",
"date",
"."
] | def _VerifyNativeApiHeadersListIsValid(input_api, output_api):
"""Ensures the list of native API header directories is up to date."""
non_existing_paths = []
native_api_full_paths = [
input_api.os_path.join(input_api.PresubmitLocalPath(),
*path.split('/')) for path in API_DIRS]
for path in native_api_full_paths:
if not os.path.isdir(path):
non_existing_paths.append(path)
if non_existing_paths:
return [output_api.PresubmitError(
'Directories to native API headers have changed which has made the '
'list in PRESUBMIT.py outdated.\nPlease update it to the current '
'location of our native APIs.',
non_existing_paths)]
return [] | [
"def",
"_VerifyNativeApiHeadersListIsValid",
"(",
"input_api",
",",
"output_api",
")",
":",
"non_existing_paths",
"=",
"[",
"]",
"native_api_full_paths",
"=",
"[",
"input_api",
".",
"os_path",
".",
"join",
"(",
"input_api",
".",
"PresubmitLocalPath",
"(",
")",
",",
"*",
"path",
".",
"split",
"(",
"'/'",
")",
")",
"for",
"path",
"in",
"API_DIRS",
"]",
"for",
"path",
"in",
"native_api_full_paths",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"non_existing_paths",
".",
"append",
"(",
"path",
")",
"if",
"non_existing_paths",
":",
"return",
"[",
"output_api",
".",
"PresubmitError",
"(",
"'Directories to native API headers have changed which has made the '",
"'list in PRESUBMIT.py outdated.\\nPlease update it to the current '",
"'location of our native APIs.'",
",",
"non_existing_paths",
")",
"]",
"return",
"[",
"]"
] | https://github.com/pristineio/webrtc-mirror/blob/7a5bcdffaab90a05bc1146b2b1ea71c004e54d71/PRESUBMIT.py#L109-L124 |
|
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/coverage/coverage/data.py | python | CoverageData._has_arcs | (self) | return self._arcs is not None | Do we have data in self._arcs? | Do we have data in self._arcs? | [
"Do",
"we",
"have",
"data",
"in",
"self",
".",
"_arcs?"
] | def _has_arcs(self):
"""Do we have data in self._arcs?"""
return self._arcs is not None | [
"def",
"_has_arcs",
"(",
"self",
")",
":",
"return",
"self",
".",
"_arcs",
"is",
"not",
"None"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/coverage/coverage/data.py#L598-L600 |
|
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/gsutil/third_party/boto/boto/cloudsearch/optionstatus.py | python | OptionStatus.wait_for_state | (self, state) | Performs polling of CloudSearch to wait for the ``state``
of this object to change to the provided state. | Performs polling of CloudSearch to wait for the ``state``
of this object to change to the provided state. | [
"Performs",
"polling",
"of",
"CloudSearch",
"to",
"wait",
"for",
"the",
"state",
"of",
"this",
"object",
"to",
"change",
"to",
"the",
"provided",
"state",
"."
] | def wait_for_state(self, state):
"""
Performs polling of CloudSearch to wait for the ``state``
of this object to change to the provided state.
"""
while self.state != state:
time.sleep(5)
self.refresh() | [
"def",
"wait_for_state",
"(",
"self",
",",
"state",
")",
":",
"while",
"self",
".",
"state",
"!=",
"state",
":",
"time",
".",
"sleep",
"(",
"5",
")",
"self",
".",
"refresh",
"(",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/cloudsearch/optionstatus.py#L115-L122 |
||
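The loop above polls every 5 seconds with no upper bound. A common hardening is a deadline so a stuck resource raises instead of hanging forever; this is a generic sketch, not part of the boto API:

```python
import time

def wait_for_state_with_timeout(status, state, timeout=300, interval=5):
    # Same polling loop as the record, plus an upper bound.
    deadline = time.time() + timeout
    while status.state != state:
        if time.time() > deadline:
            raise RuntimeError("timed out waiting for state %r" % state)
        time.sleep(interval)
        status.refresh()
```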
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/gzip.py | python | GzipFile.fileno | (self) | return self.fileobj.fileno() | Invoke the underlying file object's fileno() method.
This will raise AttributeError if the underlying file object
doesn't support fileno(). | Invoke the underlying file object's fileno() method. | [
"Invoke",
"the",
"underlying",
"file",
"object",
"s",
"fileno",
"()",
"method",
"."
] | def fileno(self):
"""Invoke the underlying file object's fileno() method.
This will raise AttributeError if the underlying file object
doesn't support fileno().
"""
return self.fileobj.fileno() | [
"def",
"fileno",
"(",
"self",
")",
":",
"return",
"self",
".",
"fileobj",
".",
"fileno",
"(",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/gzip.py#L339-L345 |
|
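Because `fileno()` delegates to the underlying compressed file, `os.fstat` on it reports the on-disk (compressed) size, not the decompressed length; the filename below is illustrative:

```python
import gzip
import os

with gzip.open("example.txt.gz", "rb") as f:
    compressed_size = os.fstat(f.fileno()).st_size  # size of the .gz on disk
    payload = f.read()                              # decompressed bytes
```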
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | setup/snap.py | python | PNEANet.HasFlag | (self, *args) | return _snap.PNEANet_HasFlag(self, *args) | HasFlag(PNEANet self, TGraphFlag const & Flag) -> bool
Parameters:
Flag: TGraphFlag const & | HasFlag(PNEANet self, TGraphFlag const & Flag) -> bool | [
"HasFlag",
"(",
"PNEANet",
"self",
"TGraphFlag",
"const",
"&",
"Flag",
")",
"-",
">",
"bool"
] | def HasFlag(self, *args):
"""
HasFlag(PNEANet self, TGraphFlag const & Flag) -> bool
Parameters:
Flag: TGraphFlag const &
"""
return _snap.PNEANet_HasFlag(self, *args) | [
"def",
"HasFlag",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"PNEANet_HasFlag",
"(",
"self",
",",
"*",
"args",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L23110-L23118 |
|
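A minimal use of `HasFlag` via Snap.py, assuming the usual `snap` module with its `TGraphFlag` constants exposed at module level:

```python
import snap

graph = snap.TNEANet.New()             # New() returns a PNEANet smart pointer
print(graph.HasFlag(snap.gfDirected))  # True: TNEANet is a directed network
```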
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/_windows.py | python | VarScrollHelperBase.UpdateScrollbar | (*args, **kwargs) | return _windows_.VarScrollHelperBase_UpdateScrollbar(*args, **kwargs) | UpdateScrollbar(self) | UpdateScrollbar(self) | [
"UpdateScrollbar",
"(",
"self",
")"
] | def UpdateScrollbar(*args, **kwargs):
"""UpdateScrollbar(self)"""
return _windows_.VarScrollHelperBase_UpdateScrollbar(*args, **kwargs) | [
"def",
"UpdateScrollbar",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"VarScrollHelperBase_UpdateScrollbar",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L2238-L2240 |
|
CRYTEK/CRYENGINE | 232227c59a220cbbd311576f0fbeba7bb53b2a8c | Editor/Python/windows/Lib/site-packages/pip/_vendor/requests/models.py | python | Response.__nonzero__ | (self) | return self.ok | Returns true if :attr:`status_code` is 'OK'. | Returns true if :attr:`status_code` is 'OK'. | [
"Returns",
"true",
"if",
":",
"attr",
":",
"status_code",
"is",
"OK",
"."
] | def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok | [
"def",
"__nonzero__",
"(",
"self",
")",
":",
"return",
"self",
".",
"ok"
] | https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pip/_vendor/requests/models.py#L626-L628 |
|
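This `__nonzero__` is what makes `if response:` work in requests. Note it reflects only the status code, not the body; the URL is illustrative:

```python
import requests

r = requests.get("https://example.com/status")
if r:  # delegates to r.ok, i.e. status_code < 400; says nothing about the body
    print(r.status_code, len(r.content))
else:
    print("request failed:", r.status_code)
```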
SpectrumIM/spectrum2 | d5e8851daec94a43f7d70627df1ea44b230a44e0 | plugin/python/NetworkPlugin.py | python | NetworkPlugin.handleLogoutRequest | (self, user, legacyName) | Called when XMPP user wants to disconnect legacy network.
You should disconnect him from legacy network.
@param user: XMPP JID of user for which this event occurs.
@param legacyName: Legacy network name of this user used for login. | Called when XMPP user wants to disconnect legacy network.
You should disconnect him from legacy network. | [
"Called",
"when",
"XMPP",
"user",
"wants",
"to",
"disconnect",
"legacy",
"network",
".",
"You",
"should",
"disconnect",
"him",
"from",
"legacy",
"network",
"."
] | def handleLogoutRequest(self, user, legacyName):
"""
Called when XMPP user wants to disconnect legacy network.
You should disconnect him from legacy network.
@param user: XMPP JID of user for which this event occurs.
@param legacyName: Legacy network name of this user used for login.
"""
raise NotImplementedError, "Implement me" | [
"def",
"handleLogoutRequest",
"(",
"self",
",",
"user",
",",
"legacyName",
")",
":",
"raise",
"NotImplementedError",
",",
"\"Implement me\""
] | https://github.com/SpectrumIM/spectrum2/blob/d5e8851daec94a43f7d70627df1ea44b230a44e0/plugin/python/NetworkPlugin.py#L419-L427 |
||
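A sketch of a backend overriding the hook; `_connections` is a user-to-session registry invented for the example, not part of the NetworkPlugin API:

```python
class MyBackend(NetworkPlugin):
    def handleLogoutRequest(self, user, legacyName):
        # Hypothetical user -> legacy-network session registry.
        session = self._connections.pop(user, None)
        if session is not None:
            session.disconnect()
```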
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/logging/__init__.py | python | NullHandler.emit | (self, record) | Stub. | Stub. | [
"Stub",
"."
] | def emit(self, record):
"""Stub.""" | [
"def",
"emit",
"(",
"self",
",",
"record",
")",
":"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/logging/__init__.py#L2070-L2071 |
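The no-op `emit` above is what makes the standard library-logging idiom work: a library attaches a `NullHandler` so an application that never configures logging stays silent rather than triggering the "no handlers" fallback output.

```python
import logging

# Library code: attach a NullHandler to this library's logger.
logging.getLogger("mylib").addHandler(logging.NullHandler())

# Silently dropped until the application configures logging.
logging.getLogger("mylib").warning("cache miss on startup")
```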