| column | type |
|---|---|
| nwo | string (5–86 chars) |
| sha | string (40 chars) |
| path | string (4–189 chars) |
| language | string (1 distinct value: python) |
| identifier | string (1–94 chars) |
| parameters | string (2–4.03k chars) |
| argument_list | string (1 distinct value) |
| return_statement | string (0–11.5k chars) |
| docstring | string (1–33.2k chars) |
| docstring_summary | string (0–5.15k chars) |
| docstring_tokens | list |
| function | string (34–151k chars) |
| function_tokens | list |
| url | string (90–278 chars) |
**nwo:** wxWidgets/wxPython-Classic · **sha:** `19571e1ae65f1ac445f5491474121998c97a1bf0` · **path:** `src/gtk/propgrid.py` · **language:** python
**identifier:** `PGProperty.OnMeasureImage` · **docstring_summary:** OnMeasureImage(self, int item=-1) -> Size
**function:**

```python
def OnMeasureImage(*args, **kwargs):
    """OnMeasureImage(self, int item=-1) -> Size"""
    return _propgrid.PGProperty_OnMeasureImage(*args, **kwargs)
```

**url:** https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/propgrid.py#L380-L382
**nwo:** facebook/hermes · **sha:** `b1b1a00ab468ec1b397b31b71587110044830970` · **path:** `tools/hbc-attribute/accumulate.py` · **language:** python
**identifier:** `Accumulation.insertLeaf` · **docstring_summary:** Registers an allocation unit as being used by a group.
**function:**

```python
def insertLeaf(self, dic, obj):
    """Registers an allocation unit as being used by a group."""
    for key in obj["type"].split(":"):
        assert LEAFNODE not in dic
        dic = getOrAssignEmpty(dic, key)
    dic[LEAFNODE] = True
    if obj["dedupKey"] in dic:
        # All units from different sources should be the same size
        assert obj["size"] == dic[obj["dedupKey"]]
    dic[obj["dedupKey"]] = obj["size"]
```

**url:** https://github.com/facebook/hermes/blob/b1b1a00ab468ec1b397b31b71587110044830970/tools/hbc-attribute/accumulate.py#L59-L71
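The function above builds a trie out of nested dicts keyed by the colon-separated `type` path, marks the leaf, and records the unit size under a dedup key, asserting that the same unit has the same size wherever it appears. A minimal self-contained sketch of the pattern; `getOrAssignEmpty` and `LEAFNODE` are stand-ins for definitions the repo has but this record does not show:

```python
# Stand-ins: the repo defines these elsewhere; the values here are illustrative.
LEAFNODE = "__leaf__"

def getOrAssignEmpty(dic, key):
    # Return dic[key], creating an empty dict there first if missing.
    return dic.setdefault(key, {})

def insert_leaf(root, obj):
    # Walk the colon-separated type path, then record the unit size at the leaf.
    dic = root
    for key in obj["type"].split(":"):
        dic = getOrAssignEmpty(dic, key)
    dic[LEAFNODE] = True
    dic[obj["dedupKey"]] = obj["size"]

root = {}
insert_leaf(root, {"type": "function:bytecode", "dedupKey": "f1", "size": 12})
print(root)  # {'function': {'bytecode': {'__leaf__': True, 'f1': 12}}}
```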
**nwo:** benoitsteiner/tensorflow-opencl · **sha:** `cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5` · **path:** `configure.py` · **language:** python
**identifier:** `get_python_major_version` · **docstring_summary:** Get the python major version.
**function:**

```python
def get_python_major_version(python_bin_path):
  """Get the python major version."""
  return run_shell([python_bin_path, '-c', 'import sys; print(sys.version[0])'])
```

**url:** https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/configure.py#L174-L176
**nwo:** Jittor/jittor · **sha:** `e9aca0444c2bdc8e2389d99122954cd0903eec46` · **path:** `python/jittor/transform/__init__.py` · **language:** python
**identifier:** `RandomResizedCrop.get_params` · **docstring_summary:** Get parameters for ``crop`` for a random sized crop.
**function:**

```python
def get_params(img, scale, ratio):
    """Get parameters for ``crop`` for a random sized crop.

    Args:
        img (PIL Image): Image to be cropped.
        scale (tuple): range of size of the origin size cropped
        ratio (tuple): range of aspect ratio of the origin aspect ratio cropped

    Returns:
        tuple: params (i, j, h, w) to be passed to ``crop`` for a random
            sized crop.
    """
    width, height = _get_image_size(img)
    area = height * width

    for attempt in range(10):
        target_area = random.uniform(*scale) * area
        log_ratio = (math.log(ratio[0]), math.log(ratio[1]))
        aspect_ratio = math.exp(random.uniform(*log_ratio))

        w = int(round(math.sqrt(target_area * aspect_ratio)))
        h = int(round(math.sqrt(target_area / aspect_ratio)))

        if 0 < w <= width and 0 < h <= height:
            i = random.randint(0, height - h)
            j = random.randint(0, width - w)
            return i, j, h, w

    # Fallback to central crop
    in_ratio = float(width) / float(height)
    if (in_ratio < min(ratio)):
        w = width
        h = int(round(w / min(ratio)))
    elif (in_ratio > max(ratio)):
        h = height
        w = int(round(h * max(ratio)))
    else:  # whole image
        w = width
        h = height
    i = (height - h) // 2
    j = (width - w) // 2
    return i, j, h, w
```

**url:** https://github.com/Jittor/jittor/blob/e9aca0444c2bdc8e2389d99122954cd0903eec46/python/jittor/transform/__init__.py#L1102-L1143
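A hypothetical use of `get_params` (it is effectively a static method: there is no `self` in the parameter list). The import path is assumed from the record's `path` column; the crop box follows PIL's (left, upper, right, lower) convention:

```python
from PIL import Image
from jittor.transform import RandomResizedCrop  # import path assumed from the record above

img = Image.new("RGB", (640, 480))
# Sample a crop covering 8%-100% of the area, aspect ratio in [3/4, 4/3].
i, j, h, w = RandomResizedCrop.get_params(img, scale=(0.08, 1.0), ratio=(3 / 4, 4 / 3))
crop = img.crop((j, i, j + w, i + h))  # i is the top row, j the left column
```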
**nwo:** ricardoquesada/Spidermonkey · **sha:** `4a75ea2543408bd1b2c515aa95901523eeef7858` · **path:** `media/webrtc/trunk/tools/gyp/pylib/gyp/msvs_emulation.py` · **language:** python
**identifier:** `MsvsSettings.GetIdlBuildData` · **docstring_summary:** Determine the implicit outputs for an idl file. Returns output directory, outputs, and variables and flags that are required.
**function:**

```python
def GetIdlBuildData(self, source, config):
  """Determine the implicit outputs for an idl file. Returns output
  directory, outputs, and variables and flags that are required."""
  config = self._RealConfig(config)
  midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
  def midl(name, default=None):
    return self.ConvertVSMacros(midl_get(name, default=default),
                                config=config)
  tlb = midl('TypeLibraryName', default='${root}.tlb')
  header = midl('HeaderFileName', default='${root}.h')
  dlldata = midl('DLLDataFileName', default='dlldata.c')
  iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
  proxy = midl('ProxyFileName', default='${root}_p.c')
  # Note that .tlb is not included in the outputs as it is not always
  # generated depending on the content of the input idl file.
  outdir = midl('OutputDirectory', default='')
  output = [header, dlldata, iid, proxy]
  variables = [('tlb', tlb),
               ('h', header),
               ('dlldata', dlldata),
               ('iid', iid),
               ('proxy', proxy)]
  # TODO(scottmg): Are there configuration settings to set these flags?
  flags = ['/char', 'signed', '/env', 'win32', '/Oicf']
  return outdir, output, variables, flags
```

**url:** https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/media/webrtc/trunk/tools/gyp/pylib/gyp/msvs_emulation.py#L521-L545
**nwo:** apple/swift-lldb · **sha:** `d74be846ef3e62de946df343e8c234bde93a8912` · **path:** `third_party/Python/module/pexpect-4.6/pexpect/screen.py` · **language:** python
**identifier:** `screen.erase_down` · **docstring_summary:** Erases the screen from the current line down to the bottom of the screen.
**function:**

```python
def erase_down(self):  # <ESC>[0J -or- <ESC>[J
    '''Erases the screen from the current line down to the bottom of the
    screen.'''
    self.erase_end_of_line()
    self.fill_region(self.cur_r + 1, 1, self.rows, self.cols)
```

**url:** https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/third_party/Python/module/pexpect-4.6/pexpect/screen.py#L393-L398
**nwo:** wxWidgets/wxPython-Classic · **sha:** `19571e1ae65f1ac445f5491474121998c97a1bf0` · **path:** `src/msw/_core.py` · **language:** python
**identifier:** `Window.GetClientSize` · **docstring_summary:** GetClientSize(self) -> Size
**function:**

```python
def GetClientSize(*args, **kwargs):
    """
    GetClientSize(self) -> Size

    This gets the size of the window's 'client area' in pixels. The client
    area is the area which may be drawn on by the programmer, excluding
    title bar, border, scrollbars, etc.
    """
    return _core_.Window_GetClientSize(*args, **kwargs)
```

**url:** https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L9525-L9533
**nwo:** wxWidgets/wxPython-Classic · **sha:** `19571e1ae65f1ac445f5491474121998c97a1bf0` · **path:** `src/osx_cocoa/grid.py` · **language:** python
**identifier:** `Grid.UnsetSortingColumn` · **docstring_summary:** UnsetSortingColumn(self)
**function:**

```python
def UnsetSortingColumn(*args, **kwargs):
    """UnsetSortingColumn(self)"""
    return _grid.Grid_UnsetSortingColumn(*args, **kwargs)
```

**url:** https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/grid.py#L2185-L2187
**nwo:** CMU-Perceptual-Computing-Lab/MonocularTotalCapture · **sha:** `f69a12ead1abc412bcb14bb066ec3a60292d6cf3` · **path:** `POF/utils/general.py` · **language:** python
**identifier:** `crop_image_from_xy` · **docstring_summary:** Crops an image. When factor is not given does an central crop.
**function:**

```python
def crop_image_from_xy(image, crop_location, crop_size, scale=1.0):
    """
    Crops an image. When factor is not given does an central crop.

    Inputs:
        image: 4D tensor, [batch, height, width, channels] which will be cropped in height and width dimension
        crop_location: tensor, [batch, 2] which represent the height and width location of the crop
        crop_size: int, describes the extension of the crop
    Outputs:
        image_crop: 4D tensor, [batch, crop_size, crop_size, channels]
    """
    with tf.name_scope('crop_image_from_xy'):
        s = image.get_shape().as_list()
        assert len(s) == 4, "Image needs to be of shape [batch, width, height, channel]"
        scale = tf.reshape(scale, [-1])
        crop_location = tf.cast(crop_location, tf.float32)
        crop_location = tf.reshape(crop_location, [s[0], 2])
        crop_size = tf.cast(crop_size, tf.float32)

        crop_size_scaled = crop_size / scale

        y1 = crop_location[:, 0] - crop_size_scaled // 2
        y2 = y1 + crop_size_scaled
        x1 = crop_location[:, 1] - crop_size_scaled // 2
        x2 = x1 + crop_size_scaled
        y1 /= s[1]
        y2 /= s[1]
        x1 /= s[2]
        x2 /= s[2]
        boxes = tf.stack([y1, x1, y2, x2], -1)

        crop_size = tf.cast(tf.stack([crop_size, crop_size]), tf.int32)
        box_ind = tf.range(s[0])
        image_c = tf.image.crop_and_resize(tf.cast(image, tf.float32), boxes, box_ind, crop_size, name='crop')
        return image_c
```

**url:** https://github.com/CMU-Perceptual-Computing-Lab/MonocularTotalCapture/blob/f69a12ead1abc412bcb14bb066ec3a60292d6cf3/POF/utils/general.py#L60-L93
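A sketch of how `crop_image_from_xy` might be called, assuming the TF1-era graph API the function itself uses (`tf.name_scope`, static shapes, placeholders); the names and sizes are made up:

```python
import tensorflow as tf  # assumes TF 1.x, matching the function above

images = tf.placeholder(tf.float32, [2, 480, 640, 3])  # [batch, height, width, channels]
centers = tf.placeholder(tf.float32, [2, 2])           # (row, col) crop center per image
crops = crop_image_from_xy(images, centers, crop_size=256, scale=1.0)
# crops: [2, 256, 256, 3], resampled via tf.image.crop_and_resize
```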
**nwo:** krishauser/Klampt · **sha:** `972cc83ea5befac3f653c1ba20f80155768ad519` · **path:** `Python/klampt/robotsim.py` · **language:** python
**identifier:** `Simulator.getState` · **docstring_summary:** Gets the current simulation state, including controller parameters, etc.
**function:**

```python
def getState(self) -> str:
    r"""
    Gets the current simulation state, including controller parameters, etc.

    Returns:
        A Base64 string representing the binary data for the state
    """
    return _robotsim.Simulator_getState(self)
```

**url:** https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/robotsim.py#L7822-L7832
**nwo:** wesnoth/wesnoth · **sha:** `6ccac5a5e8ff75303c9190c0da60580925cb32c0` · **path:** `data/tools/wesnoth/libgithub.py` · **language:** python
**identifier:** `Addon.sync_from` · **docstring_summary:** Synchronises add-on from another directory.
**function:**

```python
def sync_from(self, src, exclude):
    """Synchronises add-on from another directory.

    src: Directory with new add-on version.
    exclude: List of files to ignore.
    Returns whether anything changed.
    Raises libgithub.Error if the checkout is not clean.
    """
    logging.debug("Syncing add-on {0} from add-on server ({1})".format(self.name, src))

    status = self._status()
    if status:
        raise AddonError(self.name, "Checkout is not clean:\n{0}".format("\n".join(status)))

    self._rmtree(".", exclude)
    #actual copying
    self._copytree(src, self.get_dir(), ignore=lambda src, names: [n for n in names if n in exclude])
    self._execute(["git", "add", "."], check_error=True)

    status = self._status()
    return len(status) > 0
```

**url:** https://github.com/wesnoth/wesnoth/blob/6ccac5a5e8ff75303c9190c0da60580925cb32c0/data/tools/wesnoth/libgithub.py#L149-L169
**nwo:** wxWidgets/wxPython-Classic · **sha:** `19571e1ae65f1ac445f5491474121998c97a1bf0` · **path:** `src/gtk/_core.py` · **language:** python
**identifier:** `SizerItemList_iterator.next` · **docstring_summary:** next(self) -> SizerItem
**function:**

```python
def next(*args, **kwargs):
    """next(self) -> SizerItem"""
    return _core_.SizerItemList_iterator_next(*args, **kwargs)
```

**url:** https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L13966-L13968
**nwo:** netket/netket · **sha:** `0d534e54ecbf25b677ea72af6b85947979420652` · **path:** `netket/jax/_vmap_chunked.py` · **language:** python
**identifier:** `vmap_chunked` · **docstring_summary:** Behaves like jax.vmap but uses scan to chunk the computations in smaller chunks.
**function:**

```python
def vmap_chunked(f: Callable, in_axes=0, *, chunk_size: Optional[int]):
    """
    Behaves like jax.vmap but uses scan to chunk the computations in smaller chunks.
    """
    if isinstance(in_axes, int):
        in_axes = (in_axes,)

    if not set(in_axes).issubset((0, None)):
        raise NotImplementedError("Only in_axes 0/None are currently supported")

    argnums = tuple(
        map(lambda ix: ix[0], filter(lambda ix: ix[1] is not None, enumerate(in_axes)))
    )

    vmapped_fun = jax.vmap(f, in_axes=in_axes)

    return _chunk_vmapped_function(vmapped_fun, chunk_size, argnums)
```

**url:** https://github.com/netket/netket/blob/0d534e54ecbf25b677ea72af6b85947979420652/netket/jax/_vmap_chunked.py#L60-L76
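A hypothetical usage sketch; the import location is assumed from the record's `path` column (the module may or may not re-export it publicly):

```python
import jax.numpy as jnp
from netket.jax import vmap_chunked  # assumed export path

def energy(x):
    return jnp.sum(x ** 2)

# Same result as jax.vmap(energy), but evaluated 128 rows at a time
# inside a scan, bounding peak memory on large batches.
f = vmap_chunked(energy, in_axes=0, chunk_size=128)
out = f(jnp.ones((1000, 16)))  # shape (1000,)
```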
**nwo:** microsoft/TSS.MSR · **sha:** `0f2516fca2cd9929c31d5450e39301c9bde43688` · **path:** `TSS.Py/src/TpmTypes.py` · **language:** python
**identifier:** `TPM2_MakeCredential_REQUEST.__init__` · **docstring_summary:** This command allows the TPM to perform the actions required of a Certificate Authority (CA) in creating a TPM2B_ID_OBJECT containing an activation credential.
**function:**

```python
def __init__(self, handle = TPM_HANDLE(), credential = None, objectName = None):
    """ This command allows the TPM to perform the actions required of a
    Certificate Authority (CA) in creating a TPM2B_ID_OBJECT containing an
    activation credential.

    Attributes:
        handle (TPM_HANDLE): Loaded public area, used to encrypt the
            sensitive area containing the credential key
            Auth Index: None
        credential (bytes): The credential information
        objectName (bytes): Name of the object to which the credential applies
    """
    self.handle = handle
    self.credential = credential
    self.objectName = objectName
```

**url:** https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L9920-L9934
**nwo:** vmtk/vmtk · **sha:** `927331ad752265199390eabbbf2e07cdc2b4bcc6` · **path:** `vmtkScripts/contrib/vmtknumpyreader.py` · **language:** python
**identifier:** `vmtkNumpyReader.ReadHDF5File` · **docstring_summary:** Load a dictionary from an HDF5 file.
**function:**

```python
def ReadHDF5File(self):
    """
    Load a dictionary from an HDF5 file.
    """
    try:
        import h5py
    except ImportError:
        self.PrintError('ImportError: Unable to Write to hdf5. h5py module not installed')
        raise ImportError('Unable to Write to hdf5. h5py module not installed')

    def recursively_load_dict_contents_from_group(h5file, path):
        """
        Load contents of an HDF5 group. If further groups are encountered,
        treat them like dicts and continue to load them recursively.
        """
        ans = {}
        for key, item in h5file[path].items():
            if isinstance(item, h5py._hl.dataset.Dataset):
                ans[key] = item.value
            elif isinstance(item, h5py._hl.group.Group):
                ans[key] = recursively_load_dict_contents_from_group(h5file, path + key + '/')
        return ans

    with h5py.File(self.InputFileName, 'r') as h5file:
        self.ArrayDict = recursively_load_dict_contents_from_group(h5file, '/')
```

**url:** https://github.com/vmtk/vmtk/blob/927331ad752265199390eabbbf2e07cdc2b4bcc6/vmtkScripts/contrib/vmtknumpyreader.py#L61-L86
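A self-contained illustration of the recursive group-to-dict pattern above, using the public `h5py.Dataset`/`h5py.Group` classes; the file name is made up. Note that `item.value` in the original is the old h5py accessor; on h5py >= 3 the equivalent is `item[()]`:

```python
import h5py
import numpy as np

with h5py.File("demo.h5", "w") as f:
    f.create_dataset("points", data=np.arange(3))
    f.create_group("cells").create_dataset("ids", data=np.array([0, 1]))

def load(h5file, path="/"):
    # Groups become nested dicts, datasets become values.
    out = {}
    for key, item in h5file[path].items():
        if isinstance(item, h5py.Dataset):
            out[key] = item[()]  # modern equivalent of item.value
        elif isinstance(item, h5py.Group):
            out[key] = load(h5file, path + key + "/")
    return out

with h5py.File("demo.h5", "r") as f:
    print(load(f))  # {'cells': {'ids': array([0, 1])}, 'points': array([0, 1, 2])}
```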
**nwo:** msftguy/ssh-rd · **sha:** `a5f3a79daeac5844edebf01916c9613563f1c390` · **path:** `_3rd/boost_1_48_0/tools/build/v2/build/feature.py` · **language:** python
**identifier:** `get` · **docstring_summary:** Return the Feature instance for the specified name.
**function:**

```python
def get(name):
    """Return the Feature instance for the specified name.

    Throws if no feature by such name exists
    """
    return __all_features[name]
```

**url:** https://github.com/msftguy/ssh-rd/blob/a5f3a79daeac5844edebf01916c9613563f1c390/_3rd/boost_1_48_0/tools/build/v2/build/feature.py#L144-L149
**nwo:** BlzFans/wke · **sha:** `b0fa21158312e40c5fbd84682d643022b6c34a93` · **path:** `cygwin/lib/python2.6/rfc822.py` · **language:** python
**identifier:** `AddrlistClass.getquote` · **docstring_summary:** Get a quote-delimited fragment from self's field.
**function:**

```python
def getquote(self):
    """Get a quote-delimited fragment from self's field."""
    return self.getdelimited('"', '"\r', 0)
```

**url:** https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/rfc822.py#L716-L718
**nwo:** ycm-core/ycmd · **sha:** `fc0fb7e5e15176cc5a2a30c80956335988c6b59a` · **path:** `ycmd/completers/cs/cs_completer.py` · **language:** python
**identifier:** `CsharpSolutionCompleter.ServerIsReady` · **docstring_summary:** Check if our OmniSharp server is ready (loaded solution file).
**function:**

```python
def ServerIsReady( self ):
  """ Check if our OmniSharp server is ready (loaded solution file)."""
  if not self._ServerIsRunning():
    return False

  try:
    return self._GetResponse( '/checkreadystatus', timeout = .2 )
  except Exception:
    return False
```

**url:** https://github.com/ycm-core/ycmd/blob/fc0fb7e5e15176cc5a2a30c80956335988c6b59a/ycmd/completers/cs/cs_completer.py#L846-L854
**nwo:** hanpfei/chromium-net · **sha:** `392cc1fa3a8f92f42e4071ab6e674d8e0482f83f` · **path:** `tools/perf/page_sets/pregenerated_profile_shared_state.py` · **language:** python
**identifier:** `PregeneratedProfileSharedState._MigratePregeneratedProfile` · **docstring_summary:** Migrates the pre-generated profile by launching Chrome with it.
**function:**

```python
def _MigratePregeneratedProfile(self):
  """Migrates the pre-generated profile by launching Chrome with it.

  On success, updates self._migrated_profile and
  self._finder_options.browser_options.profile_dir with the directory of the
  migrated profile.
  """
  self._migrated_profile = tempfile.mkdtemp()
  logging.info("Starting migration of pre-generated profile to %s",
               self._migrated_profile)
  pregenerated_profile = self._finder_options.browser_options.profile_dir

  possible_browser = self._FindBrowser(self._finder_options)
  self._MigrateProfile(self._finder_options, possible_browser,
                       pregenerated_profile, self._migrated_profile)

  self._finder_options.browser_options.profile_dir = self._migrated_profile
  logging.info("Finished migration of pre-generated profile to %s",
               self._migrated_profile)
```

**url:** https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/perf/page_sets/pregenerated_profile_shared_state.py#L140-L157
**nwo:** bigtreetech/BIGTREETECH-SKR-V1.3 · **sha:** `b238aa402753e81d551b7d34a181a262a138ae9e` · **path:** `BTT SKR V1.4/Firmware/Marlin-bugfix-2.0.x-SKR-V1.4/buildroot/share/scripts/createTemperatureLookupMarlin.py` · **language:** python
**identifier:** `Thermistor.adc` · **docstring_summary:** Convert temperature into a ADC reading
**function:**

```python
def adc(self, temp):
    "Convert temperature into a ADC reading"
    x = (self.c1 - (1.0 / (temp+ZERO))) / (2*self.c3)
    y = sqrt((self.c2 / (3*self.c3))**3 + x**2)
    r = exp((y-x)**(1.0/3) - (y+x)**(1.0/3))
    return (r / (self.rp + r)) * ARES
```

**url:** https://github.com/bigtreetech/BIGTREETECH-SKR-V1.3/blob/b238aa402753e81d551b7d34a181a262a138ae9e/BTT SKR V1.4/Firmware/Marlin-bugfix-2.0.x-SKR-V1.4/buildroot/share/scripts/createTemperatureLookupMarlin.py#L82-L87
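The `adc` method above inverts the Steinhart-Hart relation for an NTC thermistor on a pull-up divider. A sketch of the algebra, matching the constants in the code (`ZERO` converts Celsius to Kelvin, `rp` is the pull-up resistance, `ARES` the ADC full scale):

$$\frac{1}{T} = c_1 + c_2 \ln R + c_3 (\ln R)^3.$$

This is a depressed cubic in $\ln R$; with $x = \frac{1}{2c_3}\left(c_1 - \frac{1}{T}\right)$ and $y = \sqrt{\left(\frac{c_2}{3c_3}\right)^3 + x^2}$, Cardano's formula gives

$$\ln R = (y - x)^{1/3} - (y + x)^{1/3},$$

and the voltage divider maps the resistance to a count:

$$\mathrm{ADC} = \frac{R}{R_p + R} \cdot \mathrm{ARES}.$$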
**nwo:** kamyu104/LeetCode-Solutions · **sha:** `77605708a927ea3b85aee5a479db733938c7c211` · **path:** `Python/number-of-pairs-of-strings-with-concatenation-equal-to-target.py` · **language:** python
**identifier:** `Solution2.numOfPairs` · **docstring_summary:** :type nums: List[str] :type target: str :rtype: int
**function:**

```python
def numOfPairs(self, nums, target):
    """
    :type nums: List[str]
    :type target: str
    :rtype: int
    """
    prefix, suffix = collections.Counter(), collections.Counter()
    result = 0
    for num in nums:
        if target.startswith(num):
            result += suffix[len(target)-len(num)]
        if target.endswith(num):
            result += prefix[len(target)-len(num)]
        if target.startswith(num):
            prefix[len(num)] += 1
        if target.endswith(num):
            suffix[len(num)] += 1
    return result
```

**url:** https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/number-of-pairs-of-strings-with-concatenation-equal-to-target.py#L33-L50
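A quick check of the single-pass counting, assuming the class above is in scope (it relies on `import collections`). For `nums = ["777", "7", "77", "77"]` and `target = "7777"`, the valid index pairs are (0,1), (1,0), (2,3), (3,2):

```python
import collections  # required by the Counter-based solution above

print(Solution2().numOfPairs(["777", "7", "77", "77"], "7777"))  # 4
```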
**nwo:** aws/lumberyard · **sha:** `f85344403c1c2e77ec8c75deb2c116e97b713217` · **path:** `dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/_collections_abc.py` · **language:** python
**identifier:** `Coroutine.close` · **docstring_summary:** Raise GeneratorExit inside coroutine.
**function:**

```python
def close(self):
    """Raise GeneratorExit inside coroutine.
    """
    try:
        self.throw(GeneratorExit)
    except (GeneratorExit, StopIteration):
        pass
    else:
        raise RuntimeError("coroutine ignored GeneratorExit")
```

**url:** https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/_collections_abc.py#L138-L146
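The contract `close` enforces can be seen with a generator, which implements the same protocol: `throw` delivers `GeneratorExit` at the suspension point, and a coroutine that swallows it and keeps yielding would hit the `RuntimeError` branch above:

```python
def worker():
    try:
        while True:
            yield
    except GeneratorExit:
        print("cleaned up")  # yielding here instead would make close() raise RuntimeError

g = worker()
next(g)    # advance to the first yield
g.close()  # prints "cleaned up"
```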
**nwo:** aws/lumberyard · **sha:** `f85344403c1c2e77ec8c75deb2c116e97b713217` · **path:** `dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/util/retry.py` · **language:** python
**identifier:** `Retry._is_method_retryable` · **docstring_summary:** Checks if a given HTTP method should be retried upon, depending if it is included on the method whitelist.
**function:**

```python
def _is_method_retryable(self, method):
    """ Checks if a given HTTP method should be retried upon, depending if
    it is included on the method whitelist.
    """
    if self.method_whitelist and method.upper() not in self.method_whitelist:
        return False

    return True
```

**url:** https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/util/retry.py#L285-L292
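A hypothetical configuration showing the whitelist check in effect. This matches the urllib3 < 1.26 API vendored here; newer urllib3 renamed `method_whitelist` to `allowed_methods`:

```python
from urllib3.util.retry import Retry

r = Retry(total=3, method_whitelist=frozenset(["GET", "HEAD"]))
print(r._is_method_retryable("GET"))   # True
print(r._is_method_retryable("POST"))  # False
```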
**nwo:** aws/lumberyard · **sha:** `f85344403c1c2e77ec8c75deb2c116e97b713217` · **path:** `dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/dtypes/dtypes.py` · **language:** python
**identifier:** `Registry.register` · **docstring_summary:** Parameters ---------- dtype : ExtensionDtype
**function:**

```python
def register(self, dtype: Type[ExtensionDtype]) -> None:
    """
    Parameters
    ----------
    dtype : ExtensionDtype
    """
    if not issubclass(dtype, ExtensionDtype):
        raise ValueError("can only register pandas extension dtypes")

    self.dtypes.append(dtype)
```

**url:** https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/dtypes/dtypes.py#L64-L73
**nwo:** intel/llvm · **sha:** `e6d0547e9d99b5a56430c4749f6c7e328bf221ab` · **path:** `clang/bindings/python/clang/cindex.py` · **language:** python
**identifier:** `CompilationDatabase.getAllCompileCommands` · **docstring_summary:** Get an iterable object providing all the CompileCommands available from the database.
**function:**

```python
def getAllCompileCommands(self):
    """
    Get an iterable object providing all the CompileCommands available from
    the database.
    """
    return conf.lib.clang_CompilationDatabase_getAllCompileCommands(self)
```

**url:** https://github.com/intel/llvm/blob/e6d0547e9d99b5a56430c4749f6c7e328bf221ab/clang/bindings/python/clang/cindex.py#L3265-L3270
**nwo:** wxWidgets/wxPython-Classic · **sha:** `19571e1ae65f1ac445f5491474121998c97a1bf0` · **path:** `src/msw/_controls.py` · **language:** python
**identifier:** `TextAttr.HasBackgroundColour` · **docstring_summary:** HasBackgroundColour(self) -> bool
**function:**

```python
def HasBackgroundColour(*args, **kwargs):
    """HasBackgroundColour(self) -> bool"""
    return _controls_.TextAttr_HasBackgroundColour(*args, **kwargs)
```

**url:** https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_controls.py#L1772-L1774
**nwo:** mindspore-ai/mindspore · **sha:** `fb8fd3338605bb34fa5cea054e535a8b1d753fab` · **path:** `mindspore/python/mindspore/ops/_op_impl/tbe/depthwise_conv2d_backprop_filter.py` · **language:** python
**identifier:** `_depthwise_conv2d_backprop_filter_tbe` · **docstring_summary:** DepthwiseConv2DBackpropFilter TBE register
**function:**

```python
def _depthwise_conv2d_backprop_filter_tbe():
    """DepthwiseConv2DBackpropFilter TBE register"""
    return
```

**url:** https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/_op_impl/tbe/depthwise_conv2d_backprop_filter.py#L39-L41
**nwo:** wxWidgets/wxPython-Classic · **sha:** `19571e1ae65f1ac445f5491474121998c97a1bf0` · **path:** `wx/tools/Editra/src/syntax/_edje.py` · **language:** python
**identifier:** `SyntaxData.GetProperties` · **docstring_summary:** Returns a list of Extra Properties to set
**function:**

```python
def GetProperties(self):
    """Returns a list of Extra Properties to set """
    return [FOLD, FOLD_PRE]
```

**url:** https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/syntax/_edje.py#L95-L97
**nwo:** wxWidgets/wxPython-Classic · **sha:** `19571e1ae65f1ac445f5491474121998c97a1bf0` · **path:** `src/osx_cocoa/_windows.py` · **language:** python
**identifier:** `Panel.__init__` · **docstring_summary:** __init__(self, Window parent, int id=-1, Point pos=DefaultPosition, Size size=DefaultSize, long style=wxTAB_TRAVERSAL|wxNO_BORDER, String name=PanelNameStr) -> Panel
**function:**

```python
def __init__(self, *args, **kwargs):
    """
    __init__(self, Window parent, int id=-1, Point pos=DefaultPosition,
        Size size=DefaultSize, long style=wxTAB_TRAVERSAL|wxNO_BORDER,
        String name=PanelNameStr) -> Panel
    """
    _windows_.Panel_swiginit(self, _windows_.new_Panel(*args, **kwargs))
    self._setOORInfo(self)
```

**url:** https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_windows.py#L62-L69
**nwo:** eclipse/sumo · **sha:** `7132a9b8b6eea734bdec38479026b4d8c4336d03` · **path:** `tools/contributed/sumopy/coremodules/network/network.py` · **language:** python
**identifier:** `Edges.get_distances` · **docstring_summary:** Returns distances for all edges. The returned array represents the distance that corresponds to edge IDs.
**function:**

```python
def get_distances(self, id_mode=0, is_check_lanes=False, is_precise=True, modeconst_excl=0.0, modeconst_mix=0.0):
    """
    Returns distances for all edges.
    The returned array represents the distance that corresponds to
    edge IDs.

    If is_check_lanes is True, then the lane speeds are considered where
    the respective mode is allowed.

    If not allowed on a particular edge,
    then the respective edge distance is nan.
    """
    print 'get_distances id_mode,is_check_lanes,speed_max', id_mode, is_check_lanes, is_precise
    ids_edge = self.get_ids()
    dists = np.zeros(np.max(ids_edge)+1, np.float32)
    #speeds = self.speeds_max[ids_edge]

    # if speed_max is not None:
    #    speeds = np.clip(speeds, 0.0, speed_max)
    #
    # elif id_mode is not None:
    #    # limit allowed speeds with max speeds of mode
    #    speeds = np.clip(speeds, 0.0, self.parent.modes.speeds_max[id_mode])

    if not is_precise:
        radii = self.ids_fromnode.get_linktab().radii
        dists[ids_edge] = radii[self.ids_fromnode[ids_edge]] + \
            self.lengths[ids_edge] + radii[self.ids_tonode[ids_edge]]
    else:
        coords = self.parent.nodes.coords
        for id_edge, shape, coord_fromnode, coord_tonode in zip(
                ids_edge, self.shapes[ids_edge],
                coords[self.ids_fromnode[ids_edge]],
                coords[self.ids_tonode[ids_edge]]):
            # print ' id_edge',id_edge,'len %.1f'%(self.lengths[id_edge]),'d_from %.2f'%(np.sqrt(np.sum( (shape[0]-coord_fromnode)**2 )) ),'d_to %.2f'%(np.sqrt(np.sum((shape[-1]-coord_tonode)**2)) )
            dists[id_edge] = np.sum((shape[0]-coord_fromnode)**2) + np.sum((shape[-1]-coord_tonode)**2)
            #dists[id_edge] += np.sum((shape[-1]-coord_tonode)**2)

        # for id_edge, dist2, length in zip(ids_edge, dists[ids_edge], self.lengths[ids_edge]):
        #    print ' id_edge',id_edge,'len %.1f'%(self.lengths[id_edge]),'d_node %.2f'%(np.sqrt(dist2))
        dists[ids_edge] = np.sqrt(dists[ids_edge])+self.lengths[ids_edge]

    ids_lanes = self.ids_lanes
    if is_check_lanes & (id_mode > 0):  # mode 0 can pass everywhere
        #get_laneindex_allowed = self.parent.lanes.get_laneindex_allowed
        get_accesslevel = self.parent.lanes.get_accesslevel
        has_noaccess = np.nan
        for id_edge in ids_edge:
            if get_accesslevel(ids_lanes[id_edge], id_mode) == -1:
                dists[id_edge] = has_noaccess
            accesslevel = get_accesslevel(ids_lanes[id_edge], id_mode)
            if accesslevel == -1:
                dists[id_edge] = has_noaccess
            elif accesslevel == 2:
                dists[id_edge] = max(dists[id_edge] + modeconst_excl, 0)
            elif accesslevel == 1:
                dists[id_edge] = max(dists[id_edge] + modeconst_mix, 0)

    return dists
```

**url:** https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/contributed/sumopy/coremodules/network/network.py#L2008-L2068
**nwo:** bulletphysics/bullet3 · **sha:** `f0f2a952e146f016096db6f85cf0c44ed75b0b9a` · **path:** `examples/pybullet/gym/pybullet_envs/bullet/minitaur.py` · **language:** python
**identifier:** `Minitaur.GetBasePosition` · **docstring_summary:** Get the position of minitaur's base.
**function:**

```python
def GetBasePosition(self):
  """Get the position of minitaur's base.

  Returns:
    The position of minitaur's base.
  """
  position, _ = (self._pybullet_client.getBasePositionAndOrientation(self.quadruped))
  return position
```

**url:** https://github.com/bulletphysics/bullet3/blob/f0f2a952e146f016096db6f85cf0c44ed75b0b9a/examples/pybullet/gym/pybullet_envs/bullet/minitaur.py#L253-L260
**nwo:** hanpfei/chromium-net · **sha:** `392cc1fa3a8f92f42e4071ab6e674d8e0482f83f` · **path:** `third_party/catapult/third_party/gsutil/third_party/rsa/rsa/key.py` · **language:** python
**identifier:** `PublicKey.load_pkcs1_openssl_pem` · **docstring_summary:** Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL.
**function:**

```python
def load_pkcs1_openssl_pem(cls, keyfile):
    '''Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL.

    These files can be recognised in that they start with BEGIN PUBLIC KEY
    rather than BEGIN RSA PUBLIC KEY.

    The contents of the file before the "-----BEGIN PUBLIC KEY-----" and
    after the "-----END PUBLIC KEY-----" lines is ignored.

    @param keyfile: contents of a PEM-encoded file that contains the public
        key, from OpenSSL.
    @return: a PublicKey object
    '''
    der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY')
    return cls.load_pkcs1_openssl_der(der)
```

**url:** https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/rsa/rsa/key.py#L204-L219
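A hypothetical round trip with the python-rsa API: export a SubjectPublicKeyInfo ("BEGIN PUBLIC KEY") PEM with OpenSSL, then load it with the classmethod above. The file names are made up:

```python
# Shell (assumed): openssl genrsa -out key.pem 2048
#                  openssl rsa -in key.pem -pubout -out pub.pem
import rsa

with open("pub.pem", "rb") as f:
    pub = rsa.PublicKey.load_pkcs1_openssl_pem(f.read())
print(pub.n.bit_length())  # 2048
```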
pmq20/node-packer | 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0 | lts/deps/v8/third_party/jinja2/parser.py | python | Parser.parse_statements | (self, end_tokens, drop_needle=False) | return result | Parse multiple statements into a list until one of the end tokens
is reached. This is used to parse the body of statements as it also
parses template data if appropriate. The parser checks first if the
current token is a colon and skips it if there is one. Then it checks
for the block end and parses until if one of the `end_tokens` is
reached. Per default the active token in the stream at the end of
the call is the matched end token. If this is not wanted `drop_needle`
can be set to `True` and the end token is removed. | Parse multiple statements into a list until one of the end tokens
is reached. This is used to parse the body of statements as it also
parses template data if appropriate. The parser checks first if the
current token is a colon and skips it if there is one. Then it checks
for the block end and parses until if one of the `end_tokens` is
reached. Per default the active token in the stream at the end of
the call is the matched end token. If this is not wanted `drop_needle`
can be set to `True` and the end token is removed. | [
"Parse",
"multiple",
"statements",
"into",
"a",
"list",
"until",
"one",
"of",
"the",
"end",
"tokens",
"is",
"reached",
".",
"This",
"is",
"used",
"to",
"parse",
"the",
"body",
"of",
"statements",
"as",
"it",
"also",
"parses",
"template",
"data",
"if",
"appropriate",
".",
"The",
"parser",
"checks",
"first",
"if",
"the",
"current",
"token",
"is",
"a",
"colon",
"and",
"skips",
"it",
"if",
"there",
"is",
"one",
".",
"Then",
"it",
"checks",
"for",
"the",
"block",
"end",
"and",
"parses",
"until",
"if",
"one",
"of",
"the",
"end_tokens",
"is",
"reached",
".",
"Per",
"default",
"the",
"active",
"token",
"in",
"the",
"stream",
"at",
"the",
"end",
"of",
"the",
"call",
"is",
"the",
"matched",
"end",
"token",
".",
"If",
"this",
"is",
"not",
"wanted",
"drop_needle",
"can",
"be",
"set",
"to",
"True",
"and",
"the",
"end",
"token",
"is",
"removed",
"."
]
| def parse_statements(self, end_tokens, drop_needle=False):
"""Parse multiple statements into a list until one of the end tokens
is reached. This is used to parse the body of statements as it also
parses template data if appropriate. The parser checks first if the
current token is a colon and skips it if there is one. Then it checks
for the block end and parses until one of the `end_tokens` is
reached. By default the active token in the stream at the end of
the call is the matched end token. If this is not wanted `drop_needle`
can be set to `True` and the end token is removed.
"""
# the first token may be a colon for python compatibility
self.stream.skip_if('colon')
# in the future it would be possible to add whole code sections
# by adding some sort of end of statement token and parsing those here.
self.stream.expect('block_end')
result = self.subparse(end_tokens)
# we reached the end of the template too early, the subparser
# does not check for this, so we do that now
if self.stream.current.type == 'eof':
self.fail_eof(end_tokens)
if drop_needle:
next(self.stream)
return result | [
"def",
"parse_statements",
"(",
"self",
",",
"end_tokens",
",",
"drop_needle",
"=",
"False",
")",
":",
"# the first token may be a colon for python compatibility",
"self",
".",
"stream",
".",
"skip_if",
"(",
"'colon'",
")",
"# in the future it would be possible to add whole code sections",
"# by adding some sort of end of statement token and parsing those here.",
"self",
".",
"stream",
".",
"expect",
"(",
"'block_end'",
")",
"result",
"=",
"self",
".",
"subparse",
"(",
"end_tokens",
")",
"# we reached the end of the template too early, the subparser",
"# does not check for this, so we do that now",
"if",
"self",
".",
"stream",
".",
"current",
".",
"type",
"==",
"'eof'",
":",
"self",
".",
"fail_eof",
"(",
"end_tokens",
")",
"if",
"drop_needle",
":",
"next",
"(",
"self",
".",
"stream",
")",
"return",
"result"
]
| https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/deps/v8/third_party/jinja2/parser.py#L149-L174 |
|
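`parse_statements` is normally driven from a custom extension's `parse()` method. A minimal sketch, assuming the standard Jinja2 extension API; the tag name `upper` and the helper `_render_upper` are illustrative choices.

```python
from jinja2 import Environment, nodes
from jinja2.ext import Extension

class UpperExtension(Extension):
    tags = {'upper'}

    def parse(self, parser):
        lineno = next(parser.stream).lineno  # consume the 'upper' name token
        # Parse the block body up to {% endupper %}; drop_needle=True
        # removes the end token from the stream, as described above.
        body = parser.parse_statements(['name:endupper'], drop_needle=True)
        call = self.call_method('_render_upper')
        return nodes.CallBlock(call, [], [], body).set_lineno(lineno)

    def _render_upper(self, caller):
        return caller().upper()

env = Environment(extensions=[UpperExtension])
print(env.from_string('{% upper %}hello{% endupper %}').render())  # HELLO
```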
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/tools/Editra/src/eclib/colorsetter.py | python | ColorSetter.__UpdateValues | (self) | Update the values based on the current state of the text control | Update the values based on the current state of the text control | [
"Update",
"the",
"values",
"based",
"on",
"the",
"current",
"state",
"of",
"the",
"text",
"control"
]
| def __UpdateValues(self):
"""Update the values based on the current state of the text control"""
with Freezer(self._txt) as _tmp:
cpos = self._txt.GetInsertionPoint()
hexstr = self._txt.GetValue().replace('#', '').strip()
valid = ''
for char in hexstr:
if char in HEX_CHARS[:-1]:
valid = valid + char
if len(valid) > 6:
valid = valid[:6]
valid = '#' + valid
self._txt.SetValue(valid)
self._txt.SetInsertionPoint(cpos)
valid = valid + (u'0' * (6 - len(valid)))
self._cbtn.SetValue(HexToRGB(valid)) | [
"def",
"__UpdateValues",
"(",
"self",
")",
":",
"with",
"Freezer",
"(",
"self",
".",
"_txt",
")",
"as",
"_tmp",
":",
"cpos",
"=",
"self",
".",
"_txt",
".",
"GetInsertionPoint",
"(",
")",
"hexstr",
"=",
"self",
".",
"_txt",
".",
"GetValue",
"(",
")",
".",
"replace",
"(",
"'#'",
",",
"''",
")",
".",
"strip",
"(",
")",
"valid",
"=",
"''",
"for",
"char",
"in",
"hexstr",
":",
"if",
"char",
"in",
"HEX_CHARS",
"[",
":",
"-",
"1",
"]",
":",
"valid",
"=",
"valid",
"+",
"char",
"if",
"len",
"(",
"valid",
")",
">",
"6",
":",
"valid",
"=",
"valid",
"[",
":",
"6",
"]",
"valid",
"=",
"'#'",
"+",
"valid",
"self",
".",
"_txt",
".",
"SetValue",
"(",
"valid",
")",
"self",
".",
"_txt",
".",
"SetInsertionPoint",
"(",
"cpos",
")",
"valid",
"=",
"valid",
"+",
"(",
"u'0'",
"*",
"(",
"6",
"-",
"len",
"(",
"valid",
")",
")",
")",
"self",
".",
"_cbtn",
".",
"SetValue",
"(",
"HexToRGB",
"(",
"valid",
")",
")"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/eclib/colorsetter.py#L110-L127 |
||
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/contrib/framework/python/framework/checkpoint_utils.py | python | init_from_checkpoint | (checkpoint_dir, assignment_map) | Using assignment map initializes current variables with loaded tensors.
Note: This overrides default initialization ops of specified variables and
redefines dtype.
Assignment map supports the following syntax:
* `'checkpoint_scope_name/': 'scope_name/'` - will load all variables in
current `scope_name` from `checkpoint_scope_name` with matching variable
names.
* `'checkpoint_scope_name/some_other_variable': 'scope_name/variable_name'` -
will initialize `scope_name/variable_name` variable
from `checkpoint_scope_name/some_other_variable`.
* `'scope_variable_name': variable` - will initialize given `tf.Variable`
object with variable from the checkpoint.
* `'scope_variable_name': list(variable)` - will initialize list of
partitioned variables with variable from the checkpoint.
* `'/': 'scope_name/'` - will load all variables in current `scope_name` from
checkpoint's root (e.g. no scope).
Supports loading into partitioned variables, which are represented as
`'<variable>/part_<part #>'`.
Example:
```python
# Create variables.
with tf.variable_scope('test'):
m = tf.get_variable('my_var')
with tf.variable_scope('test2'):
var2 = tf.get_variable('my_var')
var3 = tf.get_variable(name="my1", shape=[100, 100],
partitioner=lambda shape, dtype: [5, 1])
...
# Specify which variables to initialize from checkpoint.
init_from_checkpoint(checkpoint_dir, {
'some_var': 'test/my_var',
'some_scope/': 'test2/'})
...
# Or use `Variable` objects to identify what to initialize.
init_from_checkpoint(checkpoint_dir, {
'some_scope/var2': var2,
})
# Initialize partitioned variables
init_from_checkpoint(checkpoint_dir, {
'some_var_from_ckpt': 'part_var',
})
# Or specifying the list of `Variable` objects.
init_from_checkpoint(checkpoint_dir, {
'some_var_from_ckpt': var3._get_variable_list(),
})
...
# Initialize variables as usual.
session.run(tf.get_all_variables())
```
Args:
checkpoint_dir: Directory with checkpoints file or path to checkpoint.
assignment_map: Dict, where keys are names of the variables in the
checkpoint and values are current variables or names of current variables
(in default graph).
Raises:
tf.errors.OpError: If missing checkpoints or tensors in checkpoints.
ValueError: If missing variables in current graph. | Using assignment map initializes current variables with loaded tensors. | [
"Using",
"assignment",
"map",
"initializes",
"current",
"variables",
"with",
"loaded",
"tensors",
"."
]
| def init_from_checkpoint(checkpoint_dir, assignment_map):
"""Using assignment map initializes current variables with loaded tensors.
Note: This overrides default initialization ops of specified variables and
redefines dtype.
Assignment map supports the following syntax:
* `'checkpoint_scope_name/': 'scope_name/'` - will load all variables in
current `scope_name` from `checkpoint_scope_name` with matching variable
names.
* `'checkpoint_scope_name/some_other_variable': 'scope_name/variable_name'` -
will initialize `scope_name/variable_name` variable
from `checkpoint_scope_name/some_other_variable`.
* `'scope_variable_name': variable` - will initialize given `tf.Variable`
object with variable from the checkpoint.
* `'scope_variable_name': list(variable)` - will initialize list of
partitioned variables with variable from the checkpoint.
* `'/': 'scope_name/'` - will load all variables in current `scope_name` from
checkpoint's root (e.g. no scope).
Supports loading into partitioned variables, which are represented as
`'<variable>/part_<part #>'`.
Example:
```python
# Create variables.
with tf.variable_scope('test'):
m = tf.get_variable('my_var')
with tf.variable_scope('test2'):
var2 = tf.get_variable('my_var')
var3 = tf.get_variable(name="my1", shape=[100, 100],
partitioner=lambda shape, dtype: [5, 1])
...
# Specify which variables to initialize from checkpoint.
init_from_checkpoint(checkpoint_dir, {
'some_var': 'test/my_var',
'some_scope/': 'test2/'})
...
# Or use `Variable` objects to identify what to initialize.
init_from_checkpoint(checkpoint_dir, {
'some_scope/var2': var2,
})
# Initialize partitioned variables
init_from_checkpoint(checkpoint_dir, {
'some_var_from_ckpt': 'part_var',
})
# Or specifying the list of `Variable` objects.
init_from_checkpoint(checkpoint_dir, {
'some_var_from_ckpt': var3._get_variable_list(),
})
...
# Initialize variables as usual.
session.run(tf.get_all_variables())
```
Args:
checkpoint_dir: Directory with checkpoints file or path to checkpoint.
assignment_map: Dict, where keys are names of the variables in the
checkpoint and values are current variables or names of current variables
(in default graph).
Raises:
tf.errors.OpError: If missing checkpoints or tensors in checkpoints.
ValueError: If missing variables in current graph.
"""
filepattern = _get_checkpoint_filename(checkpoint_dir)
reader = load_checkpoint(checkpoint_dir)
variable_map = reader.get_variable_to_shape_map()
for tensor_name_in_ckpt, current_var_or_name in six.iteritems(assignment_map):
var = None
# Check if this is Variable object or list of Variable objects (in case of
# partitioned variables).
is_var = lambda x: isinstance(x, variables.Variable)
if is_var(current_var_or_name) or (
isinstance(current_var_or_name, list)
and all(is_var(v) for v in current_var_or_name)):
var = current_var_or_name
else:
var_scope = vs._get_default_variable_store()
# Check if this variable is in var_store.
var = var_scope._vars.get(current_var_or_name, None)
# Also check if variable is partitioned as list.
if var is None:
var = _collect_partitioned_variable(current_var_or_name, var_scope)
if var is not None:
# If 1 to 1 mapping was provided, find variable in the checkpoint.
if tensor_name_in_ckpt not in variable_map:
raise ValueError("Tensor %s is not found in %s checkpoint %s" % (
tensor_name_in_ckpt, checkpoint_dir, variable_map
))
if is_var(var):
# Additional at-call-time checks.
if not var.get_shape().is_compatible_with(
variable_map[tensor_name_in_ckpt]):
raise ValueError(
"Shape of variable %s (%s) doesn't match with shape of "
"tensor %s (%s) from checkpoint reader." % (
var.name, str(var.get_shape()),
tensor_name_in_ckpt, str(variable_map[tensor_name_in_ckpt])
))
var_name = var.name
else:
var_name = ",".join([v.name for v in var])
_set_variable_or_list_initializer(var, filepattern, tensor_name_in_ckpt)
logging.info("Initialize variable %s from checkpoint %s with %s" % (
var_name, checkpoint_dir, tensor_name_in_ckpt
))
else:
scopes = ""
# TODO(vihanjain): Support list of 'current_var_or_name' here.
if "/" in current_var_or_name:
scopes = current_var_or_name[:current_var_or_name.rindex("/")]
if not tensor_name_in_ckpt.endswith("/"):
raise ValueError(
"Assignment map with scope only name {} should map to scope only "
"{}. Should be 'scope/': 'other_scope/'.".format(
scopes, tensor_name_in_ckpt))
# If scope to scope mapping was provided, find all variables in the scope
# and create variable to variable mapping.
scope_variables = set()
for var_name in var_scope._vars:
if not scopes or var_name.startswith(scopes + "/"):
# Consume /part_ if partitioned variable.
if "/part_" in var_name:
var_name = var_name[:var_name.index("/part_")]
scope_variables.add(var_name)
for var_name in scope_variables:
# Lookup name with specified prefix and suffix from current variable.
# If tensor_name given is '/' (root), don't use it for full name.
full_tensor_name = var_name[len(scopes):]
if current_var_or_name != "/":
full_tensor_name = full_tensor_name[1:]
if tensor_name_in_ckpt != "/":
full_tensor_name = tensor_name_in_ckpt + full_tensor_name
if full_tensor_name not in variable_map:
raise ValueError(
"Tensor %s (%s in %s) is not found in %s checkpoint" % (
full_tensor_name, var_name[len(scopes) + 1:],
tensor_name_in_ckpt, checkpoint_dir
))
var = var_scope._vars.get(var_name, None)
if var is None:
var = _collect_partitioned_variable(var_name, var_scope)
_set_variable_or_list_initializer(var, filepattern, full_tensor_name)
logging.info("Initialize variable %s from checkpoint %s with %s" % (
var_name, checkpoint_dir, full_tensor_name
)) | [
"def",
"init_from_checkpoint",
"(",
"checkpoint_dir",
",",
"assignment_map",
")",
":",
"filepattern",
"=",
"_get_checkpoint_filename",
"(",
"checkpoint_dir",
")",
"reader",
"=",
"load_checkpoint",
"(",
"checkpoint_dir",
")",
"variable_map",
"=",
"reader",
".",
"get_variable_to_shape_map",
"(",
")",
"for",
"tensor_name_in_ckpt",
",",
"current_var_or_name",
"in",
"six",
".",
"iteritems",
"(",
"assignment_map",
")",
":",
"var",
"=",
"None",
"# Check if this is Variable object or list of Variable objects (in case of",
"# partitioned variables).",
"is_var",
"=",
"lambda",
"x",
":",
"isinstance",
"(",
"x",
",",
"variables",
".",
"Variable",
")",
"if",
"is_var",
"(",
"current_var_or_name",
")",
"or",
"(",
"isinstance",
"(",
"current_var_or_name",
",",
"list",
")",
"and",
"all",
"(",
"is_var",
"(",
"v",
")",
"for",
"v",
"in",
"current_var_or_name",
")",
")",
":",
"var",
"=",
"current_var_or_name",
"else",
":",
"var_scope",
"=",
"vs",
".",
"_get_default_variable_store",
"(",
")",
"# Check if this variable is in var_store.",
"var",
"=",
"var_scope",
".",
"_vars",
".",
"get",
"(",
"current_var_or_name",
",",
"None",
")",
"# Also check if variable is partitioned as list.",
"if",
"var",
"is",
"None",
":",
"var",
"=",
"_collect_partitioned_variable",
"(",
"current_var_or_name",
",",
"var_scope",
")",
"if",
"var",
"is",
"not",
"None",
":",
"# If 1 to 1 mapping was provided, find variable in the checkpoint.",
"if",
"tensor_name_in_ckpt",
"not",
"in",
"variable_map",
":",
"raise",
"ValueError",
"(",
"\"Tensor %s is not found in %s checkpoint %s\"",
"%",
"(",
"tensor_name_in_ckpt",
",",
"checkpoint_dir",
",",
"variable_map",
")",
")",
"if",
"is_var",
"(",
"var",
")",
":",
"# Additional at-call-time checks.",
"if",
"not",
"var",
".",
"get_shape",
"(",
")",
".",
"is_compatible_with",
"(",
"variable_map",
"[",
"tensor_name_in_ckpt",
"]",
")",
":",
"raise",
"ValueError",
"(",
"\"Shape of variable %s (%s) doesn't match with shape of \"",
"\"tensor %s (%s) from checkpoint reader.\"",
"%",
"(",
"var",
".",
"name",
",",
"str",
"(",
"var",
".",
"get_shape",
"(",
")",
")",
",",
"tensor_name_in_ckpt",
",",
"str",
"(",
"variable_map",
"[",
"tensor_name_in_ckpt",
"]",
")",
")",
")",
"var_name",
"=",
"var",
".",
"name",
"else",
":",
"var_name",
"=",
"\",\"",
".",
"join",
"(",
"[",
"v",
".",
"name",
"for",
"v",
"in",
"var",
"]",
")",
"_set_variable_or_list_initializer",
"(",
"var",
",",
"filepattern",
",",
"tensor_name_in_ckpt",
")",
"logging",
".",
"info",
"(",
"\"Initialize variable %s from checkpoint %s with %s\"",
"%",
"(",
"var_name",
",",
"checkpoint_dir",
",",
"tensor_name_in_ckpt",
")",
")",
"else",
":",
"scopes",
"=",
"\"\"",
"# TODO(vihanjain): Support list of 'current_var_or_name' here.",
"if",
"\"/\"",
"in",
"current_var_or_name",
":",
"scopes",
"=",
"current_var_or_name",
"[",
":",
"current_var_or_name",
".",
"rindex",
"(",
"\"/\"",
")",
"]",
"if",
"not",
"tensor_name_in_ckpt",
".",
"endswith",
"(",
"\"/\"",
")",
":",
"raise",
"ValueError",
"(",
"\"Assignment map with scope only name {} should map to scope only \"",
"\"{}. Should be 'scope/': 'other_scope/'.\"",
".",
"format",
"(",
"scopes",
",",
"tensor_name_in_ckpt",
")",
")",
"# If scope to scope mapping was provided, find all variables in the scope",
"# and create variable to variable mapping.",
"scope_variables",
"=",
"set",
"(",
")",
"for",
"var_name",
"in",
"var_scope",
".",
"_vars",
":",
"if",
"not",
"scopes",
"or",
"var_name",
".",
"startswith",
"(",
"scopes",
"+",
"\"/\"",
")",
":",
"# Consume /part_ if partitioned variable.",
"if",
"\"/part_\"",
"in",
"var_name",
":",
"var_name",
"=",
"var_name",
"[",
":",
"var_name",
".",
"index",
"(",
"\"/part_\"",
")",
"]",
"scope_variables",
".",
"add",
"(",
"var_name",
")",
"for",
"var_name",
"in",
"scope_variables",
":",
"# Lookup name with specified prefix and suffix from current variable.",
"# If tensor_name given is '/' (root), don't use it for full name.",
"full_tensor_name",
"=",
"var_name",
"[",
"len",
"(",
"scopes",
")",
":",
"]",
"if",
"current_var_or_name",
"!=",
"\"/\"",
":",
"full_tensor_name",
"=",
"full_tensor_name",
"[",
"1",
":",
"]",
"if",
"tensor_name_in_ckpt",
"!=",
"\"/\"",
":",
"full_tensor_name",
"=",
"tensor_name_in_ckpt",
"+",
"full_tensor_name",
"if",
"full_tensor_name",
"not",
"in",
"variable_map",
":",
"raise",
"ValueError",
"(",
"\"Tensor %s (%s in %s) is not found in %s checkpoint\"",
"%",
"(",
"full_tensor_name",
",",
"var_name",
"[",
"len",
"(",
"scopes",
")",
"+",
"1",
":",
"]",
",",
"tensor_name_in_ckpt",
",",
"checkpoint_dir",
")",
")",
"var",
"=",
"var_scope",
".",
"_vars",
".",
"get",
"(",
"var_name",
",",
"None",
")",
"if",
"var",
"is",
"None",
":",
"var",
"=",
"_collect_partitioned_variable",
"(",
"var_name",
",",
"var_scope",
")",
"_set_variable_or_list_initializer",
"(",
"var",
",",
"filepattern",
",",
"full_tensor_name",
")",
"logging",
".",
"info",
"(",
"\"Initialize variable %s from checkpoint %s with %s\"",
"%",
"(",
"var_name",
",",
"checkpoint_dir",
",",
"full_tensor_name",
")",
")"
]
| https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/framework/python/framework/checkpoint_utils.py#L152-L300 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/richtext.py | python | TextBoxAttr.GetOutline | (*args) | return _richtext.TextBoxAttr_GetOutline(*args) | GetOutline(self) -> TextAttrBorders
GetOutline(self) -> TextAttrBorders | GetOutline(self) -> TextAttrBorders
GetOutline(self) -> TextAttrBorders | [
"GetOutline",
"(",
"self",
")",
"-",
">",
"TextAttrBorders",
"GetOutline",
"(",
"self",
")",
"-",
">",
"TextAttrBorders"
]
| def GetOutline(*args):
"""
GetOutline(self) -> TextAttrBorders
GetOutline(self) -> TextAttrBorders
"""
return _richtext.TextBoxAttr_GetOutline(*args) | [
"def",
"GetOutline",
"(",
"*",
"args",
")",
":",
"return",
"_richtext",
".",
"TextBoxAttr_GetOutline",
"(",
"*",
"args",
")"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/richtext.py#L768-L773 |
|
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/basic_fitting/basic_fitting_model.py | python | BasicFittingModel.plot_guess_start_x | (self, plot_guess_start_x: float) | Sets the start x to use in the guess plot. | Sets the start x to use in the guess plot. | [
"Sets",
"the",
"start",
"x",
"to",
"use",
"in",
"the",
"guess",
"plot",
"."
]
| def plot_guess_start_x(self, plot_guess_start_x: float) -> None:
"""Sets the start x to use in the guess plot."""
self.fitting_context.plot_guess_start_x = plot_guess_start_x | [
"def",
"plot_guess_start_x",
"(",
"self",
",",
"plot_guess_start_x",
":",
"float",
")",
"->",
"None",
":",
"self",
".",
"fitting_context",
".",
"plot_guess_start_x",
"=",
"plot_guess_start_x"
]
| https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/basic_fitting/basic_fitting_model.py#L400-L402 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/grid.py | python | Grid.RefreshAttr | (*args, **kwargs) | return _grid.Grid_RefreshAttr(*args, **kwargs) | RefreshAttr(self, int row, int col) | RefreshAttr(self, int row, int col) | [
"RefreshAttr",
"(",
"self",
"int",
"row",
"int",
"col",
")"
]
| def RefreshAttr(*args, **kwargs):
"""RefreshAttr(self, int row, int col)"""
return _grid.Grid_RefreshAttr(*args, **kwargs) | [
"def",
"RefreshAttr",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"Grid_RefreshAttr",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/grid.py#L1722-L1724 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pkg_resources/__init__.py | python | IMetadataProvider.metadata_isdir | (name) | Is the named metadata a directory? (like ``os.path.isdir()``) | Is the named metadata a directory? (like ``os.path.isdir()``) | [
"Is",
"the",
"named",
"metadata",
"a",
"directory?",
"(",
"like",
"os",
".",
"path",
".",
"isdir",
"()",
")"
]
| def metadata_isdir(name):
"""Is the named metadata a directory? (like ``os.path.isdir()``)""" | [
"def",
"metadata_isdir",
"(",
"name",
")",
":"
]
| https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pkg_resources/__init__.py#L516-L517 |
||
jubatus/jubatus | 1251ce551bac980488a6313728e72b3fe0b79a9f | plugin/src/fv_converter/python_bridge/python/space_splitter.py | python | SpaceSplitter.split | (self, text) | return result | Splits the input text with space.
The input text is typed as ``str``.
This method should return a list of boundaries (pairs of beginning
position and length), e.g., ``[ (0, 1), (3, 4) ]``. | Splits the input text with space. | [
"Splits",
"the",
"input",
"text",
"with",
"space",
"."
]
| def split(self, text):
"""
Splits the input text with space.
The input text is typed as ``str``.
This method should return a list of boundaries (pairs of beginning
position and length), e.g., ``[ (0, 1), (3, 4) ]``.
"""
result = []
begin = 0
length = 0
for (pos, char) in enumerate(text):
if char == ' ':
if 0 < length:
result.append((begin, length))
length = 0
begin = pos + 1
else:
length += 1
if length != 0:
result.append((begin, length))
return result | [
"def",
"split",
"(",
"self",
",",
"text",
")",
":",
"result",
"=",
"[",
"]",
"begin",
"=",
"0",
"length",
"=",
"0",
"for",
"(",
"pos",
",",
"char",
")",
"in",
"enumerate",
"(",
"text",
")",
":",
"if",
"char",
"==",
"' '",
":",
"if",
"0",
"<",
"length",
":",
"result",
".",
"append",
"(",
"(",
"begin",
",",
"length",
")",
")",
"length",
"=",
"0",
"begin",
"=",
"pos",
"+",
"1",
"else",
":",
"length",
"+=",
"1",
"if",
"length",
"!=",
"0",
":",
"result",
".",
"append",
"(",
"(",
"begin",
",",
"length",
")",
")",
"return",
"result"
]
| https://github.com/jubatus/jubatus/blob/1251ce551bac980488a6313728e72b3fe0b79a9f/plugin/src/fv_converter/python_bridge/python/space_splitter.py#L15-L38 |
|
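The boundary format is easiest to see on a concrete input; the same algorithm as a standalone function:

```python
def split_spaces(text):
    """Same algorithm as SpaceSplitter.split above, as a free function."""
    result, begin, length = [], 0, 0
    for pos, char in enumerate(text):
        if char == ' ':
            if length > 0:
                result.append((begin, length))
            length = 0
            begin = pos + 1
        else:
            length += 1
    if length != 0:
        result.append((begin, length))
    return result

print(split_spaces('ab  cd e'))  # [(0, 2), (4, 2), (7, 1)]
```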
KhronosGroup/SPIR | f33c27876d9f3d5810162b60fa89cc13d2b55725 | bindings/python/clang/cindex.py | python | CursorKind.is_statement | (self) | return conf.lib.clang_isStatement(self) | Test if this is a statement kind. | Test if this is a statement kind. | [
"Test",
"if",
"this",
"is",
"a",
"statement",
"kind",
"."
]
| def is_statement(self):
"""Test if this is a statement kind."""
return conf.lib.clang_isStatement(self) | [
"def",
"is_statement",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_isStatement",
"(",
"self",
")"
]
| https://github.com/KhronosGroup/SPIR/blob/f33c27876d9f3d5810162b60fa89cc13d2b55725/bindings/python/clang/cindex.py#L531-L533 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/_core.py | python | SizerItem.SetFlag | (*args, **kwargs) | return _core_.SizerItem_SetFlag(*args, **kwargs) | SetFlag(self, int flag)
Set the flag value for this item. | SetFlag(self, int flag) | [
"SetFlag",
"(",
"self",
"int",
"flag",
")"
]
| def SetFlag(*args, **kwargs):
"""
SetFlag(self, int flag)
Set the flag value for this item.
"""
return _core_.SizerItem_SetFlag(*args, **kwargs) | [
"def",
"SetFlag",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"SizerItem_SetFlag",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L14203-L14209 |
|
bulletphysics/bullet3 | f0f2a952e146f016096db6f85cf0c44ed75b0b9a | examples/pybullet/gym/pybullet_envs/minitaur/envs_v2/evaluation/metric.py | python | MetricCore.__init__ | (
self,
name: Text,
scope: MetricScope,
single_ep_aggregator: Callable[[Sequence[Any]], Any],
multi_ep_aggregator: Callable[[Sequence[Any]], Dict[Text, Any]],
) | Initializes the class.
Args:
name: The name of the metric, for example "motor_torques",
"distance_to_wall", etc. The full name of the metric will have scope
name in the prefix, i.e. "scope/name".
scope: The scope of this metric. Most metrics should be for DEBUG purposes.
The scope name will be added to the final name of the metric in this way:
"scope/name", which is the standard format for Tensorboard to group
named variables.
single_ep_aggregator: The function to process all aggregated metric
values. The derived MetricReporter (see below) implements
reset_episode() which clears the episode data, and will be called during
env.reset().
multi_ep_aggregator: The function to process multi-episode metric values.
We assume the input to the function is a list of per-episode metric
values, i.e. each element of the list is the output from the
single_ep_aggregator. | Initializes the class. | [
"Initializes",
"the",
"class",
"."
]
| def __init__(
self,
name: Text,
scope: MetricScope,
single_ep_aggregator: Callable[[Sequence[Any]], Any],
multi_ep_aggregator: Callable[[Sequence[Any]], Dict[Text, Any]],
):
"""Initializes the class.
Args:
name: The name of the metric, for example "motor_torques",
"distance_to_wall", etc. The full name of the metric will have scope
name in the prefix, i.e. "scope/name".
scope: The scope of this metric. Most metrics should be for DEBUG purposes.
The scope name will be added to the final name of the metric in this way:
"scope/name", which is the standard format for Tensorboard to group
named variables.
single_ep_aggregator: The function to process all aggregated metric
values. The derived MetricReporter (see below) implements
reset_episode() which clears the episode data, and will be called during
env.reset().
multi_ep_aggregator: The function to process multi-episode metric values.
We assume the input to the function is a list of per-episode metric
values, i.e. each element of the list is the output from the
single_ep_aggregator.
"""
self._name = scope.name + "/" + name
self._single_ep_aggregator = single_ep_aggregator
self._multi_ep_aggregator = multi_ep_aggregator
self._episode_data = [] | [
"def",
"__init__",
"(",
"self",
",",
"name",
":",
"Text",
",",
"scope",
":",
"MetricScope",
",",
"single_ep_aggregator",
":",
"Callable",
"[",
"[",
"Sequence",
"[",
"Any",
"]",
"]",
",",
"Any",
"]",
",",
"multi_ep_aggregator",
":",
"Callable",
"[",
"[",
"Sequence",
"[",
"Any",
"]",
"]",
",",
"Dict",
"[",
"Text",
",",
"Any",
"]",
"]",
",",
")",
":",
"self",
".",
"_name",
"=",
"scope",
".",
"name",
"+",
"\"/\"",
"+",
"name",
"self",
".",
"_single_ep_aggregator",
"=",
"single_ep_aggregator",
"self",
".",
"_multi_ep_aggregator",
"=",
"multi_ep_aggregator",
"self",
".",
"_episode_data",
"=",
"[",
"]"
]
| https://github.com/bulletphysics/bullet3/blob/f0f2a952e146f016096db6f85cf0c44ed75b0b9a/examples/pybullet/gym/pybullet_envs/minitaur/envs_v2/evaluation/metric.py#L38-L67 |
||
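A construction sketch for the class above. `MetricScope.DEBUG` is assumed to be a member of the enum per the docstring, and the NumPy aggregators are illustrative choices, not the project's own defaults.

```python
import numpy as np

torques = MetricCore(
    name='motor_torques',
    scope=MetricScope.DEBUG,       # assumed enum member
    single_ep_aggregator=np.mean,  # collapses one episode to one number
    multi_ep_aggregator=lambda per_ep: {'mean': float(np.mean(per_ep)),
                                        'std': float(np.std(per_ep))},
)
print(torques._name)  # 'DEBUG/motor_torques' (peeking at the private field)
```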
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/tools/Editra/src/util.py | python | GetFileType | (fname) | Get what the type of the file is as Editra sees it
in a formatted string.
@param fname: file path
@return: string (formatted/translated filetype) | Get what the type of the file is as Editra sees it
in a formatted string.
@param fname: file path
@return: string (formatted/translated filetype) | [
"Get",
"what",
"the",
"type",
"of",
"the",
"file",
"is",
"as",
"Editra",
"sees",
"it",
"in",
"a",
"formatted",
"string",
".",
"@param",
"fname",
":",
"file",
"path",
"@return",
":",
"string",
"(",
"formatted",
"/",
"translated",
"filetype",
")"
]
| def GetFileType(fname):
"""Get what the type of the file is as Editra sees it
in a formatted string.
@param fname: file path
@return: string (formatted/translated filetype)
"""
if os.path.isdir(fname):
return _("Folder")
eguess = syntax.GetTypeFromExt(fname.split('.')[-1])
if eguess == synglob.LANG_TXT and fname.split('.')[-1] == 'txt':
return _("Text Document")
elif eguess == synglob.LANG_TXT:
mtype = mimetypes.guess_type(fname)[0]
if mtype is not None:
return mtype
else:
return _("Unknown")
else:
return _("%s Source File") % eguess | [
"def",
"GetFileType",
"(",
"fname",
")",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"fname",
")",
":",
"return",
"_",
"(",
"\"Folder\"",
")",
"eguess",
"=",
"syntax",
".",
"GetTypeFromExt",
"(",
"fname",
".",
"split",
"(",
"'.'",
")",
"[",
"-",
"1",
"]",
")",
"if",
"eguess",
"==",
"synglob",
".",
"LANG_TXT",
"and",
"fname",
".",
"split",
"(",
"'.'",
")",
"[",
"-",
"1",
"]",
"==",
"'txt'",
":",
"return",
"_",
"(",
"\"Text Document\"",
")",
"elif",
"eguess",
"==",
"synglob",
".",
"LANG_TXT",
":",
"mtype",
"=",
"mimetypes",
".",
"guess_type",
"(",
"fname",
")",
"[",
"0",
"]",
"if",
"mtype",
"is",
"not",
"None",
":",
"return",
"mtype",
"else",
":",
"return",
"_",
"(",
"\"Unknown\"",
")",
"else",
":",
"return",
"_",
"(",
"\"%s Source File\"",
")",
"%",
"eguess"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/util.py#L366-L386 |
||
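A simplified, framework-free version of the same decision chain, with the Editra syntax lookup replaced by a plain `mimetypes` fallback:

```python
import mimetypes
import os

def describe(fname):
    if os.path.isdir(fname):
        return 'Folder'
    if fname.rsplit('.', 1)[-1] == 'txt':
        return 'Text Document'
    return mimetypes.guess_type(fname)[0] or 'Unknown'

print(describe('notes.txt'))   # Text Document
print(describe('photo.jpeg'))  # image/jpeg
```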
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py2/scipy/linalg/_interpolative_backend.py | python | iddp_svd | (eps, A) | return U, V, S | Compute SVD of a real matrix to a specified relative precision.
:param eps:
Relative precision.
:type eps: float
:param A:
Matrix.
:type A: :class:`numpy.ndarray`
:return:
Left singular vectors.
:rtype: :class:`numpy.ndarray`
:return:
Right singular vectors.
:rtype: :class:`numpy.ndarray`
:return:
Singular values.
:rtype: :class:`numpy.ndarray` | Compute SVD of a real matrix to a specified relative precision. | [
"Compute",
"SVD",
"of",
"a",
"real",
"matrix",
"to",
"a",
"specified",
"relative",
"precision",
"."
]
| def iddp_svd(eps, A):
"""
Compute SVD of a real matrix to a specified relative precision.
:param eps:
Relative precision.
:type eps: float
:param A:
Matrix.
:type A: :class:`numpy.ndarray`
:return:
Left singular vectors.
:rtype: :class:`numpy.ndarray`
:return:
Right singular vectors.
:rtype: :class:`numpy.ndarray`
:return:
Singular values.
:rtype: :class:`numpy.ndarray`
"""
A = np.asfortranarray(A)
m, n = A.shape
k, iU, iV, iS, w, ier = _id.iddp_svd(eps, A)
if ier:
raise _RETCODE_ERROR
U = w[iU-1:iU+m*k-1].reshape((m, k), order='F')
V = w[iV-1:iV+n*k-1].reshape((n, k), order='F')
S = w[iS-1:iS+k-1]
return U, V, S | [
"def",
"iddp_svd",
"(",
"eps",
",",
"A",
")",
":",
"A",
"=",
"np",
".",
"asfortranarray",
"(",
"A",
")",
"m",
",",
"n",
"=",
"A",
".",
"shape",
"k",
",",
"iU",
",",
"iV",
",",
"iS",
",",
"w",
",",
"ier",
"=",
"_id",
".",
"iddp_svd",
"(",
"eps",
",",
"A",
")",
"if",
"ier",
":",
"raise",
"_RETCODE_ERROR",
"U",
"=",
"w",
"[",
"iU",
"-",
"1",
":",
"iU",
"+",
"m",
"*",
"k",
"-",
"1",
"]",
".",
"reshape",
"(",
"(",
"m",
",",
"k",
")",
",",
"order",
"=",
"'F'",
")",
"V",
"=",
"w",
"[",
"iV",
"-",
"1",
":",
"iV",
"+",
"n",
"*",
"k",
"-",
"1",
"]",
".",
"reshape",
"(",
"(",
"n",
",",
"k",
")",
",",
"order",
"=",
"'F'",
")",
"S",
"=",
"w",
"[",
"iS",
"-",
"1",
":",
"iS",
"+",
"k",
"-",
"1",
"]",
"return",
"U",
",",
"V",
",",
"S"
]
| https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/linalg/_interpolative_backend.py#L448-L477 |
|
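This backend routine is normally reached through the public wrapper in `scipy.linalg.interpolative`; a small usage sketch, assuming that wrapper:

```python
import numpy as np
import scipy.linalg.interpolative as sli

A = np.random.randn(80, 50)
U, S, V = sli.svd(A, 1e-8)  # rank chosen to meet the relative precision
err = np.linalg.norm(A - (U * S).dot(V.conj().T)) / np.linalg.norm(A)
print(U.shape, S.shape, err)  # relative error on the order of eps
```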
infinidb/infinidb | 6c9f5dfdabc41ad80e81ba9e1a4eb0d7271a5d23 | writeengine/bulk/qa-bulkload.py | python | main | () | | Bulk load the database.
Check that we can write OIDfiles, that all required tools exist,
clean up old files, sort the index inserts and generally rock and roll | Bulk load the database.
Check that we can write OIDfiles, that all required tools exist,
clean up old files, sort the index inserts and generally rock and roll | [
"Bulk",
"load",
"the",
"database",
"..",
"Check",
"that",
"we",
"can",
"write",
"OIDfiles",
"that",
"all",
"required",
"tools",
"exist",
"clean",
"up",
"old",
"files",
"sort",
"the",
"index",
"inserts",
"and",
"generally",
"rock",
"and",
"roll"
]
| def main():
"""
Bulk load the database.
Check that we can write OIDfiles, that all required tools exist,
clean up old files, sort the index inserts and generally rock and roll
"""
start_dir = curdir=os.getcwd() # remember where we started
if not os.access('.', os.W_OK):
os.chdir('/tmp')
logger.warn('Changing to /tmp to have permission to write files')
if not os.environ.has_key('LD_LIBRARY_PATH'):
logger.info('No environment variable LD_LIBRARY_PATH')
else:
if len(os.getenv('LD_LIBRARY_PATH'))<5:
logger.info('Suspicious LD_LIBRARY_PATH: %s'%os.getenv('LD_LIBRARY_PATH'))
#-- figure out paths
home = os.getenv('HOME')
cache = {}
cache['idx'] = '-c'
cache['col'] = '-c'
#-- allow us to specify a write engine branch
opts, args = getopt.getopt(sys.argv[1:], 'n:u', ['nocache=', 'usage'])
for opt, arg in opts:
if opt == '-n' or opt == '--nocache':
if (arg=='idx' or arg=='col'):
cache[arg] = ''
logger.info("No cache for %s"% arg)
if opt == '-u' or opt == '--usage':
usage()
sys.exit()
(bulkroot, dbroot) = find_paths()
logger.info ("Bulkroot: %s \tDBRoot: %s\n"%(bulkroot, dbroot))
check_dirs(bulkroot, dbroot)
if len(glob.glob(bulkroot+'/data/import/*tbl')) == 0:
sys.exit("No files for import found in BulkRoot: %s"%(bulkroot))
if len(glob.glob(dbroot+'/000.dir'))==0:
logger.info("No files found in DBRoot: %s (not fatal)"%dbroot)
## qa version does not build any tools. Cease and desist if any tools missing
toolset = ['dbbuilder', 'cpimport', 'ipcs-pat', 'PrimProc']
for tool in toolset:
try:
res = os.system('which %s'%tool)
finally:
if res:
logger.error("Fatal error: %s not found"%tool)
sys.exit(-1)
## clean up before starting
## remove old db files, remove old temp files, remove shared memory segments,
## kill old PrimProc and start new one
logger.info ("Removing old DB files")
exec_cmd('rm -fr ', dbroot+'/000.dir')
logger.info ("Removing old temp files")
exec_cmd('rm -fr ', bulkroot+'/data/import/*.idx.txt')
logger.info ("Removing shared memory segments")
exec_cmd('ipcs-pat', '-d')
logger.info("Killing primProc")
os.system('killall -q -u $USER PrimProc')
logger.info("Starting primProc")
exec_cmd('PrimProc', "> primproc.log &")
## run dbbuilder
logger.info ("Building db and indexes (no data inserted)")
exec_cmd('yes | dbbuilder', ' 5')
logger.info ("Relocating OID files")
for file in ['colOIDFile.dat', 'dicOIDFile.dat', 'indexOIDFile.dat']:
# use os.rename instead of shutil.move to avoid problems traversing devices
os.rename(file, dbroot+'/'+file)
for xmlfile in glob.glob('./Job*xml'):
logger.info ("Copying %s to %s\n"%(xmlfile, bulkroot+'/job'))
# use os.rename instead of shutil.move to avoid problems traversing devices
os.rename(xmlfile, bulkroot+'/job/'+xmlfile)
exec_cmd('time cpimport', '-j 299 -b %s'%cache['col'])
exec_cmd('time cpimport', '-j 299 -l %s'%cache['col'])
exec_cmd('time cpimport', '-j 300 -i -o %s'%cache['idx'])
logger.info("Over-riding HWM in job file - setting to 0")
fix_hwm(bulkroot+'/job/Job_300.xml')
## sort the files after scanning index job file for mapName(s)
logger.info ("Sorting indexes before insertion")
indexes = find_indexes(bulkroot+'/job/Job_300.xml')
for index in indexes:
data_file='%s/data/import/%s.dat.idx.txt'%(bulkroot, index)
sort_file ='%s/data/import/%s.dat.idx.sort'%(bulkroot, index)
exec_cmd('time sort',' -k1 -n %s > %s'%(data_file, sort_file))
# use os.rename instead of shutil.move to avoid problems traversing devices
os.rename( sort_file, data_file)
logger.info("Inserting indexes")
try:
logger.info("Trying with -m option")
exec_cmd('cpimport', '-j 300 -m -i -s %s'%cache['idx'])
except:
try:
logger.warn("cpimport with -m option failed, fall back to regular options")
exec_cmd('cpimport', '-j 300 -i -s %s'%cache['idx'])
except:
logger.error("Index load failed") | [
"def",
"main",
"(",
")",
":",
"start_dir",
"=",
"curdir",
"=",
"os",
".",
"getcwd",
"(",
")",
"# remember where we started",
"if",
"not",
"os",
".",
"access",
"(",
"'.'",
",",
"os",
".",
"W_OK",
")",
":",
"os",
".",
"chdir",
"(",
"'/tmp'",
")",
"logger",
".",
"warn",
"(",
"'Changing to /tmp to have permission to write files'",
")",
"if",
"not",
"os",
".",
"environ",
".",
"has_key",
"(",
"'LD_LIBRARY_PATH'",
")",
":",
"logger",
".",
"info",
"(",
"'No environment variable LD_LIBRARY_PATH'",
")",
"else",
":",
"if",
"len",
"(",
"os",
".",
"getenv",
"(",
"'LD_LIBRARY_PATH'",
")",
")",
"<",
"5",
":",
"logger",
".",
"info",
"(",
"'Suspicous LD_LIBRARY_PATH: %s'",
"%",
"os",
".",
"getenv",
"(",
"'LD_LIBRARY_PATH'",
")",
")",
"#-- figure out paths",
"home",
"=",
"os",
".",
"getenv",
"(",
"'HOME'",
")",
"cache",
"=",
"{",
"}",
"cache",
"[",
"'idx'",
"]",
"=",
"'-c'",
"cache",
"[",
"'col'",
"]",
"=",
"'-c'",
"#-- allow us to specify a write engine branch",
"opts",
",",
"args",
"=",
"getopt",
".",
"getopt",
"(",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
",",
"'n:u'",
",",
"[",
"'nocache='",
",",
"'usage'",
"]",
")",
"for",
"opt",
",",
"arg",
"in",
"opts",
":",
"if",
"opt",
"==",
"'-n'",
"or",
"opt",
"==",
"'--nocache'",
":",
"if",
"(",
"arg",
"==",
"'idx'",
"or",
"arg",
"==",
"'col'",
")",
":",
"cache",
"[",
"arg",
"]",
"=",
"''",
"logger",
".",
"info",
"(",
"\"No cache for %s\"",
"%",
"arg",
")",
"if",
"opt",
"==",
"'-u'",
"or",
"opt",
"==",
"'--usage'",
":",
"usage",
"(",
")",
"sys",
".",
"exit",
"(",
")",
"(",
"bulkroot",
",",
"dbroot",
")",
"=",
"find_paths",
"(",
")",
"logger",
".",
"info",
"(",
"\"Bulkroot: %s \\tDBRoot: %s\\n\"",
"%",
"(",
"bulkroot",
",",
"dbroot",
")",
")",
"check_dirs",
"(",
"bulkroot",
",",
"dbroot",
")",
"if",
"len",
"(",
"glob",
".",
"glob",
"(",
"bulkroot",
"+",
"'/data/import/*tbl'",
")",
")",
"==",
"0",
":",
"sys",
".",
"exit",
"(",
"\"No files for import found in BulkRoot: %s\"",
"%",
"(",
"bulkroot",
")",
")",
"if",
"len",
"(",
"glob",
".",
"glob",
"(",
"dbroot",
"+",
"'/000.dir'",
")",
")",
"==",
"0",
":",
"logger",
".",
"info",
"(",
"\"No files found in DBRoot: %s (not fatal)\"",
"%",
"dbroot",
")",
"## qa version does not build any tools. Cease and desist if any tools missing",
"toolset",
"=",
"[",
"'dbbuilder'",
",",
"'cpimport'",
",",
"'ipcs-pat'",
",",
"'PrimProc'",
"]",
"for",
"tool",
"in",
"toolset",
":",
"try",
":",
"res",
"=",
"os",
".",
"system",
"(",
"'which %s'",
"%",
"tool",
")",
"finally",
":",
"if",
"res",
":",
"logger",
".",
"error",
"(",
"\"Fatal error: %s not found\"",
"%",
"tool",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"## clean up before starting",
"## remove old db files, removed old temp files, remove shared memory segments, ",
"## kill old PrimProc and start new one",
"logger",
".",
"info",
"(",
"\"Removing old DB files\"",
")",
"exec_cmd",
"(",
"'rm -fr '",
",",
"dbroot",
"+",
"'/000.dir'",
")",
"logger",
".",
"info",
"(",
"\"Removing old temp files\"",
")",
"exec_cmd",
"(",
"'rm -fr '",
",",
"bulkroot",
"+",
"'/data/import/*.idx.txt'",
")",
"logger",
".",
"info",
"(",
"\"Removing shared memory segments\"",
")",
"exec_cmd",
"(",
"'ipcs-pat'",
",",
"'-d'",
")",
"logger",
".",
"info",
"(",
"\"Killing primProc\"",
")",
"os",
".",
"system",
"(",
"'killall -q -u $USER PrimProc'",
")",
"logger",
".",
"info",
"(",
"\"Starting primProc\"",
")",
"exec_cmd",
"(",
"'PrimProc'",
",",
"\"> primproc.log &\"",
")",
"## run dbbuilder",
"logger",
".",
"info",
"(",
"\"Building db and indexes (no data inserted)\"",
")",
"exec_cmd",
"(",
"'yes | dbbuilder'",
",",
"' 5'",
")",
"logger",
".",
"info",
"(",
"\"Relocating OID files\"",
")",
"for",
"file",
"in",
"[",
"'colOIDFile.dat'",
",",
"'dicOIDFile.dat'",
",",
"'indexOIDFile.dat'",
"]",
":",
"# use os.rename instead of shutil.move to avoid problems traversing devices ",
"os",
".",
"rename",
"(",
"file",
",",
"dbroot",
"+",
"'/'",
"+",
"file",
")",
"for",
"xmlfile",
"in",
"glob",
".",
"glob",
"(",
"'./Job*xml'",
")",
":",
"logger",
".",
"info",
"(",
"\"Copying %s to %s\\n\"",
"%",
"(",
"xmlfile",
",",
"bulkroot",
"+",
"'/job'",
")",
")",
"# use os.rename instead of shutil.move to avoid problems traversing devices ",
"os",
".",
"rename",
"(",
"xmlfile",
",",
"bulkroot",
"+",
"'/job/'",
"+",
"xmlfile",
")",
"exec_cmd",
"(",
"'time cpimport'",
",",
"'-j 299 -b %s'",
"%",
"cache",
"[",
"'col'",
"]",
")",
"exec_cmd",
"(",
"'time cpimport'",
",",
"'-j 299 -l %s'",
"%",
"cache",
"[",
"'col'",
"]",
")",
"exec_cmd",
"(",
"'time cpimport'",
",",
"'-j 300 -i -o %s'",
"%",
"cache",
"[",
"'idx'",
"]",
")",
"logger",
".",
"info",
"(",
"\"Over-riding HWM in job file - setting to 0\"",
")",
"fix_hwm",
"(",
"bulkroot",
"+",
"'/job/Job_300.xml'",
")",
"## sort the files after scanning index job file for mapName(s)",
"logger",
".",
"info",
"(",
"\"Sorting indexes before insertion\"",
")",
"indexes",
"=",
"find_indexes",
"(",
"bulkroot",
"+",
"'/job/Job_300.xml'",
")",
"for",
"index",
"in",
"indexes",
":",
"data_file",
"=",
"'%s/data/import/%s.dat.idx.txt'",
"%",
"(",
"bulkroot",
",",
"index",
")",
"sort_file",
"=",
"'%s/data/import/%s.dat.idx.sort'",
"%",
"(",
"bulkroot",
",",
"index",
")",
"exec_cmd",
"(",
"'time sort'",
",",
"' -k1 -n %s > %s'",
"%",
"(",
"data_file",
",",
"sort_file",
")",
")",
"# use os.rename instead of shutil.move to avoid problems traversing devices ",
"os",
".",
"rename",
"(",
"sort_file",
",",
"data_file",
")",
"logger",
".",
"info",
"(",
"\"Inserting indexes\"",
")",
"try",
":",
"logger",
".",
"info",
"(",
"\"Trying with -m option\"",
")",
"exec_cmd",
"(",
"'cpimport'",
",",
"'-j 300 -m -i -s %s'",
"%",
"cache",
"[",
"'idx'",
"]",
")",
"except",
":",
"try",
":",
"logger",
".",
"warn",
"(",
"\"cpimport with -m option failed, fall back to regular options\"",
")",
"exec_cmd",
"(",
"'cpimport'",
",",
"'-j 300 -i -s %s'",
"%",
"cache",
"[",
"'idx'",
"]",
")",
"except",
":",
"logger",
".",
"error",
"(",
"\"Index load failed\"",
")"
]
| https://github.com/infinidb/infinidb/blob/6c9f5dfdabc41ad80e81ba9e1a4eb0d7271a5d23/writeengine/bulk/qa-bulkload.py#L178-L300 |
||
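The option handling at the top of `main()` stands alone; a runnable sketch of just that part, with the rest of the pipeline elided:

```python
import getopt
import sys

def parse_opts(argv):
    cache = {'idx': '-c', 'col': '-c'}
    opts, _ = getopt.getopt(argv, 'n:u', ['nocache=', 'usage'])
    for opt, arg in opts:
        if opt in ('-n', '--nocache') and arg in cache:
            cache[arg] = ''  # drop the -c flag for that cpimport stage
        elif opt in ('-u', '--usage'):
            print('usage: qa-bulkload.py [-n idx|col] [-u]')
            sys.exit()
    return cache

print(parse_opts(['-n', 'idx']))  # {'idx': '', 'col': '-c'}
```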
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | tools/android/loading/core_set.py | python | _Spawn | (site_list_file, graph_sets, input_dir, output_dir, workers) | Spool site computation out to a multiprocessing pool. | Spool site computation out to a multiprocessing pool. | [
"Spool",
"site",
"computation",
"out",
"to",
"a",
"multiprocessing",
"pool",
"."
]
| def _Spawn(site_list_file, graph_sets, input_dir, output_dir, workers):
"""Spool site computation out to a multiprocessing pool."""
with file(site_list_file) as site_file:
sites = [l.strip() for l in site_file.readlines()]
_Progress('Using sites:\n %s' % '\n '.join(sites))
pool = multiprocessing.Pool(workers, maxtasksperchild=1)
pool.map(_DoSiteRedirect, [(s, graph_sets, input_dir, output_dir)
for s in sites]) | [
"def",
"_Spawn",
"(",
"site_list_file",
",",
"graph_sets",
",",
"input_dir",
",",
"output_dir",
",",
"workers",
")",
":",
"with",
"file",
"(",
"site_list_file",
")",
"as",
"site_file",
":",
"sites",
"=",
"[",
"l",
".",
"strip",
"(",
")",
"for",
"l",
"in",
"site_file",
".",
"readlines",
"(",
")",
"]",
"_Progress",
"(",
"'Using sites:\\n %s'",
"%",
"'\\n '",
".",
"join",
"(",
"sites",
")",
")",
"pool",
"=",
"multiprocessing",
".",
"Pool",
"(",
"workers",
",",
"maxtasksperchild",
"=",
"1",
")",
"pool",
".",
"map",
"(",
"_DoSiteRedirect",
",",
"[",
"(",
"s",
",",
"graph_sets",
",",
"input_dir",
",",
"output_dir",
")",
"for",
"s",
"in",
"sites",
"]",
")"
]
| https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/android/loading/core_set.py#L81-L88 |
||
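The fan-out is a plain multiprocessing map; a self-contained sketch with a dummy worker standing in for `_DoSiteRedirect`:

```python
import multiprocessing

def handle_site(args):
    site, output_dir = args  # one (site, config) tuple per task
    return '%s -> %s' % (site, output_dir)

if __name__ == '__main__':
    # maxtasksperchild=1 gives every task a fresh worker process.
    pool = multiprocessing.Pool(2, maxtasksperchild=1)
    print(pool.map(handle_site, [(s, '/tmp/out') for s in ('a.com', 'b.com')]))
```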
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/lookup_ops.py | python | TableInitializerBase.value_dtype | (self) | return self._value_dtype | The expected table value dtype. | The expected table value dtype. | [
"The",
"expected",
"table",
"value",
"dtype",
"."
]
| def value_dtype(self):
"""The expected table value dtype."""
return self._value_dtype | [
"def",
"value_dtype",
"(",
"self",
")",
":",
"return",
"self",
".",
"_value_dtype"
]
| https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/lookup_ops.py#L494-L496 |
|
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/framework/ops.py | python | set_shapes_for_outputs | (op) | Uses the registered shape functions to set the shapes for op's outputs. | Uses the registered shape functions to set the shapes for op's outputs. | [
"Uses",
"the",
"registered",
"shape",
"functions",
"to",
"set",
"the",
"shapes",
"for",
"op",
"s",
"outputs",
"."
]
| def set_shapes_for_outputs(op):
"""Uses the registered shape functions to set the shapes for op's outputs."""
try:
shape_func = _shape_registry.lookup(op.type)
except LookupError:
try:
shape_func = _default_shape_function_registry.lookup(op.type)
except LookupError:
shape_func = _call_cpp_shape_fn_and_require_op
shapes = shape_func(op)
if shapes is None:
raise RuntimeError(
"Shape function for op %s did not return any shapes" % op)
elif isinstance(shapes, dict):
# Returned by call_cpp_shape_fn
shapes_dict = shapes
shapes = shapes_dict["shapes"]
handle_datas = shapes_dict["handle_data"]
for output, handle_data in zip(op.outputs, handle_datas):
# pylint: disable=protected-access
output._handle_data = handle_data
# pylint: enable=protected-access
if len(op.outputs) != len(shapes):
raise RuntimeError(
"Shape function for op %s returned %d shapes but expected %d %s %s" %
(op, len(shapes), len(op.outputs), shape_func.__name__, str(shapes)))
for output, s in zip(op.outputs, shapes):
output.set_shape(s) | [
"def",
"set_shapes_for_outputs",
"(",
"op",
")",
":",
"try",
":",
"shape_func",
"=",
"_shape_registry",
".",
"lookup",
"(",
"op",
".",
"type",
")",
"except",
"LookupError",
":",
"try",
":",
"shape_func",
"=",
"_default_shape_function_registry",
".",
"lookup",
"(",
"op",
".",
"type",
")",
"except",
"LookupError",
":",
"shape_func",
"=",
"_call_cpp_shape_fn_and_require_op",
"shapes",
"=",
"shape_func",
"(",
"op",
")",
"if",
"shapes",
"is",
"None",
":",
"raise",
"RuntimeError",
"(",
"\"Shape function for op %s did not return any shapes\"",
"%",
"op",
")",
"elif",
"isinstance",
"(",
"shapes",
",",
"dict",
")",
":",
"# Returned by call_cpp_shape_fn",
"shapes_dict",
"=",
"shapes",
"shapes",
"=",
"shapes_dict",
"[",
"\"shapes\"",
"]",
"handle_datas",
"=",
"shapes_dict",
"[",
"\"handle_data\"",
"]",
"for",
"output",
",",
"handle_data",
"in",
"zip",
"(",
"op",
".",
"outputs",
",",
"handle_datas",
")",
":",
"# pylint: disable=protected-access",
"output",
".",
"_handle_data",
"=",
"handle_data",
"# pylint: enable=protected-access",
"if",
"len",
"(",
"op",
".",
"outputs",
")",
"!=",
"len",
"(",
"shapes",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Shape function for op %s returned %d shapes but expected %d %s %s\"",
"%",
"(",
"op",
",",
"len",
"(",
"shapes",
")",
",",
"len",
"(",
"op",
".",
"outputs",
")",
",",
"shape_func",
".",
"__name__",
",",
"str",
"(",
"shapes",
")",
")",
")",
"for",
"output",
",",
"s",
"in",
"zip",
"(",
"op",
".",
"outputs",
",",
"shapes",
")",
":",
"output",
".",
"set_shape",
"(",
"s",
")"
]
| https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/framework/ops.py#L1901-L1930 |
||
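The lookup chain (registered shape function, then the default registry, then the C++ fallback) in a toy, TensorFlow-free form; every name here is illustrative:

```python
class Registry(object):
    def __init__(self):
        self._fns = {}

    def register(self, key, fn):
        self._fns[key] = fn

    def lookup(self, key):
        if key not in self._fns:
            raise LookupError(key)
        return self._fns[key]

shape_registry = Registry()
default_registry = Registry()
default_registry.register('Identity', lambda op: [op])

def find_shape_fn(op_type):
    for registry in (shape_registry, default_registry):
        try:
            return registry.lookup(op_type)
        except LookupError:
            pass
    return lambda op: []  # stand-in for the C++ shape function

print(find_shape_fn('Identity')('x'))  # ['x']
print(find_shape_fn('MatMul')('x'))    # []
```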
google/clif | cab24d6a105609a65c95a36a1712ae3c20c7b5df | clif/python/proto.py | python | _PyName | (desc, pkg) | return desc.fqname[len(pkg)+1:] | Return the Python name of the entity in |desc| from proto package |pkg|. | Return the Python name of the entity in |desc| from proto package |pkg|. | [
"Return",
"the",
"Python",
"name",
"of",
"the",
"entity",
"in",
"|desc|",
"from",
"proto",
"package",
"|pkg|",
"."
]
| def _PyName(desc, pkg):
"""Return the Python name of the entity in |desc| from proto package |pkg|."""
if not pkg: return desc.fqname
assert desc.fqname.startswith(pkg)
return desc.fqname[len(pkg)+1:] | [
"def",
"_PyName",
"(",
"desc",
",",
"pkg",
")",
":",
"if",
"not",
"pkg",
":",
"return",
"desc",
".",
"fqname",
"assert",
"desc",
".",
"fqname",
".",
"startswith",
"(",
"pkg",
")",
"return",
"desc",
".",
"fqname",
"[",
"len",
"(",
"pkg",
")",
"+",
"1",
":",
"]"
]
| https://github.com/google/clif/blob/cab24d6a105609a65c95a36a1712ae3c20c7b5df/clif/python/proto.py#L65-L69 |
|
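The prefix-stripping rule, spelled out on a literal example (the package and message names are made up):

```python
fqname, pkg = 'mypkg.sub.Message', 'mypkg.sub'
assert fqname.startswith(pkg)
print(fqname[len(pkg) + 1:])  # 'Message' (drops 'mypkg.sub.' incl. the dot)
```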
Dobiasd/frugally-deep | 99d9378c6ef537a209bcb2a102e953899a6ab0e3 | keras_export/convert_model.py | python | show_softmax_layer | (layer) | Serialize softmax layer to dict | Serialize softmax layer to dict | [
"Serialize",
"softmax",
"layer",
"to",
"dict"
]
| def show_softmax_layer(layer):
"""Serialize softmax layer to dict"""
assert layer.axis == -1 | [
"def",
"show_softmax_layer",
"(",
"layer",
")",
":",
"assert",
"layer",
".",
"axis",
"==",
"-",
"1"
]
| https://github.com/Dobiasd/frugally-deep/blob/99d9378c6ef537a209bcb2a102e953899a6ab0e3/keras_export/convert_model.py#L487-L489 |
||
BitMEX/api-connectors | 37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812 | auto-generated/python/swagger_client/models/affiliate.py | python | Affiliate.prev_comm | (self) | return self._prev_comm | Gets the prev_comm of this Affiliate. # noqa: E501
:return: The prev_comm of this Affiliate. # noqa: E501
:rtype: float | Gets the prev_comm of this Affiliate. # noqa: E501 | [
"Gets",
"the",
"prev_comm",
"of",
"this",
"Affiliate",
".",
"#",
"noqa",
":",
"E501"
]
| def prev_comm(self):
"""Gets the prev_comm of this Affiliate. # noqa: E501
:return: The prev_comm of this Affiliate. # noqa: E501
:rtype: float
"""
return self._prev_comm | [
"def",
"prev_comm",
"(",
"self",
")",
":",
"return",
"self",
".",
"_prev_comm"
]
| https://github.com/BitMEX/api-connectors/blob/37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812/auto-generated/python/swagger_client/models/affiliate.py#L217-L224 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py3/scipy/stats/mstats_basic.py | python | trim | (a, limits=None, inclusive=(True,True), relative=False, axis=None) | Trims an array by masking the data outside some given limits.
Returns a masked version of the input array.
%s
Examples
--------
>>> from scipy.stats.mstats import trim
>>> z = [ 1, 2, 3, 4, 5, 6, 7, 8, 9,10]
>>> print(trim(z,(3,8)))
[-- -- 3 4 5 6 7 8 -- --]
>>> print(trim(z,(0.1,0.2),relative=True))
[-- 2 3 4 5 6 7 8 -- --] | Trims an array by masking the data outside some given limits. | [
"Trims",
"an",
"array",
"by",
"masking",
"the",
"data",
"outside",
"some",
"given",
"limits",
"."
]
| def trim(a, limits=None, inclusive=(True,True), relative=False, axis=None):
"""
Trims an array by masking the data outside some given limits.
Returns a masked version of the input array.
%s
Examples
--------
>>> from scipy.stats.mstats import trim
>>> z = [ 1, 2, 3, 4, 5, 6, 7, 8, 9,10]
>>> print(trim(z,(3,8)))
[-- -- 3 4 5 6 7 8 -- --]
>>> print(trim(z,(0.1,0.2),relative=True))
[-- 2 3 4 5 6 7 8 -- --]
"""
if relative:
return trimr(a, limits=limits, inclusive=inclusive, axis=axis)
else:
return trima(a, limits=limits, inclusive=inclusive) | [
"def",
"trim",
"(",
"a",
",",
"limits",
"=",
"None",
",",
"inclusive",
"=",
"(",
"True",
",",
"True",
")",
",",
"relative",
"=",
"False",
",",
"axis",
"=",
"None",
")",
":",
"if",
"relative",
":",
"return",
"trimr",
"(",
"a",
",",
"limits",
"=",
"limits",
",",
"inclusive",
"=",
"inclusive",
",",
"axis",
"=",
"axis",
")",
"else",
":",
"return",
"trima",
"(",
"a",
",",
"limits",
"=",
"limits",
",",
"inclusive",
"=",
"inclusive",
")"
]
| https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/stats/mstats_basic.py#L1445-L1466 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/py/editor.py | python | EditorShellNotebookFrame.bufferDestroy | (self) | Destroy the current buffer. | Destroy the current buffer. | [
"Destroy",
"the",
"current",
"buffer",
"."
]
| def bufferDestroy(self):
"""Destroy the current buffer."""
if self.buffer:
self.editor = None
del self.buffers[self.buffer.id]
self.buffer = None # Do this before DeletePage().
if self._singlefile:
self.notebook.Destroy()
self.notebook = None
else:
selection = self.notebook.GetSelection()
## print "Destroy Selection:", selection
self.notebook.DeletePage(selection) | [
"def",
"bufferDestroy",
"(",
"self",
")",
":",
"if",
"self",
".",
"buffer",
":",
"self",
".",
"editor",
"=",
"None",
"del",
"self",
".",
"buffers",
"[",
"self",
".",
"buffer",
".",
"id",
"]",
"self",
".",
"buffer",
"=",
"None",
"# Do this before DeletePage().",
"if",
"self",
".",
"_singlefile",
":",
"self",
".",
"notebook",
".",
"Destroy",
"(",
")",
"self",
".",
"notebook",
"=",
"None",
"else",
":",
"selection",
"=",
"self",
".",
"notebook",
".",
"GetSelection",
"(",
")",
"## print \"Destroy Selection:\", selection",
"self",
".",
"notebook",
".",
"DeletePage",
"(",
"selection",
")"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/py/editor.py#L472-L484 |
||
rodeofx/OpenWalter | 6116fbe3f04f1146c854afbfbdbe944feaee647e | walter/maya/scripts/walterPanel/walterOutliner.py | python | TreeModel.setData | (self, index, value, role=QtCore.Qt.EditRole) | return False | Set the role data for the item at index to value. | Set the role data for the item at index to value. | [
"Set",
"the",
"role",
"data",
"for",
"the",
"item",
"at",
"index",
"to",
"value",
"."
]
| def setData(self, index, value, role=QtCore.Qt.EditRole):
"""Set the role data for the item at index to value."""
if not index.isValid():
return False
if role == QtCore.Qt.EditRole and value:
item = index.internalPointer()
originItem = item.getOriginItem()
self.traverser.rename(originItem.getName(), item.getName(), value)
item.setName(value)
return True
elif role == SHADER:
item = index.internalPointer()
item.setOverride(value, self.treeView().getLayer(), OVERRIDE_SHADER)
self.dataChanged.emit(index, QtCore.QModelIndex())
return True
elif role == DISPLACEMENT:
item = index.internalPointer()
item.setOverride(
value, self.treeView().getLayer(), OVERRIDE_DISPLACEMENT)
self.dataChanged.emit(index, QtCore.QModelIndex())
return True
elif role == ATTRIBUTE:
item = index.internalPointer()
item.setOverride(
value, self.treeView().getLayer(), OVERRIDE_ATTRIBUTE)
self.dataChanged.emit(index, QtCore.QModelIndex())
return True
elif role == TRANSFORM:
item = index.internalPointer()
item.setOverride(
value, self.treeView().getLayer(), OVERRIDE_TRANSFORM)
self.dataChanged.emit(index, QtCore.QModelIndex())
return True
elif role == SHADER_ICON:
item = index.internalPointer()
item.setSurfaceIconColor(value)
self.dataChanged.emit(index, QtCore.QModelIndex())
return True
return False | [
"def",
"setData",
"(",
"self",
",",
"index",
",",
"value",
",",
"role",
"=",
"QtCore",
".",
"Qt",
".",
"EditRole",
")",
":",
"if",
"not",
"index",
".",
"isValid",
"(",
")",
":",
"return",
"False",
"if",
"role",
"==",
"QtCore",
".",
"Qt",
".",
"EditRole",
"and",
"value",
":",
"item",
"=",
"index",
".",
"internalPointer",
"(",
")",
"originItem",
"=",
"item",
".",
"getOriginItem",
"(",
")",
"self",
".",
"traverser",
".",
"rename",
"(",
"originItem",
".",
"getName",
"(",
")",
",",
"item",
".",
"getName",
"(",
")",
",",
"value",
")",
"item",
".",
"setName",
"(",
"value",
")",
"return",
"True",
"elif",
"role",
"==",
"SHADER",
":",
"item",
"=",
"index",
".",
"internalPointer",
"(",
")",
"item",
".",
"setOverride",
"(",
"value",
",",
"self",
".",
"treeView",
"(",
")",
".",
"getLayer",
"(",
")",
",",
"OVERRIDE_SHADER",
")",
"self",
".",
"dataChanged",
".",
"emit",
"(",
"index",
",",
"QtCore",
".",
"QModelIndex",
"(",
")",
")",
"return",
"True",
"elif",
"role",
"==",
"DISPLACEMENT",
":",
"item",
"=",
"index",
".",
"internalPointer",
"(",
")",
"item",
".",
"setOverride",
"(",
"value",
",",
"self",
".",
"treeView",
"(",
")",
".",
"getLayer",
"(",
")",
",",
"OVERRIDE_DISPLACEMENT",
")",
"self",
".",
"dataChanged",
".",
"emit",
"(",
"index",
",",
"QtCore",
".",
"QModelIndex",
"(",
")",
")",
"return",
"True",
"elif",
"role",
"==",
"ATTRIBUTE",
":",
"item",
"=",
"index",
".",
"internalPointer",
"(",
")",
"item",
".",
"setOverride",
"(",
"value",
",",
"self",
".",
"treeView",
"(",
")",
".",
"getLayer",
"(",
")",
",",
"OVERRIDE_ATTRIBUTE",
")",
"self",
".",
"dataChanged",
".",
"emit",
"(",
"index",
",",
"QtCore",
".",
"QModelIndex",
"(",
")",
")",
"return",
"True",
"elif",
"role",
"==",
"TRANSFORM",
":",
"item",
"=",
"index",
".",
"internalPointer",
"(",
")",
"item",
".",
"setOverride",
"(",
"value",
",",
"self",
".",
"treeView",
"(",
")",
".",
"getLayer",
"(",
")",
",",
"OVERRIDE_TRANSFORM",
")",
"self",
".",
"dataChanged",
".",
"emit",
"(",
"index",
",",
"QtCore",
".",
"QModelIndex",
"(",
")",
")",
"return",
"True",
"elif",
"role",
"==",
"SHADER_ICON",
":",
"item",
"=",
"index",
".",
"internalPointer",
"(",
")",
"item",
".",
"setSurfaceIconColor",
"(",
"value",
")",
"self",
".",
"dataChanged",
".",
"emit",
"(",
"index",
",",
"QtCore",
".",
"QModelIndex",
"(",
")",
")",
"return",
"True",
"return",
"False"
]
| https://github.com/rodeofx/OpenWalter/blob/6116fbe3f04f1146c854afbfbdbe944feaee647e/walter/maya/scripts/walterPanel/walterOutliner.py#L947-L995 |
|
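Note on the pattern above: `setData` validates the index, dispatches on the role (Qt's `EditRole` plus the module's custom roles such as `SHADER`), mutates the item, emits `dataChanged`, and returns `True` only on success. A minimal self-contained sketch of the same shape with one hypothetical custom role (`COLOR_ROLE` and `ColorListModel` are illustrative names, not part of OpenWalter; assumes PyQt5):

```python
from PyQt5 import QtCore

COLOR_ROLE = QtCore.Qt.UserRole + 1  # custom role, like SHADER above

class ColorListModel(QtCore.QAbstractListModel):
    def __init__(self, names, parent=None):
        super().__init__(parent)
        self._items = [{"name": n, "color": None} for n in names]

    def rowCount(self, parent=QtCore.QModelIndex()):
        return len(self._items)

    def data(self, index, role=QtCore.Qt.DisplayRole):
        if not index.isValid():
            return None
        item = self._items[index.row()]
        if role == QtCore.Qt.DisplayRole:
            return item["name"]
        if role == COLOR_ROLE:
            return item["color"]
        return None

    def setData(self, index, value, role=QtCore.Qt.EditRole):
        # Same shape as TreeModel.setData: validate, dispatch on role,
        # mutate, notify views, return True on success.
        if not index.isValid():
            return False
        if role == QtCore.Qt.EditRole and value:
            self._items[index.row()]["name"] = value
        elif role == COLOR_ROLE:
            self._items[index.row()]["color"] = value
        else:
            return False
        self.dataChanged.emit(index, index, [role])
        return True
```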
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_windows.py | python | PyWindow.DoSetSize | (*args, **kwargs) | return _windows_.PyWindow_DoSetSize(*args, **kwargs) | DoSetSize(self, int x, int y, int width, int height, int sizeFlags=SIZE_AUTO) | DoSetSize(self, int x, int y, int width, int height, int sizeFlags=SIZE_AUTO) | [
"DoSetSize",
"(",
"self",
"int",
"x",
"int",
"y",
"int",
"width",
"int",
"height",
"int",
"sizeFlags",
"=",
"SIZE_AUTO",
")"
]
| def DoSetSize(*args, **kwargs):
"""DoSetSize(self, int x, int y, int width, int height, int sizeFlags=SIZE_AUTO)"""
return _windows_.PyWindow_DoSetSize(*args, **kwargs) | [
"def",
"DoSetSize",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"PyWindow_DoSetSize",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L4158-L4160 |
|
mongodb/mongo | d8ff665343ad29cf286ee2cf4a1960d29371937b | buildscripts/resmokelib/powercycle/lib/__init__.py | python | PowercycleCommand.is_windows | () | return sys.platform == "win32" or sys.platform == "cygwin" | :return: True if running on Windows. | :return: True if running on Windows. | [
":",
"return",
":",
"True",
"if",
"running",
"on",
"Windows",
"."
]
| def is_windows() -> bool:
""":return: True if running on Windows."""
return sys.platform == "win32" or sys.platform == "cygwin" | [
"def",
"is_windows",
"(",
")",
"->",
"bool",
":",
"return",
"sys",
".",
"platform",
"==",
"\"win32\"",
"or",
"sys",
".",
"platform",
"==",
"\"cygwin\""
]
| https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/resmokelib/powercycle/lib/__init__.py#L40-L42 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/distutils/command/install_egg_info.py | python | to_filename | (name) | return name.replace('-','_') | Convert a project or version name to its filename-escaped form
Any '-' characters are currently replaced with '_'. | Convert a project or version name to its filename-escaped form | [
"Convert",
"a",
"project",
"or",
"version",
"name",
"to",
"its",
"filename",
"-",
"escaped",
"form"
]
| def to_filename(name):
"""Convert a project or version name to its filename-escaped form
Any '-' characters are currently replaced with '_'.
"""
return name.replace('-','_') | [
"def",
"to_filename",
"(",
"name",
")",
":",
"return",
"name",
".",
"replace",
"(",
"'-'",
",",
"'_'",
")"
]
| https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/distutils/command/install_egg_info.py#L72-L77 |
|
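`to_filename` mirrors the setuptools convention of escaping `-` to `_` so project and version strings are safe in egg-info filenames. A quick sanity check, assuming the definition above is in scope:

```python
assert to_filename("my-cool-package") == "my_cool_package"
assert to_filename("1.0-beta") == "1.0_beta"  # versions are escaped the same way
```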
miyosuda/TensorFlowAndroidDemo | 35903e0221aa5f109ea2dbef27f20b52e317f42d | jni-build/jni/include/tensorflow/models/image/cifar10/cifar10_train.py | python | train | () | Train CIFAR-10 for a number of steps. | Train CIFAR-10 for a number of steps. | [
"Train",
"CIFAR",
"-",
"10",
"for",
"a",
"number",
"of",
"steps",
"."
]
| def train():
"""Train CIFAR-10 for a number of steps."""
with tf.Graph().as_default():
global_step = tf.Variable(0, trainable=False)
# Get images and labels for CIFAR-10.
images, labels = cifar10.distorted_inputs()
# Build a Graph that computes the logits predictions from the
# inference model.
logits = cifar10.inference(images)
# Calculate loss.
loss = cifar10.loss(logits, labels)
# Build a Graph that trains the model with one batch of examples and
# updates the model parameters.
train_op = cifar10.train(loss, global_step)
# Create a saver.
saver = tf.train.Saver(tf.all_variables())
# Build the summary operation based on the TF collection of Summaries.
summary_op = tf.merge_all_summaries()
# Build an initialization operation to run below.
init = tf.initialize_all_variables()
# Start running operations on the Graph.
sess = tf.Session(config=tf.ConfigProto(
log_device_placement=FLAGS.log_device_placement))
sess.run(init)
# Start the queue runners.
tf.train.start_queue_runners(sess=sess)
summary_writer = tf.train.SummaryWriter(FLAGS.train_dir, sess.graph)
for step in xrange(FLAGS.max_steps):
start_time = time.time()
_, loss_value = sess.run([train_op, loss])
duration = time.time() - start_time
assert not np.isnan(loss_value), 'Model diverged with loss = NaN'
if step % 10 == 0:
num_examples_per_step = FLAGS.batch_size
examples_per_sec = num_examples_per_step / duration
sec_per_batch = float(duration)
format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
'sec/batch)')
print (format_str % (datetime.now(), step, loss_value,
examples_per_sec, sec_per_batch))
if step % 100 == 0:
summary_str = sess.run(summary_op)
summary_writer.add_summary(summary_str, step)
# Save the model checkpoint periodically.
if step % 1000 == 0 or (step + 1) == FLAGS.max_steps:
checkpoint_path = os.path.join(FLAGS.train_dir, 'model.ckpt')
saver.save(sess, checkpoint_path, global_step=step) | [
"def",
"train",
"(",
")",
":",
"with",
"tf",
".",
"Graph",
"(",
")",
".",
"as_default",
"(",
")",
":",
"global_step",
"=",
"tf",
".",
"Variable",
"(",
"0",
",",
"trainable",
"=",
"False",
")",
"# Get images and labels for CIFAR-10.",
"images",
",",
"labels",
"=",
"cifar10",
".",
"distorted_inputs",
"(",
")",
"# Build a Graph that computes the logits predictions from the",
"# inference model.",
"logits",
"=",
"cifar10",
".",
"inference",
"(",
"images",
")",
"# Calculate loss.",
"loss",
"=",
"cifar10",
".",
"loss",
"(",
"logits",
",",
"labels",
")",
"# Build a Graph that trains the model with one batch of examples and",
"# updates the model parameters.",
"train_op",
"=",
"cifar10",
".",
"train",
"(",
"loss",
",",
"global_step",
")",
"# Create a saver.",
"saver",
"=",
"tf",
".",
"train",
".",
"Saver",
"(",
"tf",
".",
"all_variables",
"(",
")",
")",
"# Build the summary operation based on the TF collection of Summaries.",
"summary_op",
"=",
"tf",
".",
"merge_all_summaries",
"(",
")",
"# Build an initialization operation to run below.",
"init",
"=",
"tf",
".",
"initialize_all_variables",
"(",
")",
"# Start running operations on the Graph.",
"sess",
"=",
"tf",
".",
"Session",
"(",
"config",
"=",
"tf",
".",
"ConfigProto",
"(",
"log_device_placement",
"=",
"FLAGS",
".",
"log_device_placement",
")",
")",
"sess",
".",
"run",
"(",
"init",
")",
"# Start the queue runners.",
"tf",
".",
"train",
".",
"start_queue_runners",
"(",
"sess",
"=",
"sess",
")",
"summary_writer",
"=",
"tf",
".",
"train",
".",
"SummaryWriter",
"(",
"FLAGS",
".",
"train_dir",
",",
"sess",
".",
"graph",
")",
"for",
"step",
"in",
"xrange",
"(",
"FLAGS",
".",
"max_steps",
")",
":",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"_",
",",
"loss_value",
"=",
"sess",
".",
"run",
"(",
"[",
"train_op",
",",
"loss",
"]",
")",
"duration",
"=",
"time",
".",
"time",
"(",
")",
"-",
"start_time",
"assert",
"not",
"np",
".",
"isnan",
"(",
"loss_value",
")",
",",
"'Model diverged with loss = NaN'",
"if",
"step",
"%",
"10",
"==",
"0",
":",
"num_examples_per_step",
"=",
"FLAGS",
".",
"batch_size",
"examples_per_sec",
"=",
"num_examples_per_step",
"/",
"duration",
"sec_per_batch",
"=",
"float",
"(",
"duration",
")",
"format_str",
"=",
"(",
"'%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '",
"'sec/batch)'",
")",
"print",
"(",
"format_str",
"%",
"(",
"datetime",
".",
"now",
"(",
")",
",",
"step",
",",
"loss_value",
",",
"examples_per_sec",
",",
"sec_per_batch",
")",
")",
"if",
"step",
"%",
"100",
"==",
"0",
":",
"summary_str",
"=",
"sess",
".",
"run",
"(",
"summary_op",
")",
"summary_writer",
".",
"add_summary",
"(",
"summary_str",
",",
"step",
")",
"# Save the model checkpoint periodically.",
"if",
"step",
"%",
"1000",
"==",
"0",
"or",
"(",
"step",
"+",
"1",
")",
"==",
"FLAGS",
".",
"max_steps",
":",
"checkpoint_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"FLAGS",
".",
"train_dir",
",",
"'model.ckpt'",
")",
"saver",
".",
"save",
"(",
"sess",
",",
"checkpoint_path",
",",
"global_step",
"=",
"step",
")"
]
| https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/models/image/cifar10/cifar10_train.py#L60-L122 |
||
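The loop above fixes a cadence: time every step, log every 10 steps, write summaries every 100, checkpoint every 1000 and at the end. A framework-agnostic sketch of that cadence (the callables are assumptions, not the TF 0.x API used above):

```python
import time

def training_loop(run_step, max_steps, log_every=10, ckpt_every=1000,
                  save_checkpoint=None):
    """Drive run_step() with the logging/checkpoint cadence used above.

    run_step: callable returning the scalar loss for one step (assumed).
    save_checkpoint: optional callable taking the current step (assumed).
    """
    for step in range(max_steps):
        start = time.time()
        loss = run_step()
        duration = time.time() - start
        if loss != loss:  # NaN compares unequal to itself
            raise RuntimeError("Model diverged with loss = NaN")
        if step % log_every == 0:
            print("step %d, loss = %.2f (%.3f sec/step)" % (step, loss, duration))
        if save_checkpoint and (step % ckpt_every == 0 or step + 1 == max_steps):
            save_checkpoint(step)
```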
baidu/bigflow | 449245016c0df7d1252e85581e588bfc60cefad3 | bigflow_python/python/bigflow/pipeline/pipeline_base.py | python | PipelineBase._register_extension | (self, extension) | Experimental interface to register an extension
:param extension:
:return: | Experimental interface to register an extension | [
"Experimental",
"interface",
"to",
"register",
"an",
"extension"
]
| def _register_extension(self, extension):
""" Experimental interface to register an extension
:param extension:
:return:
"""
from bigflow.extension import BigFlowExtension
if not isinstance(extension, BigFlowExtension):
raise error.BigflowRuntimeException("Extension must be subclass of "
"bigflow.extension.BigFlowExtension")
else:
extension.setup(self) # setup the extension
self._extensions.append(extension) | [
"def",
"_register_extension",
"(",
"self",
",",
"extension",
")",
":",
"from",
"bigflow",
".",
"extension",
"import",
"BigFlowExtension",
"if",
"not",
"isinstance",
"(",
"extension",
",",
"BigFlowExtension",
")",
":",
"raise",
"error",
".",
"BigflowRuntimeException",
"(",
"\"Extension must be subclass of \"",
"\"bigflow.extension.BigFlowExtension\"",
")",
"else",
":",
"extension",
".",
"setup",
"(",
"self",
")",
"# setup the extension",
"self",
".",
"_extensions",
".",
"append",
"(",
"extension",
")"
]
| https://github.com/baidu/bigflow/blob/449245016c0df7d1252e85581e588bfc60cefad3/bigflow_python/python/bigflow/pipeline/pipeline_base.py#L248-L260 |
||
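`_register_extension` is the classic plugin hook: type-check against the base class, call the extension's `setup` with the host, then record the instance. The same shape as a standalone sketch (class names are illustrative, not Bigflow's):

```python
class ExtensionBase:
    def setup(self, host):
        raise NotImplementedError

class Host:
    def __init__(self):
        self._extensions = []

    def register(self, extension):
        # Validate, initialize, then keep a reference: the same three
        # steps _register_extension performs.
        if not isinstance(extension, ExtensionBase):
            raise TypeError("extension must subclass ExtensionBase")
        extension.setup(self)
        self._extensions.append(extension)
```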
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/reduce4circleGUI.py | python | MainWindow.update_merge_message | (self, exp_number, scan_number, mode, message) | Update the merge-scan table for message such as error or etc.
Note: the string passed from PyQt message is of type unicode but not string!
:param exp_number:
:param scan_number:
:param mode:
:param message:
:return: | Update the merge-scan table for message such as error or etc.
Note: the string passed from PyQt message is of type unicode but not string!
:param exp_number:
:param scan_number:
:param mode:
:param message:
:return: | [
"Update",
"the",
"merge",
"-",
"scan",
"table",
"for",
"message",
"such",
"as",
"error",
"or",
"etc",
".",
"Note",
":",
"the",
"string",
"passed",
"from",
"PyQt",
"message",
"is",
"of",
"type",
"unicode",
"but",
"not",
"string!",
":",
"param",
"exp_number",
":",
":",
"param",
"scan_number",
":",
":",
"param",
"mode",
":",
":",
"param",
"message",
":",
":",
"return",
":"
]
| def update_merge_message(self, exp_number, scan_number, mode, message):
"""
Update the merge-scan table for message such as error or etc.
Note: the string passed from PyQt message is of type unicode but not string!
:param exp_number:
:param scan_number:
:param mode:
:param message:
:return:
"""
# check
assert isinstance(exp_number, int), 'Experiment number must be integer.'
assert isinstance(scan_number, int), 'Scan number must be integer.'
assert isinstance(mode, int), 'Mode %s must be integer but not %s.' \
'' % (str(mode), type(mode))
assert isinstance(message, str) or isinstance(message, unicode),\
'Message %s must be a string/unicode but not %s.' % (str(message), type(message))
# passed value from PyQt signal might be a unicode code
message = str(message)
try:
row_number = self.ui.tableWidget_mergeScans.get_row_by_scan(scan_number)
except RuntimeError as run_err:
self.pop_one_button_dialog(str(run_err))
return
# set intensity, state to table
if mode == 0:
# error message
self.ui.tableWidget_mergeScans.set_peak_intensity(row_number, peak_intensity=0., corrected_intensity=0.,
standard_error=0., integrate_method='simple')
self.ui.tableWidget_mergeScans.set_status(row_number=row_number, status=message)
# set peak value
status, ret_message = self._myControl.set_zero_peak_intensity(exp_number, scan_number)
if not status:
self.pop_one_button_dialog(ret_message)
elif mode == 1:
# merged workspace name
merged_ws_name = message
self.ui.tableWidget_mergeScans.set_ws_name(row_number=row_number, merged_md_name=merged_ws_name)
else:
raise RuntimeError('Peak-merging mode %d is not supported.' % mode) | [
"def",
"update_merge_message",
"(",
"self",
",",
"exp_number",
",",
"scan_number",
",",
"mode",
",",
"message",
")",
":",
"# check",
"assert",
"isinstance",
"(",
"exp_number",
",",
"int",
")",
",",
"'Experiment number must be integer.'",
"assert",
"isinstance",
"(",
"scan_number",
",",
"int",
")",
",",
"'Scan number must be integer.'",
"assert",
"isinstance",
"(",
"mode",
",",
"int",
")",
",",
"'Mode %s must be integer but not %s.'",
"''",
"%",
"(",
"str",
"(",
"mode",
")",
",",
"type",
"(",
"mode",
")",
")",
"assert",
"isinstance",
"(",
"message",
",",
"str",
")",
"or",
"isinstance",
"(",
"message",
",",
"unicode",
")",
",",
"'Message %s must be a string/unicode but not %s.'",
"%",
"(",
"str",
"(",
"message",
")",
",",
"type",
"(",
"message",
")",
")",
"# passed value from PyQt signal might be a unicode code",
"message",
"=",
"str",
"(",
"message",
")",
"try",
":",
"row_number",
"=",
"self",
".",
"ui",
".",
"tableWidget_mergeScans",
".",
"get_row_by_scan",
"(",
"scan_number",
")",
"except",
"RuntimeError",
"as",
"run_err",
":",
"self",
".",
"pop_one_button_dialog",
"(",
"str",
"(",
"run_err",
")",
")",
"return",
"# set intensity, state to table",
"if",
"mode",
"==",
"0",
":",
"# error message",
"self",
".",
"ui",
".",
"tableWidget_mergeScans",
".",
"set_peak_intensity",
"(",
"row_number",
",",
"peak_intensity",
"=",
"0.",
",",
"corrected_intensity",
"=",
"0.",
",",
"standard_error",
"=",
"0.",
",",
"integrate_method",
"=",
"'simple'",
")",
"self",
".",
"ui",
".",
"tableWidget_mergeScans",
".",
"set_status",
"(",
"row_number",
"=",
"row_number",
",",
"status",
"=",
"message",
")",
"# set peak value",
"status",
",",
"ret_message",
"=",
"self",
".",
"_myControl",
".",
"set_zero_peak_intensity",
"(",
"exp_number",
",",
"scan_number",
")",
"if",
"not",
"status",
":",
"self",
".",
"pop_one_button_dialog",
"(",
"ret_message",
")",
"elif",
"mode",
"==",
"1",
":",
"# merged workspace name",
"merged_ws_name",
"=",
"message",
"self",
".",
"ui",
".",
"tableWidget_mergeScans",
".",
"set_ws_name",
"(",
"row_number",
"=",
"row_number",
",",
"merged_md_name",
"=",
"merged_ws_name",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"'Peak-merging mode %d is not supported.'",
"%",
"mode",
")"
]
| https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/reduce4circleGUI.py#L4117-L4162 |
||
lammps/lammps | b75c3065430a75b1b5543a10e10f46d9b4c91913 | tools/i-pi/ipi/engine/barostats.py | python | Barostat.get_press | (self) | return np.trace(self.stress)/3.0 | Calculates the internal pressure. | Calculates the internal pressure. | [
"Calculates",
"the",
"internal",
"pressure",
"."
]
| def get_press(self):
"""Calculates the internal pressure."""
return np.trace(self.stress)/3.0 | [
"def",
"get_press",
"(",
"self",
")",
":",
"return",
"np",
".",
"trace",
"(",
"self",
".",
"stress",
")",
"/",
"3.0"
]
| https://github.com/lammps/lammps/blob/b75c3065430a75b1b5543a10e10f46d9b4c91913/tools/i-pi/ipi/engine/barostats.py#L203-L206 |
|
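`get_press` implements the standard mechanical relation: the scalar internal pressure is one third of the trace of the 3x3 stress tensor,

```latex
P = \tfrac{1}{3}\operatorname{Tr}(\boldsymbol{\sigma})
  = \tfrac{1}{3}\left(\sigma_{xx} + \sigma_{yy} + \sigma_{zz}\right)
```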
lilypond/lilypond | 2a14759372979f5b796ee802b0ee3bc15d28b06b | python/musicexp.py | python | Score.set_tempo | (self, tempo) | Set the tempo attribute of the Score.
This attribute can be used in L{print_ly} for the midi output (see L{musicxml.Sound}).
@param tempo: The value of the tempo, in beats per minute.
@type tempo: String | Set the tempo attribute of the Score.
This attribute can be used in L{print_ly} for the midi output (see L{musicxml.Sound}). | [
"Set",
"the",
"tempo",
"attribute",
"of",
"the",
"Score",
".",
"This",
"attribute",
"can",
"be",
"used",
"in",
"L",
"{",
"print_ly",
"}",
"for",
"the",
"midi",
"output",
"(",
"see",
"L",
"{",
"musicxml",
".",
"Sound",
"}",
")",
"."
]
| def set_tempo(self, tempo):
"""
Set the tempo attribute of the Score.
This attribute can be used in L{print_ly} for the midi output (see L{musicxml.Sound}).
@param tempo: The value of the tempo, in beats per minute.
@type tempo: String
"""
self.tempo = tempo | [
"def",
"set_tempo",
"(",
"self",
",",
"tempo",
")",
":",
"self",
".",
"tempo",
"=",
"tempo"
]
| https://github.com/lilypond/lilypond/blob/2a14759372979f5b796ee802b0ee3bc15d28b06b/python/musicexp.py#L2603-L2611 |
||
p4lang/behavioral-model | 81ce0163f0770c6b9d6056a28ce2e0cc035bb6e9 | tools/cpplint.py | python | FileInfo.BaseName | (self) | return self.Split()[1] | File base name - text after the final slash, before the final period. | File base name - text after the final slash, before the final period. | [
"File",
"base",
"name",
"-",
"text",
"after",
"the",
"final",
"slash",
"before",
"the",
"final",
"period",
"."
]
| def BaseName(self):
"""File base name - text after the final slash, before the final period."""
return self.Split()[1] | [
"def",
"BaseName",
"(",
"self",
")",
":",
"return",
"self",
".",
"Split",
"(",
")",
"[",
"1",
"]"
]
| https://github.com/p4lang/behavioral-model/blob/81ce0163f0770c6b9d6056a28ce2e0cc035bb6e9/tools/cpplint.py#L1638-L1640 |
|
RamadhanAmizudin/malware | 2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1 | Fuzzbunch/fuzzbunch/pyreadline/modes/basemode.py | python | BaseMode.forward_word | (self, e) | Move forward to the end of the next word. Words are composed of
letters and digits. | Move forward to the end of the next word. Words are composed of
letters and digits. | [
"Move",
"forward",
"to",
"the",
"end",
"of",
"the",
"next",
"word",
".",
"Words",
"are",
"composed",
"of",
"letters",
"and",
"digits",
"."
]
| def forward_word(self, e): # (M-f)
'''Move forward to the end of the next word. Words are composed of
letters and digits.'''
self.l_buffer.forward_word(self.argument_reset) | [
"def",
"forward_word",
"(",
"self",
",",
"e",
")",
":",
"# (M-f)",
"self",
".",
"l_buffer",
".",
"forward_word",
"(",
"self",
".",
"argument_reset",
")"
]
| https://github.com/RamadhanAmizudin/malware/blob/2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1/Fuzzbunch/fuzzbunch/pyreadline/modes/basemode.py#L253-L256 |
||
mongodb/mongo | d8ff665343ad29cf286ee2cf4a1960d29371937b | src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/FS.py | python | Entry.rfile | (self) | return File.rfile(self) | We're a generic Entry, but the caller is actually looking for
a File at this point, so morph into one. | We're a generic Entry, but the caller is actually looking for
a File at this point, so morph into one. | [
"We",
"re",
"a",
"generic",
"Entry",
"but",
"the",
"caller",
"is",
"actually",
"looking",
"for",
"a",
"File",
"at",
"this",
"point",
"so",
"morph",
"into",
"one",
"."
]
| def rfile(self):
"""We're a generic Entry, but the caller is actually looking for
a File at this point, so morph into one."""
self.__class__ = File
self._morph()
self.clear()
return File.rfile(self) | [
"def",
"rfile",
"(",
"self",
")",
":",
"self",
".",
"__class__",
"=",
"File",
"self",
".",
"_morph",
"(",
")",
"self",
".",
"clear",
"(",
")",
"return",
"File",
".",
"rfile",
"(",
"self",
")"
]
| https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/FS.py#L999-L1005 |
|
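The notable trick in `rfile` is `self.__class__ = File`: SCons retypes the node in place, so every existing reference to the generic `Entry` immediately becomes a `File`. A toy demonstration of that in-place morphing (toy classes, not SCons'):

```python
class GenericNode:
    def morph_to_file(self):
        # Rebinding __class__ retypes this exact object; all existing
        # references now see a FileNode.
        self.__class__ = FileNode
        self._init_file_state()
        return self

class FileNode(GenericNode):
    def _init_file_state(self):
        self.is_file = True

node = GenericNode()
same = node.morph_to_file()
assert same is node and isinstance(node, FileNode) and node.is_file
```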
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_gdi.py | python | PseudoDC.SetPalette | (*args, **kwargs) | return _gdi_.PseudoDC_SetPalette(*args, **kwargs) | SetPalette(self, Palette palette)
If this is a window DC or memory DC, assigns the given palette to the
window or bitmap associated with the DC. If the argument is
``wx.NullPalette``, the current palette is selected out of the device
context, and the original palette restored. | SetPalette(self, Palette palette) | [
"SetPalette",
"(",
"self",
"Palette",
"palette",
")"
]
| def SetPalette(*args, **kwargs):
"""
SetPalette(self, Palette palette)
If this is a window DC or memory DC, assigns the given palette to the
window or bitmap associated with the DC. If the argument is
``wx.NullPalette``, the current palette is selected out of the device
context, and the original palette restored.
"""
return _gdi_.PseudoDC_SetPalette(*args, **kwargs) | [
"def",
"SetPalette",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gdi_",
".",
"PseudoDC_SetPalette",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_gdi.py#L8429-L8438 |
|
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | tools/cr/cr/actions/action.py | python | Action.Skipping | (self, context) | return self.name == 'skip' | A method that is used to detect void or skip implementations.
Most actions have a skip version that you can select to indicate that you
want to not perform the action at all.
It is important that commands can detect this so they can modify the action
sequence if there are other changes that depend on it (for instance not
performing actions that were only there to produce the inputs of an action
that is being skipped).
Args:
context: the cr context to test within.
Returns:
True if this implementation is a skip action. | A method that is used to detect void or skip implementations. | [
"A",
"method",
"that",
"is",
"used",
"to",
"detect",
"void",
"or",
"skip",
"implementations",
"."
]
| def Skipping(self, context):
"""A method that is used to detect void or skip implementations.
Most actions have a skip version that you can select to indicate that you
want to not perform the action at all.
It is important that commands can detect this so they can modify the action
sequence if there are other changes that depend on it (for instance not
performing actions that were only there to produce the inputs of an action
that is being skipped).
Args:
context: the cr context to test within.
Returns:
True if this implementation is a skip action.
"""
return self.name == 'skip' | [
"def",
"Skipping",
"(",
"self",
",",
"context",
")",
":",
"return",
"self",
".",
"name",
"==",
"'skip'"
]
| https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/cr/cr/actions/action.py#L32-L47 |
|
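Command code is expected to call `Skipping` before wiring up an action sequence. A sketch of the pruning a caller might do (the function is illustrative; the real cr sequencing logic is more involved):

```python
def runnable_actions(planned, context):
    """Drop the actions whose selected implementation is the skip variant."""
    return [action for action in planned if not action.Skipping(context)]
```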
giuspen/cherrytree | 84712f206478fcf9acf30174009ad28c648c6344 | pygtk2/modules/tablez.py | python | TablesHandler.table_copy | (self, *args) | Copy Table | Copy Table | [
"Copy",
"Table"
]
| def table_copy(self, *args):
"""Copy Table"""
self.dad.object_set_selection(self.curr_table_anchor)
self.dad.sourceview.emit("copy-clipboard") | [
"def",
"table_copy",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"dad",
".",
"object_set_selection",
"(",
"self",
".",
"curr_table_anchor",
")",
"self",
".",
"dad",
".",
"sourceview",
".",
"emit",
"(",
"\"copy-clipboard\"",
")"
]
| https://github.com/giuspen/cherrytree/blob/84712f206478fcf9acf30174009ad28c648c6344/pygtk2/modules/tablez.py#L40-L43 |
||
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/mimetools.py | python | encode | (input, output, encoding) | Encode common content-transfer-encodings (base64, quopri, uuencode). | Encode common content-transfer-encodings (base64, quopri, uuencode). | [
"Encode",
"common",
"content",
"-",
"transfer",
"-",
"encodings",
"(",
"base64",
"quopri",
"uuencode",
")",
"."
]
| def encode(input, output, encoding):
"""Encode common content-transfer-encodings (base64, quopri, uuencode)."""
if encoding == 'base64':
import base64
return base64.encode(input, output)
if encoding == 'quoted-printable':
import quopri
return quopri.encode(input, output, 0)
if encoding in ('uuencode', 'x-uuencode', 'uue', 'x-uue'):
import uu
return uu.encode(input, output)
if encoding in ('7bit', '8bit'):
return output.write(input.read())
if encoding in encodetab:
pipethrough(input, encodetab[encoding], output)
else:
raise ValueError, \
'unknown Content-Transfer-Encoding: %s' % encoding | [
"def",
"encode",
"(",
"input",
",",
"output",
",",
"encoding",
")",
":",
"if",
"encoding",
"==",
"'base64'",
":",
"import",
"base64",
"return",
"base64",
".",
"encode",
"(",
"input",
",",
"output",
")",
"if",
"encoding",
"==",
"'quoted-printable'",
":",
"import",
"quopri",
"return",
"quopri",
".",
"encode",
"(",
"input",
",",
"output",
",",
"0",
")",
"if",
"encoding",
"in",
"(",
"'uuencode'",
",",
"'x-uuencode'",
",",
"'uue'",
",",
"'x-uue'",
")",
":",
"import",
"uu",
"return",
"uu",
".",
"encode",
"(",
"input",
",",
"output",
")",
"if",
"encoding",
"in",
"(",
"'7bit'",
",",
"'8bit'",
")",
":",
"return",
"output",
".",
"write",
"(",
"input",
".",
"read",
"(",
")",
")",
"if",
"encoding",
"in",
"encodetab",
":",
"pipethrough",
"(",
"input",
",",
"encodetab",
"[",
"encoding",
"]",
",",
"output",
")",
"else",
":",
"raise",
"ValueError",
",",
"'unknown Content-Transfer-Encoding: %s'",
"%",
"encoding"
]
| https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/mimetools.py#L176-L193 |
||
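`encode` is a thin dispatcher over the stdlib codec modules; `mimetools` itself is Python 2 only and was removed in Python 3, but the call it forwards to for the base64 branch still exists. The same operation on modern Python, with in-memory binary streams:

```python
import base64
import io

src = io.BytesIO(b"hello world")
dst = io.BytesIO()
base64.encode(src, dst)  # the legacy file-to-file API encode() delegates to
assert dst.getvalue() == b"aGVsbG8gd29ybGQ=\n"
```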
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | third_party/tlslite/tlslite/utils/RSAKey.py | python | RSAKey.writeXMLPublicKey | (self, indent='') | return Python_RSAKey(self.n, self.e).write(indent) | Return a string containing the key.
@rtype: str
@return: A string describing the public key, in XML format. | Return a string containing the key. | [
"Return",
"a",
"string",
"containing",
"the",
"key",
"."
]
| def writeXMLPublicKey(self, indent=''):
"""Return a string containing the key.
@rtype: str
@return: A string describing the public key, in XML format.
"""
return Python_RSAKey(self.n, self.e).write(indent) | [
"def",
"writeXMLPublicKey",
"(",
"self",
",",
"indent",
"=",
"''",
")",
":",
"return",
"Python_RSAKey",
"(",
"self",
".",
"n",
",",
"self",
".",
"e",
")",
".",
"write",
"(",
"indent",
")"
]
| https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/tlslite/tlslite/utils/RSAKey.py#L218-L224 |
|
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/closure_linter/closure_linter/indentation.py | python | IndentationRules.__init__ | (self) | Initializes the IndentationRules checker. | Initializes the IndentationRules checker. | [
"Initializes",
"the",
"IndentationRules",
"checker",
"."
]
| def __init__(self):
"""Initializes the IndentationRules checker."""
self._stack = []
# Map from line number to number of characters it is off in indentation.
self._start_index_offset = {} | [
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"_stack",
"=",
"[",
"]",
"# Map from line number to number of characters it is off in indentation.",
"self",
".",
"_start_index_offset",
"=",
"{",
"}"
]
| https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/closure_linter/closure_linter/indentation.py#L113-L118 |
||
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/IndirectDiffScan.py | python | IndirectDiffScan._get_temperature | (self, ws_name) | return None | Gets the sample temperature for a given workspace.
@param ws_name Name of workspace
@returns Temperature in Kelvin or None if not found | Gets the sample temperature for a given workspace. | [
"Gets",
"the",
"sample",
"temperature",
"for",
"a",
"given",
"workspace",
"."
]
| def _get_temperature(self, ws_name):
"""
Gets the sample temperature for a given workspace.
@param ws_name Name of workspace
@returns Temperature in Kelvin or None if not found
"""
instr, run_number = self._get_InstrRun(ws_name)
pad_num = config.getInstrument(instr).zeroPadding(int(run_number))
zero_padding = '0' * (pad_num - len(run_number))
run_name = instr + zero_padding + run_number
log_filename = run_name.upper() + '.log'
run = mtd[ws_name].getRun()
if self._sample_log_name in run:
# Look for temperature in logs in workspace
tmp = run[self._sample_log_name].value
value_action = {'last_value': lambda x: x[-1],
'average': lambda x: x.mean()
}
temp = value_action[self._sample_log_value](tmp)
logger.debug('Temperature %d K found for run: %s' % (temp, run_name))
return temp
else:
# Logs not in workspace, try loading from file
logger.information('Log parameter not found in workspace. Searching for log file.')
log_path = FileFinder.getFullPath(log_filename)
if log_path != '':
# Get temperature from log file
LoadLog(Workspace=ws_name, Filename=log_path)
run_logs = mtd[ws_name].getRun()
if self._sample_log_name in run_logs:
tmp = run_logs[self._sample_log_name].value
temp = tmp[-1]
logger.debug('Temperature %d K found for run: %s' % (temp, run_name))
return temp
else:
logger.warning('Log entry %s for run %s not found' % (self._sample_log_name, run_name))
else:
logger.warning('Log file for run %s not found' % run_name)
# Can't find log file
logger.warning('No temperature found for run: %s' % run_name)
return None | [
"def",
"_get_temperature",
"(",
"self",
",",
"ws_name",
")",
":",
"instr",
",",
"run_number",
"=",
"self",
".",
"_get_InstrRun",
"(",
"ws_name",
")",
"pad_num",
"=",
"config",
".",
"getInstrument",
"(",
"instr",
")",
".",
"zeroPadding",
"(",
"int",
"(",
"run_number",
")",
")",
"zero_padding",
"=",
"'0'",
"*",
"(",
"pad_num",
"-",
"len",
"(",
"run_number",
")",
")",
"run_name",
"=",
"instr",
"+",
"zero_padding",
"+",
"run_number",
"log_filename",
"=",
"run_name",
".",
"upper",
"(",
")",
"+",
"'.log'",
"run",
"=",
"mtd",
"[",
"ws_name",
"]",
".",
"getRun",
"(",
")",
"if",
"self",
".",
"_sample_log_name",
"in",
"run",
":",
"# Look for temperature in logs in workspace",
"tmp",
"=",
"run",
"[",
"self",
".",
"_sample_log_name",
"]",
".",
"value",
"value_action",
"=",
"{",
"'last_value'",
":",
"lambda",
"x",
":",
"x",
"[",
"-",
"1",
"]",
",",
"'average'",
":",
"lambda",
"x",
":",
"x",
".",
"mean",
"(",
")",
"}",
"temp",
"=",
"value_action",
"[",
"self",
".",
"_sample_log_value",
"]",
"(",
"tmp",
")",
"logger",
".",
"debug",
"(",
"'Temperature %d K found for run: %s'",
"%",
"(",
"temp",
",",
"run_name",
")",
")",
"return",
"temp",
"else",
":",
"# Logs not in workspace, try loading from file",
"logger",
".",
"information",
"(",
"'Log parameter not found in workspace. Searching for log file.'",
")",
"log_path",
"=",
"FileFinder",
".",
"getFullPath",
"(",
"log_filename",
")",
"if",
"log_path",
"!=",
"''",
":",
"# Get temperature from log file",
"LoadLog",
"(",
"Workspace",
"=",
"ws_name",
",",
"Filename",
"=",
"log_path",
")",
"run_logs",
"=",
"mtd",
"[",
"ws_name",
"]",
".",
"getRun",
"(",
")",
"if",
"self",
".",
"_sample_log_name",
"in",
"run_logs",
":",
"tmp",
"=",
"run_logs",
"[",
"self",
".",
"_sample_log_name",
"]",
".",
"value",
"temp",
"=",
"tmp",
"[",
"-",
"1",
"]",
"logger",
".",
"debug",
"(",
"'Temperature %d K found for run: %s'",
"%",
"(",
"temp",
",",
"run_name",
")",
")",
"return",
"temp",
"else",
":",
"logger",
".",
"warning",
"(",
"'Log entry %s for run %s not found'",
"%",
"(",
"self",
".",
"_sample_log_name",
",",
"run_name",
")",
")",
"else",
":",
"logger",
".",
"warning",
"(",
"'Log file for run %s not found'",
"%",
"run_name",
")",
"# Can't find log file",
"logger",
".",
"warning",
"(",
"'No temperature found for run: %s'",
"%",
"run_name",
")",
"return",
"None"
]
| https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/IndirectDiffScan.py#L173-L221 |
|
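One detail worth lifting out of `_get_temperature`: instead of branching on the log-value mode, it indexes a dict of callables (`value_action`) and applies the chosen reducer. That dispatch-table idiom in isolation:

```python
# Reducing a series of sample-log readings the way _get_temperature does.
readings = [293.1, 293.4, 293.2]

value_action = {
    "last_value": lambda xs: xs[-1],
    "average": lambda xs: sum(xs) / len(xs),  # the original uses numpy's .mean()
}

mode = "average"  # set from an algorithm property in the original
temperature = value_action[mode](readings)
```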
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/pythonwin/pywin/Demos/cmdserver.py | python | ThreadWriter.write | (self, str) | Write to the current thread's writer, default sys.stdout | Write to the current thread's writer, default sys.stdout | [
"Write",
"to",
"the",
"current",
"thread",
"s",
"writer",
"default",
"sys",
".",
"stdout"
]
| def write(self, str):
"Write to the current thread's writer, default sys.stdout"
self.getwriter().write(str) | [
"def",
"write",
"(",
"self",
",",
"str",
")",
":",
"self",
".",
"getwriter",
"(",
")",
".",
"write",
"(",
"str",
")"
]
| https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/pythonwin/pywin/Demos/cmdserver.py#L43-L45 |
||
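`ThreadWriter.write` forwards to a per-thread writer so each worker thread's output can be routed separately. A minimal sketch of the same redirection built on `threading.local`; the pywin32 demo's internals differ:

```python
import sys
import threading

class ThreadLocalWriter:
    """Route write() to a per-thread target, defaulting to the real stdout."""

    def __init__(self):
        self._local = threading.local()

    def setwriter(self, writer):
        self._local.writer = writer

    def write(self, text):
        # Fall back to sys.__stdout__ so installing an instance as
        # sys.stdout cannot recurse into itself.
        getattr(self._local, "writer", sys.__stdout__).write(text)
```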
gem5/gem5 | 141cc37c2d4b93959d4c249b8f7e6a8b2ef75338 | util/gem5art/artifact/gem5art/artifact/_artifactdb.py | python | ArtifactDB.searchByType | (self, typ: str, limit: int) | Returns an iterable of all artifacts in the database that match
some type. Note: Not all DB implementations will implement this
function | Returns an iterable of all artifacts in the database that match
some type. Note: Not all DB implementations will implement this
function | [
"Returns",
"an",
"iterable",
"of",
"all",
"artifacts",
"in",
"the",
"database",
"that",
"match",
"some",
"type",
".",
"Note",
":",
"Not",
"all",
"DB",
"implementations",
"will",
"implement",
"this",
"function"
]
| def searchByType(self, typ: str, limit: int) -> Iterable[Dict[str, Any]]:
"""Returns an iterable of all artifacts in the database that match
some type. Note: Not all DB implementations will implement this
function"""
raise NotImplementedError() | [
"def",
"searchByType",
"(",
"self",
",",
"typ",
":",
"str",
",",
"limit",
":",
"int",
")",
"->",
"Iterable",
"[",
"Dict",
"[",
"str",
",",
"Any",
"]",
"]",
":",
"raise",
"NotImplementedError",
"(",
")"
]
| https://github.com/gem5/gem5/blob/141cc37c2d4b93959d4c249b8f7e6a8b2ef75338/util/gem5art/artifact/gem5art/artifact/_artifactdb.py#L102-L106 |
||
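`searchByType` belongs to an abstract storage interface: concrete backends override it, and `raise NotImplementedError()` documents the contract. A toy in-memory backend showing what an implementation must provide (not gem5art's real backend):

```python
import itertools
from typing import Any, Dict, Iterable, List

class InMemoryArtifactDB:
    def __init__(self) -> None:
        self._docs: List[Dict[str, Any]] = []

    def put(self, doc: Dict[str, Any]) -> None:
        self._docs.append(doc)

    def searchByType(self, typ: str, limit: int) -> Iterable[Dict[str, Any]]:
        matches = (d for d in self._docs if d.get("type") == typ)
        return itertools.islice(matches, limit)
```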
NetSys/bess | ae52fc5804290fc3116daf2aef52226fafcedf5d | pybess/protobuf_to_dict.py | python | dict_to_protobuf | (pb_klass_or_instance, values, type_callable_map=REVERSE_TYPE_CALLABLE_MAP, strict=True) | return _dict_to_protobuf(instance, values, type_callable_map, strict) | Populates a protobuf model from a dictionary.
:param pb_klass_or_instance: a protobuf message class, or a protobuf instance
:type pb_klass_or_instance: a type or instance of a subclass of google.protobuf.message.Message
:param dict values: a dictionary of values. Repeated and nested values are
fully supported.
:param dict type_callable_map: a mapping of protobuf types to callables for setting
values on the target instance.
:param bool strict: complain if keys in the map are not fields on the message. | Populates a protobuf model from a dictionary. | [
"Populates",
"a",
"protobuf",
"model",
"from",
"a",
"dictionary",
"."
]
| def dict_to_protobuf(pb_klass_or_instance, values, type_callable_map=REVERSE_TYPE_CALLABLE_MAP, strict=True):
"""Populates a protobuf model from a dictionary.
:param pb_klass_or_instance: a protobuf message class, or a protobuf instance
:type pb_klass_or_instance: a type or instance of a subclass of google.protobuf.message.Message
:param dict values: a dictionary of values. Repeated and nested values are
fully supported.
:param dict type_callable_map: a mapping of protobuf types to callables for setting
values on the target instance.
:param bool strict: complain if keys in the map are not fields on the message.
"""
if isinstance(pb_klass_or_instance, Message):
instance = pb_klass_or_instance
else:
instance = pb_klass_or_instance()
return _dict_to_protobuf(instance, values, type_callable_map, strict) | [
"def",
"dict_to_protobuf",
"(",
"pb_klass_or_instance",
",",
"values",
",",
"type_callable_map",
"=",
"REVERSE_TYPE_CALLABLE_MAP",
",",
"strict",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"pb_klass_or_instance",
",",
"Message",
")",
":",
"instance",
"=",
"pb_klass_or_instance",
"else",
":",
"instance",
"=",
"pb_klass_or_instance",
"(",
")",
"return",
"_dict_to_protobuf",
"(",
"instance",
",",
"values",
",",
"type_callable_map",
",",
"strict",
")"
]
| https://github.com/NetSys/bess/blob/ae52fc5804290fc3116daf2aef52226fafcedf5d/pybess/protobuf_to_dict.py#L136-L151 |
|
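Usage of `dict_to_protobuf`, assuming some compiled message class; `person_pb2.Person` is a placeholder for any generated message with a string `name` and int32 `id` field:

```python
from person_pb2 import Person  # hypothetical generated module

msg = dict_to_protobuf(Person, {"name": "Ada", "id": 7})
assert msg.name == "Ada" and msg.id == 7

# Passing an existing instance fills it in place instead of constructing one:
existing = Person()
dict_to_protobuf(existing, {"name": "Grace"}, strict=False)
```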
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | deps/src/libxml2-2.9.1/python/libxml2.py | python | newTextReaderFilename | (URI) | return xmlTextReader(_obj=ret) | Create an xmlTextReader structure fed with the resource at
@URI | Create an xmlTextReader structure fed with the resource at | [
"Create",
"an",
"xmlTextReader",
"structure",
"fed",
"with",
"the",
"resource",
"at"
]
| def newTextReaderFilename(URI):
"""Create an xmlTextReader structure fed with the resource at
@URI """
ret = libxml2mod.xmlNewTextReaderFilename(URI)
if ret is None:raise treeError('xmlNewTextReaderFilename() failed')
return xmlTextReader(_obj=ret) | [
"def",
"newTextReaderFilename",
"(",
"URI",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlNewTextReaderFilename",
"(",
"URI",
")",
"if",
"ret",
"is",
"None",
":",
"raise",
"treeError",
"(",
"'xmlNewTextReaderFilename() failed'",
")",
"return",
"xmlTextReader",
"(",
"_obj",
"=",
"ret",
")"
]
| https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L1941-L1946 |
|
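The returned `xmlTextReader` drives a pull-parsing loop: `Read()` yields 1 while nodes remain, 0 at end of input, and -1 on error. A typical loop over the reader this factory returns (`doc.xml` is an illustrative path):

```python
import libxml2

reader = libxml2.newTextReaderFilename("doc.xml")
ret = reader.Read()
while ret == 1:
    if reader.NodeType() == 1:  # element start
        print(reader.Name())
    ret = reader.Read()
```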
ApolloAuto/apollo-platform | 86d9dc6743b496ead18d597748ebabd34a513289 | ros/third_party/lib_aarch64/python2.7/dist-packages/rosdep2/platforms/osx.py | python | brew_detect | (resolved, exec_fn=None) | return list(filter(is_installed, resolved)) | Given a list of resolutions, return the list of installed resolutions.
:param resolved: List of HomebrewResolution objects
:returns: Filtered list of HomebrewResolution objects | Given a list of resolutions, return the list of installed resolutions. | [
"Given",
"a",
"list",
"of",
"resolutions",
"return",
"the",
"list",
"of",
"installed",
"resolutions",
"."
]
| def brew_detect(resolved, exec_fn=None):
"""Given a list of resolutions, return the list of installed resolutions.
:param resolved: List of HomebrewResolution objects
:returns: Filtered list of HomebrewResolution objects
"""
if exec_fn is None:
exec_fn = read_stdout
std_out = exec_fn(['brew', 'list'])
installed_formulae = std_out.split()
def is_installed(r):
# TODO: Does not check installed version (stable, devel, HEAD)
# TODO: Does not check origin (Tap) of formula
# TODO: Does not handle excluding options (e.g. specifying
# --without-foo for --with-foo option)
# fast fail with a quick check first, then slower check if
# really linked and for options
if not brew_strip_pkg_name(r.package) in installed_formulae:
return False
std_out = exec_fn(['brew', 'info', r.package, '--json=v1'])
try:
pkg_info = json.loads(std_out)
pkg_info = pkg_info[0]
linked_version = pkg_info['linked_keg']
if not linked_version:
return False
for spec in pkg_info['installed']:
if spec['version'] == linked_version:
installed_options = spec['used_options']
break
except (ValueError, TypeError):
e_type, e, tb = sys.exc_info()
raise RosdepInternalError(
e, """Error while parsing brew info for '{0}'
* Output of `brew info {0} --json=v1`:
{1}
* Error while parsing:
{2}""".format(r.package, std_out, "".join(traceback.format_exception(e_type, e, tb))))
if set(r.options) <= set(installed_options):
return True
else:
return False
# preserve order
return list(filter(is_installed, resolved)) | [
"def",
"brew_detect",
"(",
"resolved",
",",
"exec_fn",
"=",
"None",
")",
":",
"if",
"exec_fn",
"is",
"None",
":",
"exec_fn",
"=",
"read_stdout",
"std_out",
"=",
"exec_fn",
"(",
"[",
"'brew'",
",",
"'list'",
"]",
")",
"installed_formulae",
"=",
"std_out",
".",
"split",
"(",
")",
"def",
"is_installed",
"(",
"r",
")",
":",
"# TODO: Does not check installed version (stable, devel, HEAD)",
"# TODO: Does not check origin (Tap) of formula",
"# TODO: Does not handle excluding options (e.g. specifying",
"# --without-foo for --with-foo option)",
"# fast fail with a quick check first, then slower check if",
"# really linked and for options",
"if",
"not",
"brew_strip_pkg_name",
"(",
"r",
".",
"package",
")",
"in",
"installed_formulae",
":",
"return",
"False",
"std_out",
"=",
"exec_fn",
"(",
"[",
"'brew'",
",",
"'info'",
",",
"r",
".",
"package",
",",
"'--json=v1'",
"]",
")",
"try",
":",
"pkg_info",
"=",
"json",
".",
"loads",
"(",
"std_out",
")",
"pkg_info",
"=",
"pkg_info",
"[",
"0",
"]",
"linked_version",
"=",
"pkg_info",
"[",
"'linked_keg'",
"]",
"if",
"not",
"linked_version",
":",
"return",
"False",
"for",
"spec",
"in",
"pkg_info",
"[",
"'installed'",
"]",
":",
"if",
"spec",
"[",
"'version'",
"]",
"==",
"linked_version",
":",
"installed_options",
"=",
"spec",
"[",
"'used_options'",
"]",
"break",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"e_type",
",",
"e",
",",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"raise",
"RosdepInternalError",
"(",
"e",
",",
"\"\"\"Error while parsing brew info for '{0}'\n * Output of `brew info {0} --json=v1`:\n {1}\n * Error while parsing:\n {2}\"\"\"",
".",
"format",
"(",
"r",
".",
"package",
",",
"std_out",
",",
"\"\"",
".",
"join",
"(",
"traceback",
".",
"format_exception",
"(",
"e_type",
",",
"e",
",",
"tb",
")",
")",
")",
")",
"if",
"set",
"(",
"r",
".",
"options",
")",
"<=",
"set",
"(",
"installed_options",
")",
":",
"return",
"True",
"else",
":",
"return",
"False",
"# preserve order",
"return",
"list",
"(",
"filter",
"(",
"is_installed",
",",
"resolved",
")",
")"
]
| https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_aarch64/python2.7/dist-packages/rosdep2/platforms/osx.py#L160-L208 |
|
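The slow path of `brew_detect` shells out to `brew info <formula> --json=v1` and reads `linked_keg`, `installed`, and `used_options` from the JSON. A trimmed sketch of just that probe (assumes Homebrew is installed and the v1 JSON layout the code above relies on):

```python
import json
import subprocess

def brew_linked_version(formula):
    """Return the linked version of a formula, or None if it is not linked."""
    out = subprocess.check_output(["brew", "info", formula, "--json=v1"])
    info = json.loads(out)[0]
    return info["linked_keg"] or None
```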
tensorflow/io | 92b44e180674a8af0e12e405530f7343e3e693e4 | tensorflow_io/python/ops/io_tensor.py | python | IOTensor.from_tiff | (cls, filename, **kwargs) | Creates an `IOTensor` from a tiff file.
Note a tiff file may consist of multiple images with different shapes.
Args:
filename: A string, the filename of a tiff file.
name: A name prefix for the IOTensor (optional).
Returns:
An `IOTensor`. | Creates an `IOTensor` from a tiff file. | [
"Creates",
"an",
"IOTensor",
"from",
"a",
"tiff",
"file",
"."
]
| def from_tiff(cls, filename, **kwargs):
"""Creates an `IOTensor` from a tiff file.
Note a tiff file may consist of multiple images with different shapes.
Args:
filename: A string, the filename of a tiff file.
name: A name prefix for the IOTensor (optional).
Returns:
An `IOTensor`.
"""
with tf.name_scope(kwargs.get("name", "IOFromTIFF")):
return tiff_io_tensor_ops.TIFFIOTensor(filename, internal=True) | [
"def",
"from_tiff",
"(",
"cls",
",",
"filename",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"tf",
".",
"name_scope",
"(",
"kwargs",
".",
"get",
"(",
"\"name\"",
",",
"\"IOFromTIFF\"",
")",
")",
":",
"return",
"tiff_io_tensor_ops",
".",
"TIFFIOTensor",
"(",
"filename",
",",
"internal",
"=",
"True",
")"
]
| https://github.com/tensorflow/io/blob/92b44e180674a8af0e12e405530f7343e3e693e4/tensorflow_io/python/ops/io_tensor.py#L444-L458 |
||
google/nucleus | 68d3947fafba1337f294c0668a6e1c7f3f1273e3 | nucleus/util/ranges.py | python | parse_literal | (region_literal, contig_map=None) | Parses a Range from a string representation like chr:start-end.
The region literal must conform to the following pattern:
chromosome:start-end
chromosome:position
chromosome [if contig_map is provided]
chromosome can be any non-empty string without whitespace. start and end must
both be positive integers. They can contain commas for readability. start and
end are positions not offsets, so start == 1 means an offset of zero. If only
a single position is provided, this creates a 1 bp interval starting at
position - 1 and ending at position.
Inspired by the samtools region specification:
http://www.htslib.org/doc/samtools.html
Args:
region_literal: str. The literal to parse.
contig_map: An optional dictionary mapping from contig names to ContigInfo
protobufs. If provided, allows literals of the format "contig_name", which
will be parsed into a Range with reference_name=contig_name, start=0,
end=n_bases where n_bases comes from the ContigInfo.
Returns:
nucleus.genomics.v1.Range.
Raises:
ValueError: if region_literal cannot be parsed. | Parses a Range from a string representation like chr:start-end. | [
"Parses",
"a",
"Range",
"from",
"a",
"string",
"representation",
"like",
"chr",
":",
"start",
"-",
"end",
"."
]
| def parse_literal(region_literal, contig_map=None):
"""Parses a Range from a string representation like chr:start-end.
The region literal must conform to the following pattern:
chromosome:start-end
chromosome:position
chromosome [if contig_map is provided]
chromosome can be any non-empty string without whitespace. start and end must
both be positive integers. They can contain commas for readability. start and
end are positions not offsets, so start == 1 means an offset of zero. If only
a single position is provided, this creates a 1 bp interval starting at
position - 1 and ending at position.
Inspired by the samtools region specification:
http://www.htslib.org/doc/samtools.html
Args:
region_literal: str. The literal to parse.
contig_map: An optional dictionary mapping from contig names to ContigInfo
protobufs. If provided, allows literals of the format "contig_name", which
will be parsed into a Range with reference_name=contig_name, start=0,
end=n_bases where n_bases comes from the ContigInfo.
Returns:
nucleus.genomics.v1.Range.
Raises:
ValueError: if region_literal cannot be parsed.
"""
def parse_position(pos_str):
return int(pos_str.replace(',', ''))
matched = _REGION_LITERAL_REGEXP.match(region_literal)
if matched:
chrom, start, end = matched.groups()
return make_range(chrom, parse_position(start) - 1, parse_position(end))
matched = _POSITION_LITERAL_REGEXP.match(region_literal)
if matched:
chrom, pos = matched.groups()
pos = parse_position(pos)
return make_range(chrom, pos - 1, pos)
if contig_map and region_literal in contig_map:
# If the region_literals is an exact contig name like chr1 or MT return a
# range over the entire contig.
return make_range(region_literal, 0, contig_map[region_literal].n_bases)
raise ValueError(
'Could not parse "{}" as a region literal. Region literals '
'should have the form "chr:start-stop" or "chr:start" or '
'just "chr". A common error is to use the "chr" prefix on '
'inputs that don\'t have it, or vice-versa.'.format(region_literal)) | [
"def",
"parse_literal",
"(",
"region_literal",
",",
"contig_map",
"=",
"None",
")",
":",
"def",
"parse_position",
"(",
"pos_str",
")",
":",
"return",
"int",
"(",
"pos_str",
".",
"replace",
"(",
"','",
",",
"''",
")",
")",
"matched",
"=",
"_REGION_LITERAL_REGEXP",
".",
"match",
"(",
"region_literal",
")",
"if",
"matched",
":",
"chrom",
",",
"start",
",",
"end",
"=",
"matched",
".",
"groups",
"(",
")",
"return",
"make_range",
"(",
"chrom",
",",
"parse_position",
"(",
"start",
")",
"-",
"1",
",",
"parse_position",
"(",
"end",
")",
")",
"matched",
"=",
"_POSITION_LITERAL_REGEXP",
".",
"match",
"(",
"region_literal",
")",
"if",
"matched",
":",
"chrom",
",",
"pos",
"=",
"matched",
".",
"groups",
"(",
")",
"pos",
"=",
"parse_position",
"(",
"pos",
")",
"return",
"make_range",
"(",
"chrom",
",",
"pos",
"-",
"1",
",",
"pos",
")",
"if",
"contig_map",
"and",
"region_literal",
"in",
"contig_map",
":",
"# If the region_literals is an exact contig name like chr1 or MT return a",
"# range over the entire contig.",
"return",
"make_range",
"(",
"region_literal",
",",
"0",
",",
"contig_map",
"[",
"region_literal",
"]",
".",
"n_bases",
")",
"raise",
"ValueError",
"(",
"'Could not parse \"{}\" as a region literal. Region literals '",
"'should have the form \"chr:start-stop\" or \"chr:start\" or '",
"'just \"chr\". A common error is to use the \"chr\" prefix on '",
"'inputs that don\\'t have it, or vice-versa.'",
".",
"format",
"(",
"region_literal",
")",
")"
]
| https://github.com/google/nucleus/blob/68d3947fafba1337f294c0668a6e1c7f3f1273e3/nucleus/util/ranges.py#L519-L573 |
||
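The grammar in action; each literal maps to a half-open, 0-based interval (assumes `parse_literal` from the module above is importable):

```python
r = parse_literal("chr1:1,000-2,000")  # commas are allowed for readability
assert (r.reference_name, r.start, r.end) == ("chr1", 999, 2000)

r = parse_literal("chr2:5")  # a single position becomes a 1 bp interval
assert (r.reference_name, r.start, r.end) == ("chr2", 4, 5)

# With a contig_map of name -> ContigInfo, a bare "chr2" would yield the
# whole contig: start=0, end=n_bases.
```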
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_controls.py | python | Gauge.GetBezelFace | (*args, **kwargs) | return _controls_.Gauge_GetBezelFace(*args, **kwargs) | GetBezelFace(self) -> int | GetBezelFace(self) -> int | [
"GetBezelFace",
"(",
"self",
")",
"-",
">",
"int"
]
| def GetBezelFace(*args, **kwargs):
"""GetBezelFace(self) -> int"""
return _controls_.Gauge_GetBezelFace(*args, **kwargs) | [
"def",
"GetBezelFace",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"Gauge_GetBezelFace",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_controls.py#L783-L785 |
|
SFTtech/openage | d6a08c53c48dc1e157807471df92197f6ca9e04d | openage/convert/processor/conversion/aoc/ability_subprocessor.py | python | AoCAbilitySubprocessor.research_ability | (line) | return ability_forward_ref | Adds the Research ability to a line.
:param line: Unit/Building line that gets the ability.
:type line: ...dataformat.converter_object.ConverterObjectGroup
:returns: The forward reference for the ability.
:rtype: ...dataformat.forward_ref.ForwardRef | Adds the Research ability to a line. | [
"Adds",
"the",
"Research",
"ability",
"to",
"a",
"line",
"."
]
| def research_ability(line):
"""
Adds the Research ability to a line.
:param line: Unit/Building line that gets the ability.
:type line: ...dataformat.converter_object.ConverterObjectGroup
:returns: The forward reference for the ability.
:rtype: ...dataformat.forward_ref.ForwardRef
"""
current_unit_id = line.get_head_unit_id()
dataset = line.data
api_objects = dataset.nyan_api_objects
name_lookup_dict = internal_name_lookups.get_entity_lookups(dataset.game_version)
tech_lookup_dict = internal_name_lookups.get_tech_lookups(dataset.game_version)
game_entity_name = name_lookup_dict[current_unit_id][0]
ability_ref = f"{game_entity_name}.Research"
ability_raw_api_object = RawAPIObject(ability_ref,
"Research",
dataset.nyan_api_objects)
ability_raw_api_object.add_raw_parent("engine.ability.type.Research")
ability_location = ForwardRef(line, game_entity_name)
ability_raw_api_object.set_location(ability_location)
line.add_raw_api_object(ability_raw_api_object)
# Diplomacy settings
property_ref = f"{ability_ref}.Diplomatic"
property_raw_api_object = RawAPIObject(property_ref,
"Diplomatic",
dataset.nyan_api_objects)
property_raw_api_object.add_raw_parent("engine.ability.property.type.Diplomatic")
property_location = ForwardRef(line, ability_ref)
property_raw_api_object.set_location(property_location)
line.add_raw_api_object(property_raw_api_object)
diplomatic_stances = [dataset.nyan_api_objects["engine.util.diplomatic_stance.type.Self"]]
property_raw_api_object.add_raw_member("stances", diplomatic_stances,
"engine.ability.property.type.Diplomatic")
property_forward_ref = ForwardRef(line, property_ref)
properties = {
api_objects["engine.ability.property.type.Diplomatic"]: property_forward_ref
}
ability_raw_api_object.add_raw_member("properties",
properties,
"engine.ability.Ability")
researchables_set = []
for researchable in line.researches:
if researchable.is_unique():
# Skip this because unique techs are handled by civs
continue
# ResearchableTech objects are created for each unit/building
# line individually to avoid duplicates. We just point to the
# raw API objects here.
researchable_id = researchable.get_id()
researchable_name = tech_lookup_dict[researchable_id][0]
raw_api_object_ref = f"{researchable_name}.ResearchableTech"
researchable_forward_ref = ForwardRef(researchable,
raw_api_object_ref)
researchables_set.append(researchable_forward_ref)
ability_raw_api_object.add_raw_member("researchables", researchables_set,
"engine.ability.type.Research")
ability_forward_ref = ForwardRef(line, ability_raw_api_object.get_id())
return ability_forward_ref | [
"def",
"research_ability",
"(",
"line",
")",
":",
"current_unit_id",
"=",
"line",
".",
"get_head_unit_id",
"(",
")",
"dataset",
"=",
"line",
".",
"data",
"api_objects",
"=",
"dataset",
".",
"nyan_api_objects",
"name_lookup_dict",
"=",
"internal_name_lookups",
".",
"get_entity_lookups",
"(",
"dataset",
".",
"game_version",
")",
"tech_lookup_dict",
"=",
"internal_name_lookups",
".",
"get_tech_lookups",
"(",
"dataset",
".",
"game_version",
")",
"game_entity_name",
"=",
"name_lookup_dict",
"[",
"current_unit_id",
"]",
"[",
"0",
"]",
"ability_ref",
"=",
"f\"{game_entity_name}.Research\"",
"ability_raw_api_object",
"=",
"RawAPIObject",
"(",
"ability_ref",
",",
"\"Research\"",
",",
"dataset",
".",
"nyan_api_objects",
")",
"ability_raw_api_object",
".",
"add_raw_parent",
"(",
"\"engine.ability.type.Research\"",
")",
"ability_location",
"=",
"ForwardRef",
"(",
"line",
",",
"game_entity_name",
")",
"ability_raw_api_object",
".",
"set_location",
"(",
"ability_location",
")",
"line",
".",
"add_raw_api_object",
"(",
"ability_raw_api_object",
")",
"# Diplomacy settings",
"property_ref",
"=",
"f\"{ability_ref}.Diplomatic\"",
"property_raw_api_object",
"=",
"RawAPIObject",
"(",
"property_ref",
",",
"\"Diplomatic\"",
",",
"dataset",
".",
"nyan_api_objects",
")",
"property_raw_api_object",
".",
"add_raw_parent",
"(",
"\"engine.ability.property.type.Diplomatic\"",
")",
"property_location",
"=",
"ForwardRef",
"(",
"line",
",",
"ability_ref",
")",
"property_raw_api_object",
".",
"set_location",
"(",
"property_location",
")",
"line",
".",
"add_raw_api_object",
"(",
"property_raw_api_object",
")",
"diplomatic_stances",
"=",
"[",
"dataset",
".",
"nyan_api_objects",
"[",
"\"engine.util.diplomatic_stance.type.Self\"",
"]",
"]",
"property_raw_api_object",
".",
"add_raw_member",
"(",
"\"stances\"",
",",
"diplomatic_stances",
",",
"\"engine.ability.property.type.Diplomatic\"",
")",
"property_forward_ref",
"=",
"ForwardRef",
"(",
"line",
",",
"property_ref",
")",
"properties",
"=",
"{",
"api_objects",
"[",
"\"engine.ability.property.type.Diplomatic\"",
"]",
":",
"property_forward_ref",
"}",
"ability_raw_api_object",
".",
"add_raw_member",
"(",
"\"properties\"",
",",
"properties",
",",
"\"engine.ability.Ability\"",
")",
"researchables_set",
"=",
"[",
"]",
"for",
"researchable",
"in",
"line",
".",
"researches",
":",
"if",
"researchable",
".",
"is_unique",
"(",
")",
":",
"# Skip this because unique techs are handled by civs",
"continue",
"# ResearchableTech objects are created for each unit/building",
"# line individually to avoid duplicates. We just point to the",
"# raw API objects here.",
"researchable_id",
"=",
"researchable",
".",
"get_id",
"(",
")",
"researchable_name",
"=",
"tech_lookup_dict",
"[",
"researchable_id",
"]",
"[",
"0",
"]",
"raw_api_object_ref",
"=",
"f\"{researchable_name}.ResearchableTech\"",
"researchable_forward_ref",
"=",
"ForwardRef",
"(",
"researchable",
",",
"raw_api_object_ref",
")",
"researchables_set",
".",
"append",
"(",
"researchable_forward_ref",
")",
"ability_raw_api_object",
".",
"add_raw_member",
"(",
"\"researchables\"",
",",
"researchables_set",
",",
"\"engine.ability.type.Research\"",
")",
"ability_forward_ref",
"=",
"ForwardRef",
"(",
"line",
",",
"ability_raw_api_object",
".",
"get_id",
"(",
")",
")",
"return",
"ability_forward_ref"
]
| https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/convert/processor/conversion/aoc/ability_subprocessor.py#L5328-L5401 |
|
ArduPilot/apm_planner | bc6721a6b875ede1116a2a4cb6f9d3d74cd9a6ef | sik_uploader/sik_uploader.py | python | uploader.debug | (self, s, level=1) | write some debug text | write some debug text | [
"write",
"some",
"debug",
"text"
]
| def debug(self, s, level=1):
'''write some debug text'''
if self._debug >= level:
print(s) | [
"def",
"debug",
"(",
"self",
",",
"s",
",",
"level",
"=",
"1",
")",
":",
"if",
"self",
".",
"_debug",
">=",
"level",
":",
"print",
"(",
"s",
")"
]
| https://github.com/ArduPilot/apm_planner/blob/bc6721a6b875ede1116a2a4cb6f9d3d74cd9a6ef/sik_uploader/sik_uploader.py#L113-L116 |
||
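The `level` argument gates output on the uploader's verbosity: a message prints only when `self._debug >= level`, so raising `_debug` progressively enables chattier diagnostics. The idiom in isolation:

```python
class Logger:
    def __init__(self, verbosity=0):
        self._debug = verbosity

    def debug(self, s, level=1):
        # Print only when the instance is at least as verbose as the message.
        if self._debug >= level:
            print(s)

log = Logger(verbosity=1)
log.debug("sent sync")                 # printed: level 1 <= verbosity 1
log.debug("raw bytes: ...", level=2)   # suppressed
```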
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/encodings/hex_codec.py | python | hex_decode | (input,errors='strict') | return (output, len(input)) | Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec. | Decodes the object input and returns a tuple (output
object, length consumed). | [
"Decodes",
"the",
"object",
"input",
"and",
"returns",
"a",
"tuple",
"(",
"output",
"object",
"length",
"consumed",
")",
"."
]
| def hex_decode(input,errors='strict'):
    """ Decodes the object input and returns a tuple (output
        object, length consumed).

        input must be an object which provides the bf_getreadbuf
        buffer slot. Python strings, buffer objects and memory
        mapped files are examples of objects providing this slot.

        errors defines the error handling to apply. It defaults to
        'strict' handling which is the only currently supported
        error handling for this codec.

    """
    assert errors == 'strict'
    output = binascii.a2b_hex(input)
    return (output, len(input)) | [
"def",
"hex_decode",
"(",
"input",
",",
"errors",
"=",
"'strict'",
")",
":",
"assert",
"errors",
"==",
"'strict'",
"output",
"=",
"binascii",
".",
"a2b_hex",
"(",
"input",
")",
"return",
"(",
"output",
",",
"len",
"(",
"input",
")",
")"
]
| https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/encodings/hex_codec.py#L27-L43 |
|
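
The codec above is a thin wrapper over binascii.a2b_hex. On Python 3, where this Python 2 module's str-to-str 'hex' codec no longer exists, the same decoding is reachable through codecs.decode on bytes:

import binascii
import codecs

assert binascii.a2b_hex(b'68656c6c6f') == b'hello'
assert codecs.decode(b'68656c6c6f', 'hex') == b'hello'
# Only strict error handling is supported, mirroring the assert in hex_decode.
try:
    binascii.a2b_hex(b'zz')
except binascii.Error as exc:
    print(exc)  # Non-hexadecimal digit found
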
milvus-io/milvus | 3b1030de2b6c39e3512833e97f6044d63eb24237 | internal/core/build-support/cpplint.py | python | CheckBracesSpacing | (filename, clean_lines, linenum, nesting_state, error) | Checks for horizontal spacing near commas.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found. | Checks for horizontal spacing near commas. | [
"Checks",
"for",
"horizontal",
"spacing",
"near",
"commas",
"."
]
| def CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error):
  """Checks for horizontal spacing near commas.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    nesting_state: A NestingState instance which maintains information about
                   the current stack of nested blocks being parsed.
    error: The function to call with any errors found.
  """
  line = clean_lines.elided[linenum]

  # Except after an opening paren, or after another opening brace (in case of
  # an initializer list, for instance), you should have spaces before your
  # braces when they are delimiting blocks, classes, namespaces etc.
  # And since you should never have braces at the beginning of a line,
  # this is an easy test. Except that braces used for initialization don't
  # follow the same rule; we often don't want spaces before those.
  match = Match(r'^(.*[^ ({>]){', line)
  if match:
    # Try a bit harder to check for brace initialization. This
    # happens in one of the following forms:
    #   Constructor() : initializer_list_{} { ... }
    #   Constructor{}.MemberFunction()
    #   Type variable{};
    #   FunctionCall(type{}, ...);
    #   LastArgument(..., type{});
    #   LOG(INFO) << type{} << " ...";
    #   map_of_type[{...}] = ...;
    #   ternary = expr ? new type{} : nullptr;
    #   OuterTemplate<InnerTemplateConstructor<Type>{}>
    #
    # We check for the character following the closing brace, and
    # silence the warning if it's one of those listed above, i.e.
    # "{.;,)<>]:".
    #
    # To account for nested initializer list, we allow any number of
    # closing braces up to "{;,)<". We can't simply silence the
    # warning on first sight of closing brace, because that would
    # cause false negatives for things that are not initializer lists.
    #   Silence this:         But not this:
    #     Outer{                if (...) {
    #       Inner{...}            if (...){  // Missing space before {
    #     };                    }
    #
    # There is a false negative with this approach if people inserted
    # spurious semicolons, e.g. "if (cond){};", but we will catch the
    # spurious semicolon with a separate check.
    leading_text = match.group(1)
    (endline, endlinenum, endpos) = CloseExpression(
        clean_lines, linenum, len(match.group(1)))
    trailing_text = ''
    if endpos > -1:
      trailing_text = endline[endpos:]
    for offset in xrange(endlinenum + 1,
                         min(endlinenum + 3, clean_lines.NumLines() - 1)):
      trailing_text += clean_lines.elided[offset]
    # We also suppress warnings for `uint64_t{expression}` etc., as the style
    # guide recommends brace initialization for integral types to avoid
    # overflow/truncation.
    if (not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text)
        and not _IsType(clean_lines, nesting_state, leading_text)):
      error(filename, linenum, 'whitespace/braces', 5,
            'Missing space before {')

  # Make sure '} else {' has spaces.
  if Search(r'}else', line):
    error(filename, linenum, 'whitespace/braces', 5,
          'Missing space before else')

  # You shouldn't have a space before a semicolon at the end of the line.
  # There's a special case for "for" since the style guide allows space before
  # the semicolon there.
  if Search(r':\s*;\s*$', line):
    error(filename, linenum, 'whitespace/semicolon', 5,
          'Semicolon defining empty statement. Use {} instead.')
  elif Search(r'^\s*;\s*$', line):
    error(filename, linenum, 'whitespace/semicolon', 5,
          'Line contains only semicolon. If this should be an empty statement, '
          'use {} instead.')
  elif (Search(r'\s+;\s*$', line) and
        not Search(r'\bfor\b', line)):
    error(filename, linenum, 'whitespace/semicolon', 5,
          'Extra space before last semicolon. If this should be an empty '
          'statement, use {} instead.') | [
"def",
"CheckBracesSpacing",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"nesting_state",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"# Except after an opening paren, or after another opening brace (in case of",
"# an initializer list, for instance), you should have spaces before your",
"# braces when they are delimiting blocks, classes, namespaces etc.",
"# And since you should never have braces at the beginning of a line,",
"# this is an easy test. Except that braces used for initialization don't",
"# follow the same rule; we often don't want spaces before those.",
"match",
"=",
"Match",
"(",
"r'^(.*[^ ({>]){'",
",",
"line",
")",
"if",
"match",
":",
"# Try a bit harder to check for brace initialization. This",
"# happens in one of the following forms:",
"# Constructor() : initializer_list_{} { ... }",
"# Constructor{}.MemberFunction()",
"# Type variable{};",
"# FunctionCall(type{}, ...);",
"# LastArgument(..., type{});",
"# LOG(INFO) << type{} << \" ...\";",
"# map_of_type[{...}] = ...;",
"# ternary = expr ? new type{} : nullptr;",
"# OuterTemplate<InnerTemplateConstructor<Type>{}>",
"#",
"# We check for the character following the closing brace, and",
"# silence the warning if it's one of those listed above, i.e.",
"# \"{.;,)<>]:\".",
"#",
"# To account for nested initializer list, we allow any number of",
"# closing braces up to \"{;,)<\". We can't simply silence the",
"# warning on first sight of closing brace, because that would",
"# cause false negatives for things that are not initializer lists.",
"# Silence this: But not this:",
"# Outer{ if (...) {",
"# Inner{...} if (...){ // Missing space before {",
"# }; }",
"#",
"# There is a false negative with this approach if people inserted",
"# spurious semicolons, e.g. \"if (cond){};\", but we will catch the",
"# spurious semicolon with a separate check.",
"leading_text",
"=",
"match",
".",
"group",
"(",
"1",
")",
"(",
"endline",
",",
"endlinenum",
",",
"endpos",
")",
"=",
"CloseExpression",
"(",
"clean_lines",
",",
"linenum",
",",
"len",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
")",
"trailing_text",
"=",
"''",
"if",
"endpos",
">",
"-",
"1",
":",
"trailing_text",
"=",
"endline",
"[",
"endpos",
":",
"]",
"for",
"offset",
"in",
"xrange",
"(",
"endlinenum",
"+",
"1",
",",
"min",
"(",
"endlinenum",
"+",
"3",
",",
"clean_lines",
".",
"NumLines",
"(",
")",
"-",
"1",
")",
")",
":",
"trailing_text",
"+=",
"clean_lines",
".",
"elided",
"[",
"offset",
"]",
"# We also suppress warnings for `uint64_t{expression}` etc., as the style",
"# guide recommends brace initialization for integral types to avoid",
"# overflow/truncation.",
"if",
"(",
"not",
"Match",
"(",
"r'^[\\s}]*[{.;,)<>\\]:]'",
",",
"trailing_text",
")",
"and",
"not",
"_IsType",
"(",
"clean_lines",
",",
"nesting_state",
",",
"leading_text",
")",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/braces'",
",",
"5",
",",
"'Missing space before {'",
")",
"# Make sure '} else {' has spaces.",
"if",
"Search",
"(",
"r'}else'",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/braces'",
",",
"5",
",",
"'Missing space before else'",
")",
"# You shouldn't have a space before a semicolon at the end of the line.",
"# There's a special case for \"for\" since the style guide allows space before",
"# the semicolon there.",
"if",
"Search",
"(",
"r':\\s*;\\s*$'",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/semicolon'",
",",
"5",
",",
"'Semicolon defining empty statement. Use {} instead.'",
")",
"elif",
"Search",
"(",
"r'^\\s*;\\s*$'",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/semicolon'",
",",
"5",
",",
"'Line contains only semicolon. If this should be an empty statement, '",
"'use {} instead.'",
")",
"elif",
"(",
"Search",
"(",
"r'\\s+;\\s*$'",
",",
"line",
")",
"and",
"not",
"Search",
"(",
"r'\\bfor\\b'",
",",
"line",
")",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/semicolon'",
",",
"5",
",",
"'Extra space before last semicolon. If this should be an empty '",
"'statement, use {} instead.'",
")"
]
| https://github.com/milvus-io/milvus/blob/3b1030de2b6c39e3512833e97f6044d63eb24237/internal/core/build-support/cpplint.py#L4048-L4134 |
||
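
Driving CheckBracesSpacing for real requires cpplint's CleansedLines and NestingState machinery, so the sketch below only replays two of its standalone regex rules on raw strings to show what they flag; it is a simplified re-implementation, not the cpplint entry point:

import re

def brace_spacing_issues(line):
    issues = []
    if re.search(r'}else', line):
        issues.append('Missing space before else')
    if re.search(r'^\s*;\s*$', line):
        issues.append('Line contains only semicolon.')
    elif re.search(r'\s+;\s*$', line) and not re.search(r'\bfor\b', line):
        issues.append('Extra space before last semicolon.')
    return issues

assert brace_spacing_issues('}else {') == ['Missing space before else']
assert brace_spacing_issues('  ;') == ['Line contains only semicolon.']
assert brace_spacing_issues('for (;;) ;') == []   # "for" is exempt
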
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/retdec-3.2/scripts/retdec-utils.py | python | CmdRunner._start | (self, cmd, buffer_output=False, discard_stdout=False, discard_stderr=False) | Starts the given command and returns a handler to it.
:param list cmd: Command to be run as a list of arguments (strings).
:param bool buffer_output: See above.
:returns: A handler to the started command (``subprocess.Popen``).
If the output is irrelevant for you, you should set `buffer_output` to
``True`` and ignore the `output`. | Starts the given command and returns a handler to it. | [
"Starts",
"the",
"given",
"command",
"and",
"returns",
"a",
"handler",
"to",
"it",
"."
]
| def _start(self, cmd, buffer_output=False, discard_stdout=False, discard_stderr=False):
    """Starts the given command and returns a handler to it.

    :param list cmd: Command to be run as a list of arguments (strings).
    :param bool buffer_output: See above.

    :returns: A handler to the started command (``subprocess.Popen``).

    If the output is irrelevant for you, you should set `buffer_output` to
    ``True`` and ignore the `output`.
    """
    # The implementation is platform-specific because we want to be able to
    # kill the children alongside with the process.
    if discard_stdout:
        stdout = subprocess.DEVNULL
    elif buffer_output:
        stdout = subprocess.PIPE
    else:
        stdout = None

    if discard_stderr:
        stderr = subprocess.DEVNULL
    elif buffer_output:
        if discard_stdout:
            stderr = subprocess.PIPE
        else:
            stderr = subprocess.STDOUT
    else:
        stderr = None

    kwargs = dict(
        args=cmd,
        stdin=subprocess.PIPE,
        stdout=stdout,
        stderr=stderr,
        universal_newlines=True
    )

    if is_windows():
        return _WindowsProcess(**kwargs)
    else:
        return _LinuxProcess(**kwargs) | [
"def",
"_start",
"(",
"self",
",",
"cmd",
",",
"buffer_output",
"=",
"False",
",",
"discard_stdout",
"=",
"False",
",",
"discard_stderr",
"=",
"False",
")",
":",
"# The implementation is platform-specific because we want to be able to",
"# kill the children alongside with the process.",
"if",
"discard_stdout",
":",
"stdout",
"=",
"subprocess",
".",
"DEVNULL",
"elif",
"buffer_output",
":",
"stdout",
"=",
"subprocess",
".",
"PIPE",
"else",
":",
"stdout",
"=",
"None",
"if",
"discard_stderr",
":",
"stderr",
"=",
"subprocess",
".",
"DEVNULL",
"elif",
"buffer_output",
":",
"if",
"discard_stdout",
":",
"stderr",
"=",
"subprocess",
".",
"PIPE",
"else",
":",
"stderr",
"=",
"subprocess",
".",
"STDOUT",
"else",
":",
"stderr",
"=",
"None",
"kwargs",
"=",
"dict",
"(",
"args",
"=",
"cmd",
",",
"stdin",
"=",
"subprocess",
".",
"PIPE",
",",
"stdout",
"=",
"stdout",
",",
"stderr",
"=",
"stderr",
",",
"universal_newlines",
"=",
"True",
")",
"if",
"is_windows",
"(",
")",
":",
"return",
"_WindowsProcess",
"(",
"*",
"*",
"kwargs",
")",
"else",
":",
"return",
"_LinuxProcess",
"(",
"*",
"*",
"kwargs",
")"
]
| https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/retdec-3.2/scripts/retdec-utils.py#L124-L165 |
||
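
The stdout/stderr selection above maps one-to-one onto subprocess.Popen arguments. A condensed, platform-neutral sketch without the _WindowsProcess/_LinuxProcess wrappers (the echo call assumes a POSIX system):

import subprocess

def start(cmd, buffer_output=False, discard_stdout=False, discard_stderr=False):
    stdout = (subprocess.DEVNULL if discard_stdout
              else subprocess.PIPE if buffer_output else None)
    if discard_stderr:
        stderr = subprocess.DEVNULL
    elif buffer_output:
        # Without a stdout pipe, stderr needs its own; otherwise merge them.
        stderr = subprocess.PIPE if discard_stdout else subprocess.STDOUT
    else:
        stderr = None
    return subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=stdout,
                            stderr=stderr, universal_newlines=True)

proc = start(['echo', 'hi'], buffer_output=True)
out, _ = proc.communicate()
assert out.strip() == 'hi'
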
nyuwireless-unipd/ns3-mmwave | 4ff9e87e8079764e04cbeccd8e85bff15ae16fb3 | src/visualizer/visualizer/core.py | python | Node._update_position | (self) | !
Update position function.
@param self: class object.
@return none | !
Update position function. | [
"!",
"Update",
"position",
"function",
"."
]
| def _update_position(self):
    """!
    Update position function.

    @param self: class object.

    @return none
    """
    x, y = self.get_position()
    self.set_position(x, y) | [
"def",
"_update_position",
"(",
"self",
")",
":",
"x",
",",
"y",
"=",
"self",
".",
"get_position",
"(",
")",
"self",
".",
"set_position",
"(",
"x",
",",
"y",
")"
]
| https://github.com/nyuwireless-unipd/ns3-mmwave/blob/4ff9e87e8079764e04cbeccd8e85bff15ae16fb3/src/visualizer/visualizer/core.py#L451-L459 |
||
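
The get-then-set round trip above simply forces the canvas item to re-run its placement logic; the same idiom in isolation:

class Marker:
    def __init__(self):
        self._pos = (0.0, 0.0)

    def get_position(self):
        return self._pos

    def set_position(self, x, y):
        self._pos = (x, y)   # a real canvas item would also redraw here

    def _update_position(self):
        x, y = self.get_position()
        self.set_position(x, y)

m = Marker()
m._update_position()          # no-op on the data, but re-triggers placement
assert m.get_position() == (0.0, 0.0)
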
kamyu104/LeetCode-Solutions | 77605708a927ea3b85aee5a479db733938c7c211 | Python/number-of-matching-subsequences.py | python | Solution.numMatchingSubseq | (self, S, words) | return len(waiting[None]) | :type S: str
:type words: List[str]
:rtype: int | :type S: str
:type words: List[str]
:rtype: int | [
":",
"type",
"S",
":",
"str",
":",
"type",
"words",
":",
"List",
"[",
"str",
"]",
":",
"rtype",
":",
"int"
]
| def numMatchingSubseq(self, S, words):
    """
    :type S: str
    :type words: List[str]
    :rtype: int
    """
    waiting = collections.defaultdict(list)
    for word in words:
        it = iter(word)
        waiting[next(it, None)].append(it)
    for c in S:
        for it in waiting.pop(c, ()):
            waiting[next(it, None)].append(it)
    return len(waiting[None]) | [
"def",
"numMatchingSubseq",
"(",
"self",
",",
"S",
",",
"words",
")",
":",
"waiting",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
")",
"for",
"word",
"in",
"words",
":",
"it",
"=",
"iter",
"(",
"word",
")",
"waiting",
"[",
"next",
"(",
"it",
",",
"None",
")",
"]",
".",
"append",
"(",
"it",
")",
"for",
"c",
"in",
"S",
":",
"for",
"it",
"in",
"waiting",
".",
"pop",
"(",
"c",
",",
"(",
")",
")",
":",
"waiting",
"[",
"next",
"(",
"it",
",",
"None",
")",
"]",
".",
"append",
"(",
"it",
")",
"return",
"len",
"(",
"waiting",
"[",
"None",
"]",
")"
]
| https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/number-of-matching-subsequences.py#L8-L21 |
|
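
The waiting-list trick advances each word's iterator at most once per character of S, giving roughly O(len(S) + total word length) work overall. A standalone check against LeetCode 792's sample input:

import collections

def num_matching_subseq(S, words):
    waiting = collections.defaultdict(list)
    for word in words:
        it = iter(word)
        waiting[next(it, None)].append(it)   # bucket by first needed char
    for c in S:
        for it in waiting.pop(c, ()):
            waiting[next(it, None)].append(it)
    return len(waiting[None])                # fully consumed iterators

assert num_matching_subseq('abcde', ['a', 'bb', 'acd', 'ace']) == 3
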
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/decimal.py | python | Context.logical_invert | (self, a) | return a.logical_invert(context=self) | Invert all the digits in the operand.
The operand must be a logical number.
>>> ExtendedContext.logical_invert(Decimal('0'))
Decimal('111111111')
>>> ExtendedContext.logical_invert(Decimal('1'))
Decimal('111111110')
>>> ExtendedContext.logical_invert(Decimal('111111111'))
Decimal('0')
>>> ExtendedContext.logical_invert(Decimal('101010101'))
Decimal('10101010')
>>> ExtendedContext.logical_invert(1101)
Decimal('111110010') | Invert all the digits in the operand. | [
"Invert",
"all",
"the",
"digits",
"in",
"the",
"operand",
"."
]
| def logical_invert(self, a):
    """Invert all the digits in the operand.

    The operand must be a logical number.

    >>> ExtendedContext.logical_invert(Decimal('0'))
    Decimal('111111111')
    >>> ExtendedContext.logical_invert(Decimal('1'))
    Decimal('111111110')
    >>> ExtendedContext.logical_invert(Decimal('111111111'))
    Decimal('0')
    >>> ExtendedContext.logical_invert(Decimal('101010101'))
    Decimal('10101010')
    >>> ExtendedContext.logical_invert(1101)
    Decimal('111110010')
    """
    a = _convert_other(a, raiseit=True)
    return a.logical_invert(context=self) | [
"def",
"logical_invert",
"(",
"self",
",",
"a",
")",
":",
"a",
"=",
"_convert_other",
"(",
"a",
",",
"raiseit",
"=",
"True",
")",
"return",
"a",
".",
"logical_invert",
"(",
"context",
"=",
"self",
")"
]
| https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/decimal.py#L4589-L4606 |
|
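
The doctest values above hold for the stdlib decimal module as well: ExtendedContext has precision 9, so operands are zero-padded to nine digits before the digitwise inversion:

from decimal import Decimal, ExtendedContext

assert ExtendedContext.logical_invert(Decimal('0')) == Decimal('111111111')
assert ExtendedContext.logical_invert(Decimal('101010101')) == Decimal('10101010')
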
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/_misc.py | python | TimeSpan.IsNegative | (*args, **kwargs) | return _misc_.TimeSpan_IsNegative(*args, **kwargs) | IsNegative(self) -> bool | IsNegative(self) -> bool | [
"IsNegative",
"(",
"self",
")",
"-",
">",
"bool"
]
| def IsNegative(*args, **kwargs):
    """IsNegative(self) -> bool"""
    return _misc_.TimeSpan_IsNegative(*args, **kwargs) | [
"def",
"IsNegative",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"TimeSpan_IsNegative",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_misc.py#L4498-L4500 |
|
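
A small usage sketch, assuming the Classic-era wx.TimeSpan.Seconds factory (requires wxPython to be installed; no wx.App is needed just to construct spans):

import wx

assert not wx.TimeSpan.Seconds(5).IsNegative()
assert wx.TimeSpan.Seconds(-5).IsNegative()
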
CaoWGG/TensorRT-CenterNet | f949252e37b51e60f873808f46d3683f15735e79 | onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py | python | Token.spelling | (self) | return conf.lib.clang_getTokenSpelling(self._tu, self) | The spelling of this token.
This is the textual representation of the token in source. | The spelling of this token. | [
"The",
"spelling",
"of",
"this",
"token",
"."
]
| def spelling(self):
    """The spelling of this token.

    This is the textual representation of the token in source.
    """
    return conf.lib.clang_getTokenSpelling(self._tu, self) | [
"def",
"spelling",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_getTokenSpelling",
"(",
"self",
".",
"_tu",
",",
"self",
")"
]
| https://github.com/CaoWGG/TensorRT-CenterNet/blob/f949252e37b51e60f873808f46d3683f15735e79/onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py#L2992-L2997 |
|
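
Tokens are normally obtained by tokenizing a cursor or translation-unit extent; a sketch assuming libclang is installed and discoverable by the bindings:

import clang.cindex as ci

index = ci.Index.create()
tu = index.parse('demo.c', unsaved_files=[('demo.c', 'int answer = 42;')])
spellings = [tok.spelling for tok in tu.get_tokens(extent=tu.cursor.extent)]
print(spellings)  # expected: ['int', 'answer', '=', '42', ';']
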
rrwick/Unicycler | 96ffea71e3a78d63ade19d6124946773e65cf129 | unicycler/spades_func.py | python | get_max_spades_kmer | (spades_path) | SPAdes usually has a maximum k-mer size of 127, but this can be changed when compiling SPAdes,
so this function checks the help text to see what it is.
https://github.com/ablab/spades/issues/40 | SPAdes usually has a maximum k-mer size of 127, but this can be changed when compiling SPAdes,
so this function checks the help text to see what it is.
https://github.com/ablab/spades/issues/40 | [
"SPAdes",
"usually",
"has",
"a",
"maximum",
"k",
"-",
"mer",
"size",
"of",
"127",
"but",
"this",
"can",
"be",
"changed",
"when",
"compiling",
"SPAdes",
"so",
"this",
"function",
"checks",
"the",
"help",
"text",
"to",
"see",
"what",
"it",
"is",
".",
"https",
":",
"//",
"github",
".",
"com",
"/",
"ablab",
"/",
"spades",
"/",
"issues",
"/",
"40"
]
| def get_max_spades_kmer(spades_path):
    """
    SPAdes usually has a maximum k-mer size of 127, but this can be changed when compiling SPAdes,
    so this function checks the help text to see what it is.
    https://github.com/ablab/spades/issues/40
    """
    try:
        process = subprocess.Popen([spades_path, '--help'], stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        out, err = process.communicate()
        all_output = out.decode() + err.decode()
        all_output = all_output.replace('\n', ' ')
        all_output = ' '.join(all_output.split())
        max_kmer = all_output.split('must be odd and less than ')[1].split(')')[0]
        return int(max_kmer) - 1
    except (IndexError, ValueError):
        return 127 | [
"def",
"get_max_spades_kmer",
"(",
"spades_path",
")",
":",
"try",
":",
"process",
"=",
"subprocess",
".",
"Popen",
"(",
"[",
"spades_path",
",",
"'--help'",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
")",
"out",
",",
"err",
"=",
"process",
".",
"communicate",
"(",
")",
"all_output",
"=",
"out",
".",
"decode",
"(",
")",
"+",
"err",
".",
"decode",
"(",
")",
"all_output",
"=",
"all_output",
".",
"replace",
"(",
"'\\n'",
",",
"' '",
")",
"all_output",
"=",
"' '",
".",
"join",
"(",
"all_output",
".",
"split",
"(",
")",
")",
"max_kmer",
"=",
"all_output",
".",
"split",
"(",
"'must be odd and less than '",
")",
"[",
"1",
"]",
".",
"split",
"(",
"')'",
")",
"[",
"0",
"]",
"return",
"int",
"(",
"max_kmer",
")",
"-",
"1",
"except",
"(",
"IndexError",
",",
"ValueError",
")",
":",
"return",
"127"
]
| https://github.com/rrwick/Unicycler/blob/96ffea71e3a78d63ade19d6124946773e65cf129/unicycler/spades_func.py#L304-L320 |
||
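
The parsing step can be exercised without SPAdes installed by feeding the same slicing a fabricated help string:

help_text = ("-k <int,int,...> list of k-mer sizes "
             "(must be odd and less than 128) [default: 'auto']")
max_kmer = help_text.split('must be odd and less than ')[1].split(')')[0]
assert int(max_kmer) - 1 == 127   # matches the function's fallback value
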
natanielruiz/android-yolo | 1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f | jni-build/jni/include/tensorflow/python/training/saver.py | python | Saver.export_meta_graph | (self, filename=None, collection_list=None,
as_text=False) | return export_meta_graph(filename=filename,
graph_def=ops.get_default_graph().as_graph_def(
add_shapes=True),
saver_def=self.saver_def,
collection_list=collection_list,
as_text=as_text) | Writes `MetaGraphDef` to save_path/filename.
Args:
filename: Optional meta_graph filename including the path.
collection_list: List of string keys to collect.
as_text: If `True`, writes the meta_graph as an ASCII proto.
Returns:
A `MetaGraphDef` proto. | Writes `MetaGraphDef` to save_path/filename. | [
"Writes",
"MetaGraphDef",
"to",
"save_path",
"/",
"filename",
"."
]
| def export_meta_graph(self, filename=None, collection_list=None,
                        as_text=False):
  """Writes `MetaGraphDef` to save_path/filename.

  Args:
    filename: Optional meta_graph filename including the path.
    collection_list: List of string keys to collect.
    as_text: If `True`, writes the meta_graph as an ASCII proto.

  Returns:
    A `MetaGraphDef` proto.
  """
  return export_meta_graph(filename=filename,
                           graph_def=ops.get_default_graph().as_graph_def(
                               add_shapes=True),
                           saver_def=self.saver_def,
                           collection_list=collection_list,
                           as_text=as_text) | [
"def",
"export_meta_graph",
"(",
"self",
",",
"filename",
"=",
"None",
",",
"collection_list",
"=",
"None",
",",
"as_text",
"=",
"False",
")",
":",
"return",
"export_meta_graph",
"(",
"filename",
"=",
"filename",
",",
"graph_def",
"=",
"ops",
".",
"get_default_graph",
"(",
")",
".",
"as_graph_def",
"(",
"add_shapes",
"=",
"True",
")",
",",
"saver_def",
"=",
"self",
".",
"saver_def",
",",
"collection_list",
"=",
"collection_list",
",",
"as_text",
"=",
"as_text",
")"
]
| https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/training/saver.py#L1089-L1106 |
|
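
Under the TF 1.x-style graph API this code targets, the call is typically made right after the graph is built; a sketch assuming tensorflow.compat.v1 semantics:

import tensorflow.compat.v1 as tf  # plain `tensorflow` on a 1.x install
tf.disable_eager_execution()

w = tf.Variable([1.0], name='w')        # Saver needs at least one variable
saver = tf.train.Saver()
meta = saver.export_meta_graph('/tmp/model.meta', as_text=True)
print(type(meta).__name__)              # MetaGraphDef
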
Tencent/Pebble | 68315f176d9e328a233ace29b7579a829f89879f | tools/blade/src/blade/rules_generator.py | python | SconsFileHeaderGenerator.generate_imports_functions | (self, blade_path) | Generates imports and functions. | Generates imports and functions. | [
"Generates",
"imports",
"and",
"functions",
"."
]
| def generate_imports_functions(self, blade_path):
    """Generates imports and functions. """
    self._add_rule(
        r"""
import sys
sys.path.insert(0, '%s')
""" % blade_path)
    self._add_rule(
        r"""
import os
import subprocess
import signal
import time
import socket
import glob

import blade_util
import console
import scons_helper

from build_environment import ScacheManager
from console import colors
from scons_helper import MakeAction
from scons_helper import create_fast_link_builders
from scons_helper import echospawn
from scons_helper import error_colorize
from scons_helper import generate_python_binary
from scons_helper import generate_resource_file
from scons_helper import generate_resource_index
""")
    if getattr(self.options, 'verbose', False):
        self._add_rule('scons_helper.option_verbose = True')

    self._add_rule((
        """if not os.path.exists('%s'):
    os.mkdir('%s')""") % (self.build_dir, self.build_dir)) | [
"def",
"generate_imports_functions",
"(",
"self",
",",
"blade_path",
")",
":",
"self",
".",
"_add_rule",
"(",
"r\"\"\"\nimport sys\nsys.path.insert(0, '%s')\n\"\"\"",
"%",
"blade_path",
")",
"self",
".",
"_add_rule",
"(",
"r\"\"\"\nimport os\nimport subprocess\nimport signal\nimport time\nimport socket\nimport glob\n\nimport blade_util\nimport console\nimport scons_helper\n\nfrom build_environment import ScacheManager\nfrom console import colors\nfrom scons_helper import MakeAction\nfrom scons_helper import create_fast_link_builders\nfrom scons_helper import echospawn\nfrom scons_helper import error_colorize\nfrom scons_helper import generate_python_binary\nfrom scons_helper import generate_resource_file\nfrom scons_helper import generate_resource_index\n\"\"\"",
")",
"if",
"getattr",
"(",
"self",
".",
"options",
",",
"'verbose'",
",",
"False",
")",
":",
"self",
".",
"_add_rule",
"(",
"'scons_helper.option_verbose = True'",
")",
"self",
".",
"_add_rule",
"(",
"(",
"\"\"\"if not os.path.exists('%s'):\n os.mkdir('%s')\"\"\"",
")",
"%",
"(",
"self",
".",
"build_dir",
",",
"self",
".",
"build_dir",
")",
")"
]
| https://github.com/Tencent/Pebble/blob/68315f176d9e328a233ace29b7579a829f89879f/tools/blade/src/blade/rules_generator.py#L159-L195 |
||
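
The generator is plain string templating into a rules buffer; the pattern in isolation, with an invented _add_rule sink:

class HeaderGen:
    def __init__(self, build_dir):
        self.rules = []
        self.build_dir = build_dir

    def _add_rule(self, text):
        self.rules.append(text)

    def generate(self, blade_path):
        self._add_rule("import sys\nsys.path.insert(0, '%s')" % blade_path)
        self._add_rule("if not os.path.exists('%s'):\n    os.mkdir('%s')"
                       % (self.build_dir, self.build_dir))
        return '\n'.join(self.rules)

print(HeaderGen('build64_release').generate('/opt/blade'))
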
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/scipy/interpolate/fitpack.py | python | _intc_overflow | (x, msg=None) | return intc(x) | Cast the value to an intc and raise an OverflowError if the value
cannot fit. | Cast the value to an intc and raise an OverflowError if the value
cannot fit. | [
"Cast",
"the",
"value",
"to",
"an",
"intc",
"and",
"raise",
"an",
"OverflowError",
"if",
"the",
"value",
"cannot",
"fit",
"."
]
| def _intc_overflow(x, msg=None):
    """Cast the value to an intc and raise an OverflowError if the value
    cannot fit.
    """
    if x > iinfo(intc).max:
        if msg is None:
            msg = '%r cannot fit into an intc' % x
        raise OverflowError(msg)
    return intc(x) | [
"def",
"_intc_overflow",
"(",
"x",
",",
"msg",
"=",
"None",
")",
":",
"if",
"x",
">",
"iinfo",
"(",
"intc",
")",
".",
"max",
":",
"if",
"msg",
"is",
"None",
":",
"msg",
"=",
"'%r cannot fit into an intc'",
"%",
"x",
"raise",
"OverflowError",
"(",
"msg",
")",
"return",
"intc",
"(",
"x",
")"
]
| https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/interpolate/fitpack.py#L41-L49 |
|
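
iinfo and intc come from NumPy, so the guard can be demonstrated directly; on most platforms intc is 32-bit, making the threshold 2**31 - 1:

from numpy import iinfo, intc

def intc_overflow(x, msg=None):
    if x > iinfo(intc).max:
        if msg is None:
            msg = '%r cannot fit into an intc' % x
        raise OverflowError(msg)
    return intc(x)

assert intc_overflow(10) == 10
try:
    intc_overflow(iinfo(intc).max + 1)
except OverflowError as exc:
    print(exc)   # ... cannot fit into an intc
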
microsoft/CNTK | e9396480025b9ca457d26b6f33dd07c474c6aa04 | bindings/python/cntk/contrib/crosstalkcaffe/unimodel/cntkinstance.py | python | ApiSetup.splice | (cntk_layer, inputs) | return ops.splice(*inputs, axis=0, name=cntk_layer.op_name) | Setup splice op with given parameters
Args:
cntk_layer (:class:`~cntk.contrib.crosstalkcaffe.unimodel.cntkmodel.CntkLayersDefinition`):
the layer definition of splice op
inputs (list): a list contains all :class:`~cntk.ops.functions.Function` or
:class:`~cntk.input`
Return:
:func:`~cntk.ops.functions.Function`: instanced cntk splice op | Setup splice op with given parameters | [
"Setup",
"splice",
"op",
"with",
"given",
"parameters"
]
| def splice(cntk_layer, inputs):
    '''
    Setup splice op with given parameters

    Args:
        cntk_layer (:class:`~cntk.contrib.crosstalkcaffe.unimodel.cntkmodel.CntkLayersDefinition`):
            the layer definition of splice op
        inputs (list): a list contains all :class:`~cntk.ops.functions.Function` or
            :class:`~cntk.input`

    Return:
        :func:`~cntk.ops.functions.Function`: instanced cntk splice op
    '''
    return ops.splice(*inputs, axis=0, name=cntk_layer.op_name) | [
"def",
"splice",
"(",
"cntk_layer",
",",
"inputs",
")",
":",
"return",
"ops",
".",
"splice",
"(",
"*",
"inputs",
",",
"axis",
"=",
"0",
",",
"name",
"=",
"cntk_layer",
".",
"op_name",
")"
]
| https://github.com/microsoft/CNTK/blob/e9396480025b9ca457d26b6f33dd07c474c6aa04/bindings/python/cntk/contrib/crosstalkcaffe/unimodel/cntkinstance.py#L361-L374 |
|
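
At the user level, the wrapped op concatenates its inputs along the given axis; a direct usage sketch (requires the cntk package, and the shapes here are illustrative):

import numpy as np
import cntk as C

a = C.constant(np.ones((1, 2), dtype=np.float32))
b = C.constant(np.zeros((1, 2), dtype=np.float32))
joined = C.splice(a, b, axis=0)   # stacks to shape (2, 2)
print(joined.eval())
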
tomahawk-player/tomahawk-resolvers | 7f827bbe410ccfdb0446f7d6a91acc2199c9cc8d | archive/spotify/breakpad/third_party/protobuf/protobuf/python/google/protobuf/internal/python_message.py | python | _Listener.__init__ | (self, parent_message) | Args:
parent_message: The message whose _Modified() method we should call when
we receive Modified() messages. | Args:
parent_message: The message whose _Modified() method we should call when
we receive Modified() messages. | [
"Args",
":",
"parent_message",
":",
"The",
"message",
"whose",
"_Modified",
"()",
"method",
"we",
"should",
"call",
"when",
"we",
"receive",
"Modified",
"()",
"messages",
"."
]
| def __init__(self, parent_message):
  """Args:
    parent_message: The message whose _Modified() method we should call when
      we receive Modified() messages.
  """
  # This listener establishes a back reference from a child (contained) object
  # to its parent (containing) object. We make this a weak reference to avoid
  # creating cyclic garbage when the client finishes with the 'parent' object
  # in the tree.
  if isinstance(parent_message, weakref.ProxyType):
    self._parent_message_weakref = parent_message
  else:
    self._parent_message_weakref = weakref.proxy(parent_message)

  # As an optimization, we also indicate directly on the listener whether
  # or not the parent message is dirty. This way we can avoid traversing
  # up the tree in the common case.
  self.dirty = False | [
"def",
"__init__",
"(",
"self",
",",
"parent_message",
")",
":",
"# This listener establishes a back reference from a child (contained) object",
"# to its parent (containing) object. We make this a weak reference to avoid",
"# creating cyclic garbage when the client finishes with the 'parent' object",
"# in the tree.",
"if",
"isinstance",
"(",
"parent_message",
",",
"weakref",
".",
"ProxyType",
")",
":",
"self",
".",
"_parent_message_weakref",
"=",
"parent_message",
"else",
":",
"self",
".",
"_parent_message_weakref",
"=",
"weakref",
".",
"proxy",
"(",
"parent_message",
")",
"# As an optimization, we also indicate directly on the listener whether",
"# or not the parent message is dirty. This way we can avoid traversing",
"# up the tree in the common case.",
"self",
".",
"dirty",
"=",
"False"
]
| https://github.com/tomahawk-player/tomahawk-resolvers/blob/7f827bbe410ccfdb0446f7d6a91acc2199c9cc8d/archive/spotify/breakpad/third_party/protobuf/protobuf/python/google/protobuf/internal/python_message.py#L961-L978 |
||
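
The weakref.proxy back-reference is what keeps parent and child from forming a strong reference cycle; the pattern in miniature:

import weakref

class Parent:
    def __init__(self):
        self.dirty = False

class Listener:
    def __init__(self, parent):
        if isinstance(parent, weakref.ProxyType):
            self._parent = parent
        else:
            self._parent = weakref.proxy(parent)  # no strong ref upward
        self.dirty = False

p = Parent()
listener = Listener(p)
assert listener._parent.dirty is False  # attribute access goes via the proxy
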
google/earthenterprise | 0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9 | earth_enterprise/src/google/protobuf-py/google/protobuf/internal/python_message.py | python | _AddEqualsMethod | (message_descriptor, cls) | Helper for _AddMessageMethods(). | Helper for _AddMessageMethods(). | [
"Helper",
"for",
"_AddMessageMethods",
"()",
"."
]
| def _AddEqualsMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""
  def __eq__(self, other):
    if (not isinstance(other, message_mod.Message) or
        other.DESCRIPTOR != self.DESCRIPTOR):
      return False

    if self is other:
      return True

    return self.ListFields() == other.ListFields()

  cls.__eq__ = __eq__ | [
"def",
"_AddEqualsMethod",
"(",
"message_descriptor",
",",
"cls",
")",
":",
"def",
"__eq__",
"(",
"self",
",",
"other",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"other",
",",
"message_mod",
".",
"Message",
")",
"or",
"other",
".",
"DESCRIPTOR",
"!=",
"self",
".",
"DESCRIPTOR",
")",
":",
"return",
"False",
"if",
"self",
"is",
"other",
":",
"return",
"True",
"return",
"self",
".",
"ListFields",
"(",
")",
"==",
"other",
".",
"ListFields",
"(",
")",
"cls",
".",
"__eq__",
"=",
"__eq__"
]
| https://github.com/google/earthenterprise/blob/0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9/earth_enterprise/src/google/protobuf-py/google/protobuf/internal/python_message.py#L641-L653 |
||
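
Injecting a dunder onto a generated class is plain attribute assignment; the same move stripped of the protobuf descriptor checks:

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

def add_equals(cls):
    def __eq__(self, other):
        if not isinstance(other, cls):
            return False
        if self is other:
            return True
        return (self.x, self.y) == (other.x, other.y)
    cls.__eq__ = __eq__

add_equals(Point)
assert Point(1, 2) == Point(1, 2)
assert Point(1, 2) != Point(3, 4)
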
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/agw/ribbon/toolbar.py | python | RibbonToolBar.OnMouseLeave | (self, event) | Handles the ``wx.EVT_LEAVE_WINDOW`` event for :class:`RibbonToolBar`.
:param `event`: a :class:`MouseEvent` event to be processed. | Handles the ``wx.EVT_LEAVE_WINDOW`` event for :class:`RibbonToolBar`. | [
"Handles",
"the",
"wx",
".",
"EVT_LEAVE_WINDOW",
"event",
"for",
":",
"class",
":",
"RibbonToolBar",
"."
]
| def OnMouseLeave(self, event):
    """
    Handles the ``wx.EVT_LEAVE_WINDOW`` event for :class:`RibbonToolBar`.

    :param `event`: a :class:`MouseEvent` event to be processed.
    """
    if self._hover_tool:
        self._hover_tool.state &= ~RIBBON_TOOLBAR_TOOL_HOVER_MASK
        self._hover_tool = None
        self.Refresh(False) | [
"def",
"OnMouseLeave",
"(",
"self",
",",
"event",
")",
":",
"if",
"self",
".",
"_hover_tool",
":",
"self",
".",
"_hover_tool",
".",
"state",
"&=",
"~",
"RIBBON_TOOLBAR_TOOL_HOVER_MASK",
"self",
".",
"_hover_tool",
"=",
"None",
"self",
".",
"Refresh",
"(",
"False",
")"
]
| https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/ribbon/toolbar.py#L1343-L1353 |
||
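
Hover handlers like this are wired up with Bind; a minimal stand-alone wx sketch (HoverPanel is invented, and a running wx.App is required to see it in action):

import wx

class HoverPanel(wx.Panel):
    def __init__(self, parent):
        super(HoverPanel, self).__init__(parent)
        self._hovered = False
        self.Bind(wx.EVT_ENTER_WINDOW, self.OnEnter)
        self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeave)

    def OnEnter(self, event):
        self._hovered = True
        self.Refresh(False)

    def OnLeave(self, event):
        self._hovered = False   # clear hover state, as the toolbar does
        self.Refresh(False)
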
apiaryio/snowcrash | b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3 | tools/gyp/pylib/gyp/xcodeproj_file.py | python | PBXProject.AddOrGetFileInRootGroup | (self, path) | return group.AddOrGetFileByPath(path, hierarchical) | Returns a PBXFileReference corresponding to path in the correct group
according to RootGroupForPath's heuristics.
If an existing PBXFileReference for path exists, it will be returned.
Otherwise, one will be created and returned. | Returns a PBXFileReference corresponding to path in the correct group
according to RootGroupForPath's heuristics. | [
"Returns",
"a",
"PBXFileReference",
"corresponding",
"to",
"path",
"in",
"the",
"correct",
"group",
"according",
"to",
"RootGroupForPath",
"s",
"heuristics",
"."
]
| def AddOrGetFileInRootGroup(self, path):
  """Returns a PBXFileReference corresponding to path in the correct group
  according to RootGroupForPath's heuristics.

  If an existing PBXFileReference for path exists, it will be returned.
  Otherwise, one will be created and returned.
  """
  (group, hierarchical) = self.RootGroupForPath(path)
  return group.AddOrGetFileByPath(path, hierarchical) | [
"def",
"AddOrGetFileInRootGroup",
"(",
"self",
",",
"path",
")",
":",
"(",
"group",
",",
"hierarchical",
")",
"=",
"self",
".",
"RootGroupForPath",
"(",
"path",
")",
"return",
"group",
".",
"AddOrGetFileByPath",
"(",
"path",
",",
"hierarchical",
")"
]
| https://github.com/apiaryio/snowcrash/blob/b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3/tools/gyp/pylib/gyp/xcodeproj_file.py#L2621-L2630 |
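
The two-step shape, pick a root group for the path and then get-or-create within it, generalizes well beyond Xcode projects; a compact dictionary-based analogue with invented names:

class ProjectSketch:
    def __init__(self):
        self.groups = {'Source': {}, 'Frameworks': {}}

    def root_group_for_path(self, path):
        name = 'Frameworks' if path.endswith('.framework') else 'Source'
        return (self.groups[name], True)     # (group, hierarchical)

    def add_or_get_file(self, path):
        group, _hierarchical = self.root_group_for_path(path)
        return group.setdefault(path, {'path': path})

proj = ProjectSketch()
ref1 = proj.add_or_get_file('src/main.cc')
ref2 = proj.add_or_get_file('src/main.cc')
assert ref1 is ref2   # the existing reference is returned, not duplicated
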