Dataset schema (column name, type, observed value lengths):

  nwo                 string   length 5 .. 86
  sha                 string   length 40 .. 40
  path                string   length 4 .. 189
  language            string   1 distinct value
  identifier          string   length 1 .. 94
  parameters          string   length 2 .. 4.03k
  argument_list       string   1 distinct value
  return_statement    string   length 0 .. 11.5k
  docstring           string   length 1 .. 33.2k
  docstring_summary   string   length 0 .. 5.15k
  docstring_tokens    list
  function            string   length 34 .. 151k
  function_tokens     list
  url                 string   length 90 .. 278

nwo: openvinotoolkit/openvino
sha: dedcbeafa8b84cccdc55ca64b8da516682b381c7
path: tools/mo/openvino/tools/mo/middle/passes/infer.py
language: python
identifier: override_batch
parameters: (graph: Graph, batch: int)
docstring_summary: Overrides batch for nodes with 'op' param set to 'Parameter'
[ "Overrides", "batch", "for", "nodes", "with", "op", "param", "set", "to", "Parameter", "Parameters", "----------", "graph", ":", "graph", "to", "operate", "on", "batch", ":", "user", "defined", "integer", "value", "to", "override", "batch" ]
function:

def override_batch(graph: Graph, batch: int):
    """
    Overrides batch for nodes with 'op' param set to 'Parameter'
    Parameters
    ----------
    graph: graph to operate on
    batch: user defined integer value to override batch
    """
    if batch is not None:
        in_nodes = graph.get_op_nodes(op='Parameter')
        for node in in_nodes:
            if not node.soft_get('fixed_batch', False):
                name = node.soft_get('name', node.id)
                idx, has_layout = get_dim_from_layout(node, 'N')
                if has_layout:
                    if idx is not None:
                        node['shape'][idx] = batch
                    else:
                        log.warning(
                            'Layout for input {} doesn\'t have batch dimension. Skipping this input.'.format(name))
                else:
                    validate_batch_in_shape(node['shape'], name)
                    node['shape'][0] = batch
[ "def", "override_batch", "(", "graph", ":", "Graph", ",", "batch", ":", "int", ")", ":", "if", "batch", "is", "not", "None", ":", "in_nodes", "=", "graph", ".", "get_op_nodes", "(", "op", "=", "'Parameter'", ")", "for", "node", "in", "in_nodes", ":", "if", "not", "node", ".", "soft_get", "(", "'fixed_batch'", ",", "False", ")", ":", "name", "=", "node", ".", "soft_get", "(", "'name'", ",", "node", ".", "id", ")", "idx", ",", "has_layout", "=", "get_dim_from_layout", "(", "node", ",", "'N'", ")", "if", "has_layout", ":", "if", "idx", "is", "not", "None", ":", "node", "[", "'shape'", "]", "[", "idx", "]", "=", "batch", "else", ":", "log", ".", "warning", "(", "'Layout for input {} doesn\\'t have batch dimension. Skipping this input.'", ".", "format", "(", "name", ")", ")", "else", ":", "validate_batch_in_shape", "(", "node", "[", "'shape'", "]", ",", "name", ")", "node", "[", "'shape'", "]", "[", "0", "]", "=", "batch" ]
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/mo/openvino/tools/mo/middle/passes/infer.py#L216-L238

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/targets/arrayobj.py
language: python
identifier: _do_concatenate
parameters: (context, builder, axis, arrtys, arrs, arr_shapes, arr_strides, retty, ret_shapes)
return_statement: return ret
docstring_summary: Concatenate arrays along the given axis.
[ "Concatenate", "arrays", "along", "the", "given", "axis", "." ]
function:

def _do_concatenate(context, builder, axis,
                    arrtys, arrs, arr_shapes, arr_strides,
                    retty, ret_shapes):
    """
    Concatenate arrays along the given axis.
    """
    assert len(arrtys) == len(arrs) == len(arr_shapes) == len(arr_strides)
    zero = cgutils.intp_t(0)

    # Allocate return array
    ret = _empty_nd_impl(context, builder, retty, ret_shapes)
    ret_strides = cgutils.unpack_tuple(builder, ret.strides)

    # Compute the offset by which to bump the destination pointer
    # after copying each input array.
    # Morally, we need to copy each input array at different start indices
    # into the destination array; bumping the destination pointer
    # is simply easier than offsetting all destination indices.
    copy_offsets = []

    for arr_sh in arr_shapes:
        # offset = ret_strides[axis] * input_shape[axis]
        offset = zero
        for dim, (size, stride) in enumerate(zip(arr_sh, ret_strides)):
            is_axis = builder.icmp_signed('==', axis.type(dim), axis)
            addend = builder.mul(size, stride)
            offset = builder.select(is_axis,
                                    builder.add(offset, addend),
                                    offset)
        copy_offsets.append(offset)

    # Copy input arrays into the return array
    ret_data = ret.data

    for arrty, arr, arr_sh, arr_st, offset in zip(arrtys, arrs, arr_shapes,
                                                  arr_strides, copy_offsets):
        arr_data = arr.data

        # Do the copy loop
        # Note the loop nesting is optimized for the destination layout
        loop_nest = cgutils.loop_nest(builder, arr_sh, cgutils.intp_t,
                                      order=retty.layout)

        with loop_nest as indices:
            src_ptr = cgutils.get_item_pointer2(context, builder, arr_data,
                                                arr_sh, arr_st,
                                                arrty.layout, indices)
            val = load_item(context, builder, arrty, src_ptr)
            val = context.cast(builder, val, arrty.dtype, retty.dtype)
            dest_ptr = cgutils.get_item_pointer2(context, builder, ret_data,
                                                 ret_shapes, ret_strides,
                                                 retty.layout, indices)
            store_item(context, builder, retty, val, dest_ptr)

        # Bump destination pointer
        ret_data = cgutils.pointer_add(builder, ret_data, offset)

    return ret
[ "def", "_do_concatenate", "(", "context", ",", "builder", ",", "axis", ",", "arrtys", ",", "arrs", ",", "arr_shapes", ",", "arr_strides", ",", "retty", ",", "ret_shapes", ")", ":", "assert", "len", "(", "arrtys", ")", "==", "len", "(", "arrs", ")", "==", "len", "(", "arr_shapes", ")", "==", "len", "(", "arr_strides", ")", "zero", "=", "cgutils", ".", "intp_t", "(", "0", ")", "# Allocate return array", "ret", "=", "_empty_nd_impl", "(", "context", ",", "builder", ",", "retty", ",", "ret_shapes", ")", "ret_strides", "=", "cgutils", ".", "unpack_tuple", "(", "builder", ",", "ret", ".", "strides", ")", "# Compute the offset by which to bump the destination pointer", "# after copying each input array.", "# Morally, we need to copy each input array at different start indices", "# into the destination array; bumping the destination pointer", "# is simply easier than offsetting all destination indices.", "copy_offsets", "=", "[", "]", "for", "arr_sh", "in", "arr_shapes", ":", "# offset = ret_strides[axis] * input_shape[axis]", "offset", "=", "zero", "for", "dim", ",", "(", "size", ",", "stride", ")", "in", "enumerate", "(", "zip", "(", "arr_sh", ",", "ret_strides", ")", ")", ":", "is_axis", "=", "builder", ".", "icmp_signed", "(", "'=='", ",", "axis", ".", "type", "(", "dim", ")", ",", "axis", ")", "addend", "=", "builder", ".", "mul", "(", "size", ",", "stride", ")", "offset", "=", "builder", ".", "select", "(", "is_axis", ",", "builder", ".", "add", "(", "offset", ",", "addend", ")", ",", "offset", ")", "copy_offsets", ".", "append", "(", "offset", ")", "# Copy input arrays into the return array", "ret_data", "=", "ret", ".", "data", "for", "arrty", ",", "arr", ",", "arr_sh", ",", "arr_st", ",", "offset", "in", "zip", "(", "arrtys", ",", "arrs", ",", "arr_shapes", ",", "arr_strides", ",", "copy_offsets", ")", ":", "arr_data", "=", "arr", ".", "data", "# Do the copy loop", "# Note the loop nesting is optimized for the destination layout", "loop_nest", "=", "cgutils", ".", "loop_nest", "(", "builder", ",", "arr_sh", ",", "cgutils", ".", "intp_t", ",", "order", "=", "retty", ".", "layout", ")", "with", "loop_nest", "as", "indices", ":", "src_ptr", "=", "cgutils", ".", "get_item_pointer2", "(", "context", ",", "builder", ",", "arr_data", ",", "arr_sh", ",", "arr_st", ",", "arrty", ".", "layout", ",", "indices", ")", "val", "=", "load_item", "(", "context", ",", "builder", ",", "arrty", ",", "src_ptr", ")", "val", "=", "context", ".", "cast", "(", "builder", ",", "val", ",", "arrty", ".", "dtype", ",", "retty", ".", "dtype", ")", "dest_ptr", "=", "cgutils", ".", "get_item_pointer2", "(", "context", ",", "builder", ",", "ret_data", ",", "ret_shapes", ",", "ret_strides", ",", "retty", ".", "layout", ",", "indices", ")", "store_item", "(", "context", ",", "builder", ",", "retty", ",", "val", ",", "dest_ptr", ")", "# Bump destination pointer", "ret_data", "=", "cgutils", ".", "pointer_add", "(", "builder", ",", "ret_data", ",", "offset", ")", "return", "ret" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/targets/arrayobj.py#L4519-L4577

nwo: apiaryio/snowcrash
sha: b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3
path: tools/gyp/pylib/gyp/generator/analyzer.py
language: python
identifier: CalculateVariables
parameters: (default_variables, params)
docstring_summary: Calculate additional variables for use in the build (called by gyp).
[ "Calculate", "additional", "variables", "for", "use", "in", "the", "build", "(", "called", "by", "gyp", ")", "." ]
function:

def CalculateVariables(default_variables, params):
    """Calculate additional variables for use in the build (called by gyp)."""
    flavor = gyp.common.GetFlavor(params)
    if flavor == 'mac':
        default_variables.setdefault('OS', 'mac')
    elif flavor == 'win':
        default_variables.setdefault('OS', 'win')
        # Copy additional generator configuration data from VS, which is shared
        # by the Windows Ninja generator.
        import gyp.generator.msvs as msvs_generator
        generator_additional_non_configuration_keys = getattr(
            msvs_generator, 'generator_additional_non_configuration_keys', [])
        generator_additional_path_sections = getattr(
            msvs_generator, 'generator_additional_path_sections', [])
        gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
    else:
        operating_system = flavor
        if flavor == 'android':
            operating_system = 'linux'  # Keep this legacy behavior for now.
        default_variables.setdefault('OS', operating_system)
[ "def", "CalculateVariables", "(", "default_variables", ",", "params", ")", ":", "flavor", "=", "gyp", ".", "common", ".", "GetFlavor", "(", "params", ")", "if", "flavor", "==", "'mac'", ":", "default_variables", ".", "setdefault", "(", "'OS'", ",", "'mac'", ")", "elif", "flavor", "==", "'win'", ":", "default_variables", ".", "setdefault", "(", "'OS'", ",", "'win'", ")", "# Copy additional generator configuration data from VS, which is shared", "# by the Windows Ninja generator.", "import", "gyp", ".", "generator", ".", "msvs", "as", "msvs_generator", "generator_additional_non_configuration_keys", "=", "getattr", "(", "msvs_generator", ",", "'generator_additional_non_configuration_keys'", ",", "[", "]", ")", "generator_additional_path_sections", "=", "getattr", "(", "msvs_generator", ",", "'generator_additional_path_sections'", ",", "[", "]", ")", "gyp", ".", "msvs_emulation", ".", "CalculateCommonVariables", "(", "default_variables", ",", "params", ")", "else", ":", "operating_system", "=", "flavor", "if", "flavor", "==", "'android'", ":", "operating_system", "=", "'linux'", "# Keep this legacy behavior for now.", "default_variables", ".", "setdefault", "(", "'OS'", ",", "operating_system", ")" ]
https://github.com/apiaryio/snowcrash/blob/b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3/tools/gyp/pylib/gyp/generator/analyzer.py#L575-L595

nwo: wxWidgets/wxPython-Classic
sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: src/gtk/_windows.py
language: python
identifier: PageSetupDialog.ShowModal
parameters: (*args, **kwargs)
return_statement: return _windows_.PageSetupDialog_ShowModal(*args, **kwargs)
docstring_summary: ShowModal(self) -> int
[ "ShowModal", "(", "self", ")", "-", ">", "int" ]
function:

def ShowModal(*args, **kwargs):
    """ShowModal(self) -> int"""
    return _windows_.PageSetupDialog_ShowModal(*args, **kwargs)
[ "def", "ShowModal", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "PageSetupDialog_ShowModal", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L5020-L5022

nwo: pmq20/node-packer
sha: 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
path: lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
language: python
identifier: _GenerateMSVSProject
parameters: (project, options, version, generator_flags)
return_statement: return missing_sources
docstring_summary: Generates a .vcproj file. It may create .rules and .user files too.
[ "Generates", "a", ".", "vcproj", "file", ".", "It", "may", "create", ".", "rules", "and", ".", "user", "files", "too", "." ]
function:

def _GenerateMSVSProject(project, options, version, generator_flags):
    """Generates a .vcproj file.  It may create .rules and .user files too.

    Arguments:
      project: The project object we will generate the file for.
      options: Global options passed to the generator.
      version: The VisualStudioVersion object.
      generator_flags: dict of generator-specific flags.
    """
    spec = project.spec
    gyp.common.EnsureDirExists(project.path)

    platforms = _GetUniquePlatforms(spec)
    p = MSVSProject.Writer(project.path, version, spec['target_name'],
                           project.guid, platforms)

    # Get directory project file is in.
    project_dir = os.path.split(project.path)[0]
    gyp_path = _NormalizedSource(project.build_file)
    relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)

    config_type = _GetMSVSConfigurationType(spec, project.build_file)
    for config_name, config in spec['configurations'].items():
        _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)

    # MSVC08 and prior version cannot handle duplicate basenames in the same
    # target.
    # TODO: Take excluded sources into consideration if possible.
    _ValidateSourcesForMSVSProject(spec, version)

    # Prepare list of sources and excluded sources.
    gyp_file = os.path.split(project.build_file)[1]
    sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
                                                      gyp_file)

    # Add rules.
    actions_to_add = {}
    _GenerateRulesForMSVS(p, project_dir, options, spec,
                          sources, excluded_sources,
                          actions_to_add)
    list_excluded = generator_flags.get('msvs_list_excluded_files', True)
    sources, excluded_sources, excluded_idl = (
        _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir,
                                                  sources, excluded_sources,
                                                  list_excluded, version))

    # Add in files.
    missing_sources = _VerifySourcesExist(sources, project_dir)
    p.AddFiles(sources)

    _AddToolFilesToMSVS(p, spec)
    _HandlePreCompiledHeaders(p, sources, spec)
    _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
    _AddCopies(actions_to_add, spec)
    _WriteMSVSUserFile(project.path, version, spec)

    # NOTE: this stanza must appear after all actions have been decided.
    # Don't excluded sources with actions attached, or they won't run.
    excluded_sources = _FilterActionsFromExcluded(
        excluded_sources, actions_to_add)
    _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
                                list_excluded)
    _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)

    # Write it out.
    p.WriteIfChanged()

    return missing_sources
[ "def", "_GenerateMSVSProject", "(", "project", ",", "options", ",", "version", ",", "generator_flags", ")", ":", "spec", "=", "project", ".", "spec", "gyp", ".", "common", ".", "EnsureDirExists", "(", "project", ".", "path", ")", "platforms", "=", "_GetUniquePlatforms", "(", "spec", ")", "p", "=", "MSVSProject", ".", "Writer", "(", "project", ".", "path", ",", "version", ",", "spec", "[", "'target_name'", "]", ",", "project", ".", "guid", ",", "platforms", ")", "# Get directory project file is in.", "project_dir", "=", "os", ".", "path", ".", "split", "(", "project", ".", "path", ")", "[", "0", "]", "gyp_path", "=", "_NormalizedSource", "(", "project", ".", "build_file", ")", "relative_path_of_gyp_file", "=", "gyp", ".", "common", ".", "RelativePath", "(", "gyp_path", ",", "project_dir", ")", "config_type", "=", "_GetMSVSConfigurationType", "(", "spec", ",", "project", ".", "build_file", ")", "for", "config_name", ",", "config", "in", "spec", "[", "'configurations'", "]", ".", "items", "(", ")", ":", "_AddConfigurationToMSVSProject", "(", "p", ",", "spec", ",", "config_type", ",", "config_name", ",", "config", ")", "# MSVC08 and prior version cannot handle duplicate basenames in the same", "# target.", "# TODO: Take excluded sources into consideration if possible.", "_ValidateSourcesForMSVSProject", "(", "spec", ",", "version", ")", "# Prepare list of sources and excluded sources.", "gyp_file", "=", "os", ".", "path", ".", "split", "(", "project", ".", "build_file", ")", "[", "1", "]", "sources", ",", "excluded_sources", "=", "_PrepareListOfSources", "(", "spec", ",", "generator_flags", ",", "gyp_file", ")", "# Add rules.", "actions_to_add", "=", "{", "}", "_GenerateRulesForMSVS", "(", "p", ",", "project_dir", ",", "options", ",", "spec", ",", "sources", ",", "excluded_sources", ",", "actions_to_add", ")", "list_excluded", "=", "generator_flags", ".", "get", "(", "'msvs_list_excluded_files'", ",", "True", ")", "sources", ",", "excluded_sources", ",", "excluded_idl", "=", "(", "_AdjustSourcesAndConvertToFilterHierarchy", "(", "spec", ",", "options", ",", "project_dir", ",", "sources", ",", "excluded_sources", ",", "list_excluded", ",", "version", ")", ")", "# Add in files.", "missing_sources", "=", "_VerifySourcesExist", "(", "sources", ",", "project_dir", ")", "p", ".", "AddFiles", "(", "sources", ")", "_AddToolFilesToMSVS", "(", "p", ",", "spec", ")", "_HandlePreCompiledHeaders", "(", "p", ",", "sources", ",", "spec", ")", "_AddActions", "(", "actions_to_add", ",", "spec", ",", "relative_path_of_gyp_file", ")", "_AddCopies", "(", "actions_to_add", ",", "spec", ")", "_WriteMSVSUserFile", "(", "project", ".", "path", ",", "version", ",", "spec", ")", "# NOTE: this stanza must appear after all actions have been decided.", "# Don't excluded sources with actions attached, or they won't run.", "excluded_sources", "=", "_FilterActionsFromExcluded", "(", "excluded_sources", ",", "actions_to_add", ")", "_ExcludeFilesFromBeingBuilt", "(", "p", ",", "spec", ",", "excluded_sources", ",", "excluded_idl", ",", "list_excluded", ")", "_AddAccumulatedActionsToMSVS", "(", "p", ",", "spec", ",", "actions_to_add", ")", "# Write it out.", "p", ".", "WriteIfChanged", "(", ")", "return", "missing_sources" ]
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py#L991-L1058

nwo: keyboardio/Kaleidoscope
sha: d59604e98b2439d108647f15be52984a6837d360
path: bin/cpplint.py
language: python
identifier: _SetCountingStyle
parameters: (level)
docstring_summary: Sets the module's counting options.
[ "Sets", "the", "module", "s", "counting", "options", "." ]
function:

def _SetCountingStyle(level):
    """Sets the module's counting options."""
    _cpplint_state.SetCountingStyle(level)
[ "def", "_SetCountingStyle", "(", "level", ")", ":", "_cpplint_state", ".", "SetCountingStyle", "(", "level", ")" ]
https://github.com/keyboardio/Kaleidoscope/blob/d59604e98b2439d108647f15be52984a6837d360/bin/cpplint.py#L1190-L1192

nwo: wxWidgets/wxPython-Classic
sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: wx/lib/masked/numctrl.py
language: python
identifier: NumCtrl.GetLimitOnFieldChange
parameters: (self)
return_statement: return self.IsLimitedOnFieldChange()
docstring_summary: (For regularization of property accessors)
[ "(", "For", "regularization", "of", "property", "accessors", ")" ]
function:

def GetLimitOnFieldChange(self):
    """ (For regularization of property accessors) """
    return self.IsLimitedOnFieldChange()
[ "def", "GetLimitOnFieldChange", "(", "self", ")", ":", "return", "self", ".", "IsLimitedOnFieldChange", "(", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/masked/numctrl.py#L1455-L1457

nwo: mindspore-ai/mindspore
sha: fb8fd3338605bb34fa5cea054e535a8b1d753fab
path: mindspore/python/mindspore/train/model.py
language: python
identifier: Model.predict
parameters: (self, *predict_data)
return_statement: return result
docstring_summary: Generate output predictions for the input samples.
[ "Generate", "output", "predictions", "for", "the", "input", "samples", "." ]
function:

def predict(self, *predict_data):
    """
    Generate output predictions for the input samples.

    Args:
        predict_data (Optional[Tensor, list[Tensor], tuple[Tensor]]): The predict data,
            can be a single tensor, a list of tensor, or a tuple of tensor.

    Returns:
        Tensor, array(s) of predictions.

    Examples:
        >>> import mindspore as ms
        >>> from mindspore import Model, Tensor
        >>>
        >>> input_data = Tensor(np.random.randint(0, 255, [1, 1, 32, 32]), ms.float32)
        >>> model = Model(Net())
        >>> result = model.predict(input_data)
    """
    self._predict_network.set_train(False)
    check_input_data(*predict_data, data_class=(int, float, str, None, Tensor))
    _parallel_predict_check()
    result = self._predict_network(*predict_data)
    check_output_data(result)
    return result
[ "def", "predict", "(", "self", ",", "*", "predict_data", ")", ":", "self", ".", "_predict_network", ".", "set_train", "(", "False", ")", "check_input_data", "(", "*", "predict_data", ",", "data_class", "=", "(", "int", ",", "float", ",", "str", ",", "None", ",", "Tensor", ")", ")", "_parallel_predict_check", "(", ")", "result", "=", "self", ".", "_predict_network", "(", "*", "predict_data", ")", "check_output_data", "(", "result", ")", "return", "result" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/train/model.py#L987-L1012

nwo: hpi-xnor/BMXNet
sha: ed0b201da6667887222b8e4b5f997c4f6b61943d
path: python/mxnet/ndarray/ndarray.py
language: python
identifier: NDArray.degrees
parameters: (self, *args, **kwargs)
return_statement: return op.degrees(self, *args, **kwargs)
docstring_summary: Convenience fluent method for :py:func:`degrees`.
[ "Convenience", "fluent", "method", "for", ":", "py", ":", "func", ":", "degrees", "." ]
function:

def degrees(self, *args, **kwargs):
    """Convenience fluent method for :py:func:`degrees`.

    The arguments are the same as for :py:func:`degrees`, with
    this array as data.
    """
    return op.degrees(self, *args, **kwargs)
[ "def", "degrees", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "op", ".", "degrees", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/python/mxnet/ndarray/ndarray.py#L1356-L1362

nwo: wxWidgets/wxPython-Classic
sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: src/osx_cocoa/stc.py
language: python
identifier: StyledTextCtrl.SetVScrollBar
parameters: (*args, **kwargs)
return_statement: return _stc.StyledTextCtrl_SetVScrollBar(*args, **kwargs)
docstring_summary: SetVScrollBar(self, ScrollBar bar)
[ "SetVScrollBar", "(", "self", "ScrollBar", "bar", ")" ]
function:

def SetVScrollBar(*args, **kwargs):
    """
    SetVScrollBar(self, ScrollBar bar)

    Set the vertical scrollbar to use instead of the one that's built-in.
    """
    return _stc.StyledTextCtrl_SetVScrollBar(*args, **kwargs)
[ "def", "SetVScrollBar", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_stc", ".", "StyledTextCtrl_SetVScrollBar", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/stc.py#L6629-L6635

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/requests/_internal_utils.py
language: python
identifier: unicode_is_ascii
parameters: (u_string)
docstring_summary: Determine if unicode string only contains ASCII characters.
[ "Determine", "if", "unicode", "string", "only", "contains", "ASCII", "characters", "." ]
function:

def unicode_is_ascii(u_string):
    """Determine if unicode string only contains ASCII characters.

    :param str u_string: unicode string to check. Must be unicode
        and not Python 2 `str`.
    :rtype: bool
    """
    assert isinstance(u_string, str)
    try:
        u_string.encode('ascii')
        return True
    except UnicodeEncodeError:
        return False
[ "def", "unicode_is_ascii", "(", "u_string", ")", ":", "assert", "isinstance", "(", "u_string", ",", "str", ")", "try", ":", "u_string", ".", "encode", "(", "'ascii'", ")", "return", "True", "except", "UnicodeEncodeError", ":", "return", "False" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/requests/_internal_utils.py#L30-L42
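
A quick standalone check of the record above; the function body is copied verbatim, and the two calls with their expected results are illustrative:

# Standalone usage sketch for unicode_is_ascii (body copied from the record).
def unicode_is_ascii(u_string):
    assert isinstance(u_string, str)
    try:
        u_string.encode('ascii')
        return True
    except UnicodeEncodeError:
        return False

print(unicode_is_ascii("header-value"))  # True: pure ASCII
print(unicode_is_ascii("naïve"))         # False: 'ï' is outside ASCII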

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/types/containers.py
language: python
identifier: UniTuple.unify
parameters: (self, typingctx, other)
docstring_summary: Unify UniTuples with their dtype
[ "Unify", "UniTuples", "with", "their", "dtype" ]
function:

def unify(self, typingctx, other):
    """
    Unify UniTuples with their dtype
    """
    if isinstance(other, UniTuple) and len(self) == len(other):
        dtype = typingctx.unify_pairs(self.dtype, other.dtype)
        if dtype is not None:
            return UniTuple(dtype=dtype, count=self.count)
[ "def", "unify", "(", "self", ",", "typingctx", ",", "other", ")", ":", "if", "isinstance", "(", "other", ",", "UniTuple", ")", "and", "len", "(", "self", ")", "==", "len", "(", "other", ")", ":", "dtype", "=", "typingctx", ".", "unify_pairs", "(", "self", ".", "dtype", ",", "other", ".", "dtype", ")", "if", "dtype", "is", "not", "None", ":", "return", "UniTuple", "(", "dtype", "=", "dtype", ",", "count", "=", "self", ".", "count", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/types/containers.py#L218-L225

nwo: freeorion/freeorion
sha: c266a40eccd3a99a17de8fe57c36ef6ba3771665
path: default/python/AI/savegame_codec/_decoder.py
language: python
identifier: _replace_quote_placeholders
parameters: (s)
return_statement: return s
docstring_summary: Replace PLACEHOLDER with quotes if not nested within another encoded container.
[ "Replace", "PLACEHOLDER", "with", "quotes", "if", "not", "nested", "within", "another", "encoded", "container", "." ]
function:

def _replace_quote_placeholders(s):
    """Replace PLACEHOLDER with quotes if not nested within another encoded container.

    To be able to use tuples as dictionary keys, to use standard json decoder,
    the entire tuple with its content must be encoded as a single string.
    The inner objects may no longer be quoted as that would prematurely
    terminate the strings. Inner quotes are therefore replaced with the
    PLACEHOLDER char.

    Example:
        output = encode(tuple(["1", "string"]))
        "__TUPLE__([$1$, $string$])"

    To be able to decode the inner content, the PLACEHOLDER must be
    converted to quotes again.
    """
    n = 0      # counts nesting level (i.e. number of opened but not closed parentheses)
    start = 0  # starting point for string replacement
    for i in range(len(s)):
        if s[i] == "(":
            # if this is an outer opening parenthesis, then replace
            # placeholder from last parenthesis to here
            if n == 0:
                s = s[:start] + s[start:i].replace(PLACEHOLDER, '"') + s[i:]
            n += 1
        elif s[i] == ")":
            n -= 1
            if n == 0:
                start = i
    s = s[:start] + s[start:].replace(PLACEHOLDER, '"')
    return s
[ "def", "_replace_quote_placeholders", "(", "s", ")", ":", "n", "=", "0", "# counts nesting level (i.e. number of opened but not closed parentheses)", "start", "=", "0", "# starting point for string replacement", "for", "i", "in", "range", "(", "len", "(", "s", ")", ")", ":", "if", "s", "[", "i", "]", "==", "\"(\"", ":", "# if this is an outer opening parenthesis, then replace placeholder from last parenthesis to here", "if", "n", "==", "0", ":", "s", "=", "s", "[", ":", "start", "]", "+", "s", "[", "start", ":", "i", "]", ".", "replace", "(", "PLACEHOLDER", ",", "'\"'", ")", "+", "s", "[", "i", ":", "]", "n", "+=", "1", "elif", "s", "[", "i", "]", "==", "\")\"", ":", "n", "-=", "1", "if", "n", "==", "0", ":", "start", "=", "i", "s", "=", "s", "[", ":", "start", "]", "+", "s", "[", "start", ":", "]", ".", "replace", "(", "PLACEHOLDER", ",", "'\"'", ")", "return", "s" ]
https://github.com/freeorion/freeorion/blob/c266a40eccd3a99a17de8fe57c36ef6ba3771665/default/python/AI/savegame_codec/_decoder.py#L192-L220
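
A behavior sketch for the record above. The PLACEHOLDER constant is module state not shown in the record; '$' is assumed here only because the docstring's example uses $...$ for inner quotes:

# Sketch: placeholders outside parentheses become quotes; anything nested
# inside a parenthesized container is preserved for the inner decoder.
# PLACEHOLDER = '$' is an assumption; function body copied from the record.
PLACEHOLDER = '$'

def _replace_quote_placeholders(s):
    n = 0      # nesting level of parentheses
    start = 0  # starting point for string replacement
    for i in range(len(s)):
        if s[i] == "(":
            if n == 0:
                s = s[:start] + s[start:i].replace(PLACEHOLDER, '"') + s[i:]
            n += 1
        elif s[i] == ")":
            n -= 1
            if n == 0:
                start = i
    s = s[:start] + s[start:].replace(PLACEHOLDER, '"')
    return s

print(_replace_quote_placeholders('$abc$ (keep $inner$) $def$'))
# -> "abc" (keep $inner$) "def"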

nwo: Xilinx/Vitis-AI
sha: fc74d404563d9951b57245443c73bef389f3657f
path: tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/checkpoint/python/python_state.py
language: python
identifier: NumpyState.__getattribute__
parameters: (self, name)
return_statement: return value
docstring_summary: Un-wrap `_NumpyWrapper` objects when accessing attributes.
[ "Un", "-", "wrap", "_NumpyWrapper", "objects", "when", "accessing", "attributes", "." ]
function:

def __getattribute__(self, name):
    """Un-wrap `_NumpyWrapper` objects when accessing attributes."""
    value = super(NumpyState, self).__getattribute__(name)
    if isinstance(value, _NumpyWrapper):
        return value.array
    return value
[ "def", "__getattribute__", "(", "self", ",", "name", ")", ":", "value", "=", "super", "(", "NumpyState", ",", "self", ")", ".", "__getattribute__", "(", "name", ")", "if", "isinstance", "(", "value", ",", "_NumpyWrapper", ")", ":", "return", "value", ".", "array", "return", "value" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/checkpoint/python/python_state.py#L91-L96

nwo: ChromiumWebApps/chromium
sha: c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
path: remoting/tools/build/remoting_copy_locales.py
language: python
identifier: repack_locales
parameters: (locales)
docstring_summary: Loop over and repack the given locales.
[ "Loop", "over", "and", "repack", "the", "given", "locales", "." ]
function:

def repack_locales(locales):
    """ Loop over and repack the given locales."""
    for locale in locales:
        inputs = calc_inputs(locale)
        output = calc_output(locale)
        data_pack.DataPack.RePack(output, inputs)
[ "def", "repack_locales", "(", "locales", ")", ":", "for", "locale", "in", "locales", ":", "inputs", "=", "calc_inputs", "(", "locale", ")", "output", "=", "calc_output", "(", "locale", ")", "data_pack", ".", "DataPack", ".", "RePack", "(", "output", ",", "inputs", ")" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/remoting/tools/build/remoting_copy_locales.py#L90-L95

nwo: goldeneye-source/ges-code
sha: 2630cd8ef3d015af53c72ec2e19fc1f7e7fe8d9d
path: thirdparty/protobuf-2.3.0/python/google/protobuf/internal/containers.py
language: python
identifier: BaseContainer.__init__
parameters: (self, message_listener)
docstring_summary: message_listener: A MessageListener implementation. The RepeatedScalarFieldContainer will call this object's Modified() method when it is modified.
[ "Args", ":", "message_listener", ":", "A", "MessageListener", "implementation", ".", "The", "RepeatedScalarFieldContainer", "will", "call", "this", "object", "s", "Modified", "()", "method", "when", "it", "is", "modified", "." ]
function:

def __init__(self, message_listener):
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []
[ "def", "__init__", "(", "self", ",", "message_listener", ")", ":", "self", ".", "_message_listener", "=", "message_listener", "self", ".", "_values", "=", "[", "]" ]
https://github.com/goldeneye-source/ges-code/blob/2630cd8ef3d015af53c72ec2e19fc1f7e7fe8d9d/thirdparty/protobuf-2.3.0/python/google/protobuf/internal/containers.py#L52-L60

nwo: gemrb/gemrb
sha: 730206eed8d1dd358ca5e69a62f9e099aa22ffc6
path: gemrb/GUIScripts/bg2/LUHLASelection.py
language: python
identifier: HLAMarkButton
parameters: (i, select)
return_statement: return
docstring_summary: Enables, disables, or highlights the given button.
[ "Enables", "disables", "or", "highlights", "the", "given", "button", "." ]
function:

def HLAMarkButton (i, select):
    """Enables, disables, or highlights the given button.

    If select is true, the button is highlighted."""
    j = ( GemRB.GetVar("HLATopIndex") + 1 ) * 5 - 5

    if select:
        state = IE_GUI_BUTTON_SELECTED
    else:
        if HLAAbilities[i][1]:
            state = IE_GUI_BUTTON_ENABLED
        else:  # can't learn
            state = IE_GUI_BUTTON_LOCKED

    # we have to use the index on the actual grid
    SpellButton = HLAWindow.GetControl(i-j)
    SpellButton.SetState(state)
    return
[ "def", "HLAMarkButton", "(", "i", ",", "select", ")", ":", "j", "=", "(", "GemRB", ".", "GetVar", "(", "\"HLATopIndex\"", ")", "+", "1", ")", "*", "5", "-", "5", "if", "select", ":", "state", "=", "IE_GUI_BUTTON_SELECTED", "else", ":", "if", "HLAAbilities", "[", "i", "]", "[", "1", "]", ":", "state", "=", "IE_GUI_BUTTON_ENABLED", "else", ":", "# can't learn", "state", "=", "IE_GUI_BUTTON_LOCKED", "# we have to use the index on the actual grid", "SpellButton", "=", "HLAWindow", ".", "GetControl", "(", "i", "-", "j", ")", "SpellButton", ".", "SetState", "(", "state", ")", "return" ]
https://github.com/gemrb/gemrb/blob/730206eed8d1dd358ca5e69a62f9e099aa22ffc6/gemrb/GUIScripts/bg2/LUHLASelection.py#L270-L288

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/os.py
language: python
identifier: getenv
parameters: (key, default=None)
return_statement: return environ.get(key, default)
docstring_summary: Get an environment variable, return None if it doesn't exist.
[ "Get", "an", "environment", "variable", "return", "None", "if", "it", "doesn", "t", "exist", ".", "The", "optional", "second", "argument", "can", "specify", "an", "alternate", "default", ".", "key", "default", "and", "the", "result", "are", "str", "." ]
function:

def getenv(key, default=None):
    """Get an environment variable, return None if it doesn't exist.
    The optional second argument can specify an alternate default.
    key, default and the result are str."""
    return environ.get(key, default)
[ "def", "getenv", "(", "key", ",", "default", "=", "None", ")", ":", "return", "environ", ".", "get", "(", "key", ",", "default", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/os.py#L772-L776
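
Usage sketch: this record is the standard-library os.getenv itself, so it can be exercised directly:

# os.getenv wraps environ.get, as the record shows.
import os

path = os.getenv('PATH')            # None if PATH happens to be unset
editor = os.getenv('EDITOR', 'vi')  # falls back to the given default
print(type(editor).__name__)        # str: key, default and result are str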

nwo: panda3d/panda3d
sha: 833ad89ebad58395d0af0b7ec08538e5e4308265
path: contrib/src/sceneeditor/seMopathRecorder.py
language: python
identifier: namePathPanel.onDestroy
parameters: (self, event)
docstring_summary: If you have open any thing, please rewrite here!
[ "If", "you", "have", "open", "any", "thing", "please", "rewrite", "here!" ]
function:

def onDestroy(self, event):
    '''
    If you have open any thing, please rewrite here!
    '''
    pass
[ "def", "onDestroy", "(", "self", ",", "event", ")", ":", "pass" ]
https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/contrib/src/sceneeditor/seMopathRecorder.py#L2063-L2067

nwo: wxWidgets/wxPython-Classic
sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: wx/tools/Editra/src/extern/aui/auibook.py
language: python
identifier: AuiTabContainer.SetAGWFlags
parameters: (self, agwFlags)
docstring_summary: Sets the tab art flags.
[ "Sets", "the", "tab", "art", "flags", "." ]
function:

def SetAGWFlags(self, agwFlags):
    """
    Sets the tab art flags.

    :param integer `agwFlags`: a combination of the following values:

     ==================================== ==================================
     Flag name                            Description
     ==================================== ==================================
     ``AUI_NB_TOP``                       With this style, tabs are drawn along the top of the notebook
     ``AUI_NB_LEFT``                      With this style, tabs are drawn along the left of the notebook. Not implemented yet
     ``AUI_NB_RIGHT``                     With this style, tabs are drawn along the right of the notebook. Not implemented yet
     ``AUI_NB_BOTTOM``                    With this style, tabs are drawn along the bottom of the notebook
     ``AUI_NB_TAB_SPLIT``                 Allows the tab control to be split by dragging a tab
     ``AUI_NB_TAB_MOVE``                  Allows a tab to be moved horizontally by dragging
     ``AUI_NB_TAB_EXTERNAL_MOVE``         Allows a tab to be moved to another tab control
     ``AUI_NB_TAB_FIXED_WIDTH``           With this style, all tabs have the same width
     ``AUI_NB_SCROLL_BUTTONS``            With this style, left and right scroll buttons are displayed
     ``AUI_NB_WINDOWLIST_BUTTON``         With this style, a drop-down list of windows is available
     ``AUI_NB_CLOSE_BUTTON``              With this style, a close button is available on the tab bar
     ``AUI_NB_CLOSE_ON_ACTIVE_TAB``       With this style, a close button is available on the active tab
     ``AUI_NB_CLOSE_ON_ALL_TABS``         With this style, a close button is available on all tabs
     ``AUI_NB_MIDDLE_CLICK_CLOSE``        Allows to close :class:`AuiNotebook` tabs by mouse middle button click
     ``AUI_NB_SUB_NOTEBOOK``              This style is used by :class:`~lib.agw.aui.framemanager.AuiManager` to create automatic AuiNotebooks
     ``AUI_NB_HIDE_ON_SINGLE_TAB``        Hides the tab window if only one tab is present
     ``AUI_NB_SMART_TABS``                Use Smart Tabbing, like ``Alt`` + ``Tab`` on Windows
     ``AUI_NB_USE_IMAGES_DROPDOWN``       Uses images on dropdown window list menu instead of check items
     ``AUI_NB_CLOSE_ON_TAB_LEFT``         Draws the tab close button on the left instead of on the right (a la Camino browser)
     ``AUI_NB_TAB_FLOAT``                 Allows the floating of single tabs. Known limitation: when the notebook is more or less full screen, tabs cannot be dragged far enough outside of the notebook to become floating pages
     ``AUI_NB_DRAW_DND_TAB``              Draws an image representation of a tab while dragging (on by default)
     ``AUI_NB_ORDER_BY_ACCESS``           Tab navigation order by last access time for the tabs
     ``AUI_NB_NO_TAB_FOCUS``              Don't draw tab focus rectangle
     ==================================== ==================================

    .. todo:: Implementation of flags ``AUI_NB_RIGHT`` and ``AUI_NB_LEFT``.
    """
    self._agwFlags = agwFlags

    # check for new close button settings
    self.RemoveButton(AUI_BUTTON_LEFT)
    self.RemoveButton(AUI_BUTTON_RIGHT)
    self.RemoveButton(AUI_BUTTON_WINDOWLIST)
    self.RemoveButton(AUI_BUTTON_CLOSE)

    if agwFlags & AUI_NB_SCROLL_BUTTONS:
        self.AddButton(AUI_BUTTON_LEFT, wx.LEFT)
        self.AddButton(AUI_BUTTON_RIGHT, wx.RIGHT)

    if agwFlags & AUI_NB_WINDOWLIST_BUTTON:
        self.AddButton(AUI_BUTTON_WINDOWLIST, wx.RIGHT)

    if agwFlags & AUI_NB_CLOSE_BUTTON:
        self.AddButton(AUI_BUTTON_CLOSE, wx.RIGHT)

    if self._art:
        self._art.SetAGWFlags(self._agwFlags)
[ "def", "SetAGWFlags", "(", "self", ",", "agwFlags", ")", ":", "self", ".", "_agwFlags", "=", "agwFlags", "# check for new close button settings", "self", ".", "RemoveButton", "(", "AUI_BUTTON_LEFT", ")", "self", ".", "RemoveButton", "(", "AUI_BUTTON_RIGHT", ")", "self", ".", "RemoveButton", "(", "AUI_BUTTON_WINDOWLIST", ")", "self", ".", "RemoveButton", "(", "AUI_BUTTON_CLOSE", ")", "if", "agwFlags", "&", "AUI_NB_SCROLL_BUTTONS", ":", "self", ".", "AddButton", "(", "AUI_BUTTON_LEFT", ",", "wx", ".", "LEFT", ")", "self", ".", "AddButton", "(", "AUI_BUTTON_RIGHT", ",", "wx", ".", "RIGHT", ")", "if", "agwFlags", "&", "AUI_NB_WINDOWLIST_BUTTON", ":", "self", ".", "AddButton", "(", "AUI_BUTTON_WINDOWLIST", ",", "wx", ".", "RIGHT", ")", "if", "agwFlags", "&", "AUI_NB_CLOSE_BUTTON", ":", "self", ".", "AddButton", "(", "AUI_BUTTON_CLOSE", ",", "wx", ".", "RIGHT", ")", "if", "self", ".", "_art", ":", "self", ".", "_art", ".", "SetAGWFlags", "(", "self", ".", "_agwFlags", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/extern/aui/auibook.py#L880-L938

nwo: apache/thrift
sha: 0b29261a4f3c6882ef3b09aae47914f0012b0472
path: lib/py/src/server/TNonblockingServer.py
language: python
identifier: Connection.fileno
parameters: (self)
return_statement: return self.socket.fileno()
docstring_summary: Returns the file descriptor of the associated socket.
[ "Returns", "the", "file", "descriptor", "of", "the", "associated", "socket", "." ]
function:

def fileno(self):
    """Returns the file descriptor of the associated socket."""
    return self.socket.fileno()
[ "def", "fileno", "(", "self", ")", ":", "return", "self", ".", "socket", ".", "fileno", "(", ")" ]
https://github.com/apache/thrift/blob/0b29261a4f3c6882ef3b09aae47914f0012b0472/lib/py/src/server/TNonblockingServer.py#L223-L225
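
Why a bare fileno() method is useful: select() accepts any object exposing fileno(), so such a wrapper can be multiplexed directly. A minimal sketch with a socket pair standing in for Thrift connections (Conn is a stand-in, not the record's Connection class):

# select() treats any object with fileno() as selectable.
import select
import socket

class Conn:
    def __init__(self, sock):
        self.socket = sock

    def fileno(self):
        """Returns the file descriptor of the associated socket."""
        return self.socket.fileno()

a, b = socket.socketpair()
b.send(b'ping')
readable, _, _ = select.select([Conn(a)], [], [], 1.0)
print(len(readable))  # 1: the wrapped socket is ready to read
a.close()
b.close()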

nwo: ceph/ceph
sha: 959663007321a369c83218414a29bd9dbc8bda3a
path: src/pybind/mgr/dashboard/controllers/_permissions.py
language: python
identifier: CreatePermission
parameters: (func)
return_statement: return func
docstring_summary: :raises PermissionNotValid: If the permission is missing.
[ ":", "raises", "PermissionNotValid", ":", "If", "the", "permission", "is", "missing", "." ]
function:

def CreatePermission(func):  # noqa: N802
    """
    :raises PermissionNotValid: If the permission is missing.
    """
    _set_func_permissions(func, Permission.CREATE)
    return func
[ "def", "CreatePermission", "(", "func", ")", ":", "# noqa: N802", "_set_func_permissions", "(", "func", ",", "Permission", ".", "CREATE", ")", "return", "func" ]
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/src/pybind/mgr/dashboard/controllers/_permissions.py#L39-L44
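
A sketch of the pattern this record illustrates: a decorator that stamps permission metadata onto the function object and returns it unchanged. The real _set_func_permissions body and the attribute it sets are not shown in the record, so the names below are stand-ins:

# Stand-in sketch of an attribute-stamping decorator; CREATE and
# _security_permissions are hypothetical names, not the dashboard's internals.
CREATE = 'create'

def _set_func_permissions(func, permissions):
    if not isinstance(permissions, list):
        permissions = [permissions]
    func._security_permissions = permissions  # hypothetical attribute

def CreatePermission(func):  # noqa: N802
    _set_func_permissions(func, CREATE)
    return func

@CreatePermission
def create_user():
    pass

print(create_user._security_permissions)  # ['create']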

nwo: devsisters/libquic
sha: 8954789a056d8e7d5fcb6452fd1572ca57eb5c4e
path: src/third_party/protobuf/python/google/protobuf/internal/python_message.py
language: python
identifier: _AddHasExtensionMethod
parameters: (cls)
docstring_summary: Helper for _AddMessageMethods().
[ "Helper", "for", "_AddMessageMethods", "()", "." ]
function:

def _AddHasExtensionMethod(cls):
    """Helper for _AddMessageMethods()."""
    def HasExtension(self, extension_handle):
        _VerifyExtensionHandle(self, extension_handle)
        if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
            raise KeyError('"%s" is repeated.' % extension_handle.full_name)

        if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
            value = self._fields.get(extension_handle)
            return value is not None and value._is_present_in_parent
        else:
            return extension_handle in self._fields
    cls.HasExtension = HasExtension
[ "def", "_AddHasExtensionMethod", "(", "cls", ")", ":", "def", "HasExtension", "(", "self", ",", "extension_handle", ")", ":", "_VerifyExtensionHandle", "(", "self", ",", "extension_handle", ")", "if", "extension_handle", ".", "label", "==", "_FieldDescriptor", ".", "LABEL_REPEATED", ":", "raise", "KeyError", "(", "'\"%s\" is repeated.'", "%", "extension_handle", ".", "full_name", ")", "if", "extension_handle", ".", "cpp_type", "==", "_FieldDescriptor", ".", "CPPTYPE_MESSAGE", ":", "value", "=", "self", ".", "_fields", ".", "get", "(", "extension_handle", ")", "return", "value", "is", "not", "None", "and", "value", ".", "_is_present_in_parent", "else", ":", "return", "extension_handle", "in", "self", ".", "_fields", "cls", ".", "HasExtension", "=", "HasExtension" ]
https://github.com/devsisters/libquic/blob/8954789a056d8e7d5fcb6452fd1572ca57eb5c4e/src/third_party/protobuf/python/google/protobuf/internal/python_message.py#L902-L914

nwo: hfinkel/llvm-project-cxxjit
sha: 91084ef018240bbb8e24235ff5cd8c355a9c1a1e
path: lldb/utils/lui/lldbutil.py
language: python
identifier: which
parameters: (program)
return_statement: return None
docstring_summary: Returns the full path to a program; None otherwise.
[ "Returns", "the", "full", "path", "to", "a", "program", ";", "None", "otherwise", "." ]
function:

def which(program):
    """Returns the full path to a program; None otherwise."""
    fpath, fname = os.path.split(program)
    if fpath:
        if is_exe(program):
            return program
    else:
        for path in os.environ["PATH"].split(os.pathsep):
            exe_file = os.path.join(path, program)
            if is_exe(exe_file):
                return exe_file
    return None
[ "def", "which", "(", "program", ")", ":", "fpath", ",", "fname", "=", "os", ".", "path", ".", "split", "(", "program", ")", "if", "fpath", ":", "if", "is_exe", "(", "program", ")", ":", "return", "program", "else", ":", "for", "path", "in", "os", ".", "environ", "[", "\"PATH\"", "]", ".", "split", "(", "os", ".", "pathsep", ")", ":", "exe_file", "=", "os", ".", "path", ".", "join", "(", "path", ",", "program", ")", "if", "is_exe", "(", "exe_file", ")", ":", "return", "exe_file", "return", "None" ]
https://github.com/hfinkel/llvm-project-cxxjit/blob/91084ef018240bbb8e24235ff5cd8c355a9c1a1e/lldb/utils/lui/lldbutil.py#L30-L41
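
A standalone sketch of the record above; is_exe is not included in the record, so a conventional definition (regular file with the execute bit set) is assumed here:

# Standalone which() sketch; is_exe is an assumed helper, not from the record.
import os

def is_exe(fpath):
    return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

def which(program):
    """Returns the full path to a program; None otherwise."""
    fpath, fname = os.path.split(program)
    if fpath:
        if is_exe(program):
            return program
    else:
        for path in os.environ["PATH"].split(os.pathsep):
            exe_file = os.path.join(path, program)
            if is_exe(exe_file):
                return exe_file
    return None

print(which('ls'))  # e.g. /bin/ls on a typical Unix PATH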

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/cuda/api.py
language: python
identifier: event
parameters: (timing=True)
return_statement: return evt
docstring_summary: Create a CUDA event. Timing data is only recorded by the event if it is created with ``timing=True``.
[ "Create", "a", "CUDA", "event", ".", "Timing", "data", "is", "only", "recorded", "by", "the", "event", "if", "it", "is", "created", "with", "timing", "=", "True", "." ]
function:

def event(timing=True):
    """
    Create a CUDA event. Timing data is only recorded by the event if it is
    created with ``timing=True``.
    """
    evt = current_context().create_event(timing=timing)
    return evt
[ "def", "event", "(", "timing", "=", "True", ")", ":", "evt", "=", "current_context", "(", ")", ".", "create_event", "(", "timing", "=", "timing", ")", "return", "evt" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/cuda/api.py#L317-L323

nwo: Kitware/VTK
sha: 5b4df4d90a4f31194d97d3c639dd38ea8f81e8b8
path: Wrapping/Python/vtkmodules/tk/vtkTkRenderWidget.py
language: python
identifier: vtkTkRenderWidget.BindTkRenderWidget
parameters: (self)
docstring_summary: Bind some default actions.
[ "Bind", "some", "default", "actions", "." ]
function:

def BindTkRenderWidget(self):
    """
    Bind some default actions.
    """
    self.bind("<ButtonPress>", lambda e,s=self: s.StartMotion(e.x,e.y))
    self.bind("<ButtonRelease>", lambda e,s=self: s.EndMotion(e.x,e.y))
    self.bind("<B1-Motion>", lambda e,s=self: s.Rotate(e.x,e.y))
    self.bind("<B2-Motion>", lambda e,s=self: s.Pan(e.x,e.y))
    self.bind("<B3-Motion>", lambda e,s=self: s.Zoom(e.x,e.y))
    self.bind("<Shift-B1-Motion>", lambda e,s=self: s.Pan(e.x,e.y))
    self.bind("<KeyPress-r>", lambda e,s=self: s.Reset(e.x,e.y))
    self.bind("<KeyPress-u>", lambda e,s=self: s.deiconify())
    self.bind("<KeyPress-w>", lambda e,s=self: s.Wireframe())
    self.bind("<KeyPress-s>", lambda e,s=self: s.Surface())
    self.bind("<KeyPress-p>", lambda e,s=self: s.PickActor(e.x,e.y))
    if self._FocusOnEnter:
        self.bind("<Enter>", lambda e,s=self: s.Enter(e.x,e.y))
        self.bind("<Leave>", lambda e,s=self: s.Leave(e.x,e.y))
    else:
        self.bind("<ButtonPress>", lambda e,s=self: s.Enter(e.x,e.y))
    self.bind("<Expose>", lambda e,s=self: s.Expose())
[ "def", "BindTkRenderWidget", "(", "self", ")", ":", "self", ".", "bind", "(", "\"<ButtonPress>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "StartMotion", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "self", ".", "bind", "(", "\"<ButtonRelease>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "EndMotion", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "self", ".", "bind", "(", "\"<B1-Motion>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Rotate", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "self", ".", "bind", "(", "\"<B2-Motion>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Pan", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "self", ".", "bind", "(", "\"<B3-Motion>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Zoom", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "self", ".", "bind", "(", "\"<Shift-B1-Motion>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Pan", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "self", ".", "bind", "(", "\"<KeyPress-r>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Reset", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "self", ".", "bind", "(", "\"<KeyPress-u>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "deiconify", "(", ")", ")", "self", ".", "bind", "(", "\"<KeyPress-w>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Wireframe", "(", ")", ")", "self", ".", "bind", "(", "\"<KeyPress-s>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Surface", "(", ")", ")", "self", ".", "bind", "(", "\"<KeyPress-p>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "PickActor", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "if", "self", ".", "_FocusOnEnter", ":", "self", ".", "bind", "(", "\"<Enter>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Enter", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "self", ".", "bind", "(", "\"<Leave>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Leave", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "else", ":", "self", ".", "bind", "(", "\"<ButtonPress>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Enter", "(", "e", ".", "x", ",", "e", ".", "y", ")", ")", "self", ".", "bind", "(", "\"<Expose>\"", ",", "lambda", "e", ",", "s", "=", "self", ":", "s", ".", "Expose", "(", ")", ")" ]
https://github.com/Kitware/VTK/blob/5b4df4d90a4f31194d97d3c639dd38ea8f81e8b8/Wrapping/Python/vtkmodules/tk/vtkTkRenderWidget.py#L150-L185

nwo: wxWidgets/wxPython-Classic
sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: src/osx_cocoa/_core.py
language: python
identifier: Window.GetScreenRect
parameters: (*args, **kwargs)
return_statement: return _core_.Window_GetScreenRect(*args, **kwargs)
docstring_summary: GetScreenRect(self) -> Rect
[ "GetScreenRect", "(", "self", ")", "-", ">", "Rect" ]
function:

def GetScreenRect(*args, **kwargs):
    """
    GetScreenRect(self) -> Rect

    Returns the size and position of the window in screen coordinantes as
    a `wx.Rect` object.
    """
    return _core_.Window_GetScreenRect(*args, **kwargs)
[ "def", "GetScreenRect", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Window_GetScreenRect", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L9492-L9499

nwo: catboost/catboost
sha: 167f64f237114a4d10b2b4ee42adb4569137debe
path: contrib/python/scipy/scipy/sparse/base.py
language: python
identifier: spmatrix.count_nonzero
parameters: (self)
docstring_summary: Number of non-zero entries, equivalent to np.count_nonzero(a.toarray())
[ "Number", "of", "non", "-", "zero", "entries", "equivalent", "to" ]
function:

def count_nonzero(self):
    """Number of non-zero entries, equivalent to

    np.count_nonzero(a.toarray())

    Unlike getnnz() and the nnz property, which return the number of stored
    entries (the length of the data attribute), this method counts the
    actual number of non-zero entries in data.
    """
    raise NotImplementedError("count_nonzero not implemented for %s." %
                              self.__class__.__name__)
[ "def", "count_nonzero", "(", "self", ")", ":", "raise", "NotImplementedError", "(", "\"count_nonzero not implemented for %s.\"", "%", "self", ".", "__class__", ".", "__name__", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/sparse/base.py#L153-L163
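
Usage sketch with a concrete subclass (scipy.sparse.csr_matrix), which implements the base-class stub above; it shows the nnz vs. count_nonzero distinction the docstring describes:

# An explicitly stored zero counts toward nnz but not toward count_nonzero.
import numpy as np
from scipy import sparse

m = sparse.csr_matrix(np.array([[1, 0, 0], [0, 0, 2]]))
m.data[0] = 0             # explicitly stored zero
print(m.nnz)              # 2: stored entries (length of the data attribute)
print(m.count_nonzero())  # 1: actual non-zero entries in data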

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/setuptools/monkey.py
language: python
identifier: _patch_distribution_metadata
parameters: ()
docstring_summary: Patch write_pkg_file and read_pkg_file for higher metadata standards
[ "Patch", "write_pkg_file", "and", "read_pkg_file", "for", "higher", "metadata", "standards" ]
function:

def _patch_distribution_metadata():
    """Patch write_pkg_file and read_pkg_file for higher metadata standards"""
    for attr in ('write_pkg_file', 'read_pkg_file', 'get_metadata_version'):
        new_val = getattr(setuptools.dist, attr)
        setattr(distutils.dist.DistributionMetadata, attr, new_val)
[ "def", "_patch_distribution_metadata", "(", ")", ":", "for", "attr", "in", "(", "'write_pkg_file'", ",", "'read_pkg_file'", ",", "'get_metadata_version'", ")", ":", "new_val", "=", "getattr", "(", "setuptools", ".", "dist", ",", "attr", ")", "setattr", "(", "distutils", ".", "dist", ".", "DistributionMetadata", ",", "attr", ",", "new_val", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/setuptools/monkey.py#L104-L108
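
The general move this record illustrates: copy attributes from a replacement namespace onto a target class with getattr/setattr. A toy sketch with stand-in classes (the real code moves setuptools.dist functions onto distutils.dist.DistributionMetadata):

# Monkey-patch sketch: Legacy and Modern are stand-ins for the patched and
# patching namespaces.
class Legacy:
    def write(self):
        return 'legacy format'

class Modern:
    def write(self):
        return 'modern format'

for attr in ('write',):
    setattr(Legacy, attr, getattr(Modern, attr))

print(Legacy().write())  # 'modern format'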

nwo: hanpfei/chromium-net
sha: 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
path: third_party/catapult/third_party/mox3/mox3/mox.py
language: python
identifier: MethodSignatureChecker.Check
parameters: (self, params, named_params)
docstring_summary: Ensures that the parameters used while recording a call are valid.
[ "Ensures", "that", "the", "parameters", "used", "while", "recording", "a", "call", "are", "valid", "." ]
function:

def Check(self, params, named_params):
    """Ensures that the parameters used while recording a call are valid.

    Args:
        # params: A list of positional parameters.
        # named_params: A dict of named parameters.
        params: list
        named_params: dict

    Raises:
        AttributeError: the given parameters don't work with the given method.
    """
    arg_status = dict((a, MethodSignatureChecker._NEEDED)
                      for a in self._required_args)
    for arg in self._default_args:
        arg_status[arg] = MethodSignatureChecker._DEFAULT

    # WARNING: Suspect hack ahead.
    #
    # Check to see if this is an unbound method, where the instance
    # should be bound as the first argument.  We try to determine if
    # the first argument (param[0]) is an instance of the class, or it
    # is equivalent to the class (used to account for Comparators).
    #
    # NOTE: If a Func() comparator is used, and the signature is not
    # correct, this will cause extra executions of the function.
    if inspect.ismethod(self._method) or self._bounded_to:
        # The extra param accounts for the bound instance.
        if len(params) > len(self._required_args):
            expected = self._bounded_to

            # Check if the param is an instance of the expected class,
            # or check equality (useful for checking Comparators).
            # This is a hack to work around the fact that the first
            # parameter can be a Comparator, and the comparison may raise
            # an exception during this comparison, which is OK.
            try:
                param_equality = (params[0] == expected)
            except Exception:
                param_equality = False

            if isinstance(params[0], expected) or param_equality:
                params = params[1:]
            # If the IsA() comparator is being used, we need to check the
            # inverse of the usual case - that the given instance is a
            # subclass of the expected class. For example, the code under
            # test does late binding to a subclass.
            elif (isinstance(params[0], IsA) and
                    params[0]._IsSubClass(expected)):
                params = params[1:]

    # Check that each positional param is valid.
    for i in range(len(params)):
        try:
            arg_name = self._args[i]
        except IndexError:
            if not self._has_varargs:
                raise AttributeError(
                    '%s does not take %d or more positional '
                    'arguments' % (self._method.__name__, i))
        else:
            self._RecordArgumentGiven(arg_name, arg_status)

    # Check each keyword argument.
    for arg_name in named_params:
        if arg_name not in arg_status and not self._has_varkw:
            raise AttributeError('%s is not expecting keyword argument %s'
                                 % (self._method.__name__, arg_name))
        self._RecordArgumentGiven(arg_name, arg_status)

    # Ensure all the required arguments have been given.
    still_needed = [k for k, v in arg_status.items()
                    if v == MethodSignatureChecker._NEEDED]
    if still_needed:
        raise AttributeError('No values given for arguments: %s'
                             % (' '.join(sorted(still_needed))))
[ "def", "Check", "(", "self", ",", "params", ",", "named_params", ")", ":", "arg_status", "=", "dict", "(", "(", "a", ",", "MethodSignatureChecker", ".", "_NEEDED", ")", "for", "a", "in", "self", ".", "_required_args", ")", "for", "arg", "in", "self", ".", "_default_args", ":", "arg_status", "[", "arg", "]", "=", "MethodSignatureChecker", ".", "_DEFAULT", "# WARNING: Suspect hack ahead.", "#", "# Check to see if this is an unbound method, where the instance", "# should be bound as the first argument. We try to determine if", "# the first argument (param[0]) is an instance of the class, or it", "# is equivalent to the class (used to account for Comparators).", "#", "# NOTE: If a Func() comparator is used, and the signature is not", "# correct, this will cause extra executions of the function.", "if", "inspect", ".", "ismethod", "(", "self", ".", "_method", ")", "or", "self", ".", "_bounded_to", ":", "# The extra param accounts for the bound instance.", "if", "len", "(", "params", ")", ">", "len", "(", "self", ".", "_required_args", ")", ":", "expected", "=", "self", ".", "_bounded_to", "# Check if the param is an instance of the expected class,", "# or check equality (useful for checking Comparators).", "# This is a hack to work around the fact that the first", "# parameter can be a Comparator, and the comparison may raise", "# an exception during this comparison, which is OK.", "try", ":", "param_equality", "=", "(", "params", "[", "0", "]", "==", "expected", ")", "except", "Exception", ":", "param_equality", "=", "False", "if", "isinstance", "(", "params", "[", "0", "]", ",", "expected", ")", "or", "param_equality", ":", "params", "=", "params", "[", "1", ":", "]", "# If the IsA() comparator is being used, we need to check the", "# inverse of the usual case - that the given instance is a", "# subclass of the expected class. For example, the code under", "# test does late binding to a subclass.", "elif", "(", "isinstance", "(", "params", "[", "0", "]", ",", "IsA", ")", "and", "params", "[", "0", "]", ".", "_IsSubClass", "(", "expected", ")", ")", ":", "params", "=", "params", "[", "1", ":", "]", "# Check that each positional param is valid.", "for", "i", "in", "range", "(", "len", "(", "params", ")", ")", ":", "try", ":", "arg_name", "=", "self", ".", "_args", "[", "i", "]", "except", "IndexError", ":", "if", "not", "self", ".", "_has_varargs", ":", "raise", "AttributeError", "(", "'%s does not take %d or more positional '", "'arguments'", "%", "(", "self", ".", "_method", ".", "__name__", ",", "i", ")", ")", "else", ":", "self", ".", "_RecordArgumentGiven", "(", "arg_name", ",", "arg_status", ")", "# Check each keyword argument.", "for", "arg_name", "in", "named_params", ":", "if", "arg_name", "not", "in", "arg_status", "and", "not", "self", ".", "_has_varkw", ":", "raise", "AttributeError", "(", "'%s is not expecting keyword argument %s'", "%", "(", "self", ".", "_method", ".", "__name__", ",", "arg_name", ")", ")", "self", ".", "_RecordArgumentGiven", "(", "arg_name", ",", "arg_status", ")", "# Ensure all the required arguments have been given.", "still_needed", "=", "[", "k", "for", "k", ",", "v", "in", "arg_status", ".", "items", "(", ")", "if", "v", "==", "MethodSignatureChecker", ".", "_NEEDED", "]", "if", "still_needed", ":", "raise", "AttributeError", "(", "'No values given for arguments: %s'", "%", "(", "' '", ".", "join", "(", "sorted", "(", "still_needed", ")", ")", ")", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/mox3/mox3/mox.py#L952-L1029
kushview/Element
1cc16380caa2ab79461246ba758b9de1f46db2a5
waflib/Tools/suncc.py
python
find_scc
(conf)
Detects the Sun C compiler
Detects the Sun C compiler
[ "Detects", "the", "Sun", "C", "compiler" ]
def find_scc(conf): """ Detects the Sun C compiler """ v = conf.env cc = conf.find_program('cc', var='CC') try: conf.cmd_and_log(cc + ['-flags']) except Errors.WafError: conf.fatal('%r is not a Sun compiler' % cc) v.CC_NAME = 'sun' conf.get_suncc_version(cc)
[ "def", "find_scc", "(", "conf", ")", ":", "v", "=", "conf", ".", "env", "cc", "=", "conf", ".", "find_program", "(", "'cc'", ",", "var", "=", "'CC'", ")", "try", ":", "conf", ".", "cmd_and_log", "(", "cc", "+", "[", "'-flags'", "]", ")", "except", "Errors", ".", "WafError", ":", "conf", ".", "fatal", "(", "'%r is not a Sun compiler'", "%", "cc", ")", "v", ".", "CC_NAME", "=", "'sun'", "conf", ".", "get_suncc_version", "(", "cc", ")" ]
https://github.com/kushview/Element/blob/1cc16380caa2ab79461246ba758b9de1f46db2a5/waflib/Tools/suncc.py#L11-L22
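Usage note: in waf, a detector like find_scc above is normally triggered from a project's wscript rather than called directly. A minimal, hypothetical wscript sketch, assuming waf's standard tool-loading flow (which runs the tool's configuration functions, including find_scc):

    # wscript -- hypothetical project file
    def configure(conf):
        conf.load('suncc')   # finds cc, verifies it is a Sun compiler, sets CC_NAME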
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/glacier/layer1.py
python
Layer1.get_vault_notifications
(self, vault_name)
return self.make_request('GET', uri)
This operation retrieves the `notification-configuration` subresource of the specified vault. For information about setting a notification configuration on a vault, see SetVaultNotifications. If a notification configuration for a vault is not set, the operation returns a `404 Not Found` error. For more information about vault notifications, see `Configuring Vault Notifications in Amazon Glacier`_. An AWS account has full permission to perform all operations (actions). However, AWS Identity and Access Management (IAM) users don't have any permissions by default. You must grant them explicit permission to perform specific actions. For more information, see `Access Control Using AWS Identity and Access Management (IAM)`_. For conceptual information and underlying REST API, go to `Configuring Vault Notifications in Amazon Glacier`_ and `Get Vault Notification Configuration `_ in the Amazon Glacier Developer Guide . :type vault_name: string :param vault_name: The name of the vault.
This operation retrieves the `notification-configuration` subresource of the specified vault.
[ "This", "operation", "retrieves", "the", "notification", "-", "configuration", "subresource", "of", "the", "specified", "vault", "." ]
def get_vault_notifications(self, vault_name): """ This operation retrieves the `notification-configuration` subresource of the specified vault. For information about setting a notification configuration on a vault, see SetVaultNotifications. If a notification configuration for a vault is not set, the operation returns a `404 Not Found` error. For more information about vault notifications, see `Configuring Vault Notifications in Amazon Glacier`_. An AWS account has full permission to perform all operations (actions). However, AWS Identity and Access Management (IAM) users don't have any permissions by default. You must grant them explicit permission to perform specific actions. For more information, see `Access Control Using AWS Identity and Access Management (IAM)`_. For conceptual information and underlying REST API, go to `Configuring Vault Notifications in Amazon Glacier`_ and `Get Vault Notification Configuration `_ in the Amazon Glacier Developer Guide . :type vault_name: string :param vault_name: The name of the vault. """ uri = 'vaults/%s/notification-configuration' % vault_name return self.make_request('GET', uri)
[ "def", "get_vault_notifications", "(", "self", ",", "vault_name", ")", ":", "uri", "=", "'vaults/%s/notification-configuration'", "%", "vault_name", "return", "self", ".", "make_request", "(", "'GET'", ",", "uri", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/glacier/layer1.py#L268-L296
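Usage note: a brief sketch of calling this method, assuming AWS credentials resolve from the environment or boto config; the vault name is a placeholder:

    from boto.glacier.layer1 import Layer1

    conn = Layer1()                                       # credentials from environment
    response = conn.get_vault_notifications('my-vault')   # placeholder vault name
    # the service returns a 404-based error if no notification config is set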
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py2/pandas/core/generic.py
python
NDFrame.get_dtype_counts
(self)
return Series(self._data.get_dtype_counts())
Return counts of unique dtypes in this object. Returns ------- dtype : Series Series with the count of columns with each dtype. See Also -------- dtypes : Return the dtypes in this object. Examples -------- >>> a = [['a', 1, 1.0], ['b', 2, 2.0], ['c', 3, 3.0]] >>> df = pd.DataFrame(a, columns=['str', 'int', 'float']) >>> df str int float 0 a 1 1.0 1 b 2 2.0 2 c 3 3.0 >>> df.get_dtype_counts() float64 1 int64 1 object 1 dtype: int64
Return counts of unique dtypes in this object.
[ "Return", "counts", "of", "unique", "dtypes", "in", "this", "object", "." ]
def get_dtype_counts(self): """ Return counts of unique dtypes in this object. Returns ------- dtype : Series Series with the count of columns with each dtype. See Also -------- dtypes : Return the dtypes in this object. Examples -------- >>> a = [['a', 1, 1.0], ['b', 2, 2.0], ['c', 3, 3.0]] >>> df = pd.DataFrame(a, columns=['str', 'int', 'float']) >>> df str int float 0 a 1 1.0 1 b 2 2.0 2 c 3 3.0 >>> df.get_dtype_counts() float64 1 int64 1 object 1 dtype: int64 """ from pandas import Series return Series(self._data.get_dtype_counts())
[ "def", "get_dtype_counts", "(", "self", ")", ":", "from", "pandas", "import", "Series", "return", "Series", "(", "self", ".", "_data", ".", "get_dtype_counts", "(", ")", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/generic.py#L5382-L5412
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib2to3/refactor.py
python
RefactoringTool.refactor_file
(self, filename, write=False, doctests_only=False)
Refactors a file.
Refactors a file.
[ "Refactors", "a", "file", "." ]
def refactor_file(self, filename, write=False, doctests_only=False): """Refactors a file.""" input, encoding = self._read_python_source(filename) if input is None: # Reading the file failed. return input += u"\n" # Silence certain parse errors if doctests_only: self.log_debug("Refactoring doctests in %s", filename) output = self.refactor_docstring(input, filename) if self.write_unchanged_files or output != input: self.processed_file(output, filename, input, write, encoding) else: self.log_debug("No doctest changes in %s", filename) else: tree = self.refactor_string(input, filename) if self.write_unchanged_files or (tree and tree.was_changed): # The [:-1] is to take off the \n we added earlier self.processed_file(unicode(tree)[:-1], filename, write=write, encoding=encoding) else: self.log_debug("No changes in %s", filename)
[ "def", "refactor_file", "(", "self", ",", "filename", ",", "write", "=", "False", ",", "doctests_only", "=", "False", ")", ":", "input", ",", "encoding", "=", "self", ".", "_read_python_source", "(", "filename", ")", "if", "input", "is", "None", ":", "# Reading the file failed.", "return", "input", "+=", "u\"\\n\"", "# Silence certain parse errors", "if", "doctests_only", ":", "self", ".", "log_debug", "(", "\"Refactoring doctests in %s\"", ",", "filename", ")", "output", "=", "self", ".", "refactor_docstring", "(", "input", ",", "filename", ")", "if", "self", ".", "write_unchanged_files", "or", "output", "!=", "input", ":", "self", ".", "processed_file", "(", "output", ",", "filename", ",", "input", ",", "write", ",", "encoding", ")", "else", ":", "self", ".", "log_debug", "(", "\"No doctest changes in %s\"", ",", "filename", ")", "else", ":", "tree", "=", "self", ".", "refactor_string", "(", "input", ",", "filename", ")", "if", "self", ".", "write_unchanged_files", "or", "(", "tree", "and", "tree", ".", "was_changed", ")", ":", "# The [:-1] is to take off the \\n we added earlier", "self", ".", "processed_file", "(", "unicode", "(", "tree", ")", "[", ":", "-", "1", "]", ",", "filename", ",", "write", "=", "write", ",", "encoding", "=", "encoding", ")", "else", ":", "self", ".", "log_debug", "(", "\"No changes in %s\"", ",", "filename", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib2to3/refactor.py#L339-L360
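Usage note: a minimal driver sketch for the method above; the filename is a placeholder, and write=False keeps it a dry run that only logs diffs:

    from lib2to3.refactor import RefactoringTool, get_fixers_from_package

    fixers = get_fixers_from_package('lib2to3.fixes')
    tool = RefactoringTool(fixers)
    tool.refactor_file('legacy_module.py', write=False)   # report changes, write nothing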
fastmachinelearning/hls4ml
58d761006250deed721d85fefea91201708f2165
hls4ml/model/profiling.py
python
compare
(keras_model, hls_model, X, plot_type = "dist_diff")
return f
Compare each layer's output in keras and hls model. Note that the hls_model should not be compiled before using this. Parameters ---------- keras_model : original keras model hls_model : converted ModelGraph, with "Trace:True" in the configuration file. X : array-like Input for the model. plot_type : string different methods to visualize the y_model and y_sim differences. Possible options include: - 'norm_diff' : square root of the sum of the squares of the differences between each output vectors - 'dist_diff' : The normalized distribution of the differences of the elements between two output vectors Returns ------- matplotlib figure plot object of the histogram depicting the difference in each layer's output
Compare each layer's output in keras and hls model. Note that the hls_model should not be compiled before using this.
[ "Compare", "each", "layer", "s", "output", "in", "keras", "and", "hls", "model", ".", "Note", "that", "the", "hls_model", "should", "not", "be", "compiled", "before", "using", "this", "." ]
def compare(keras_model, hls_model, X, plot_type = "dist_diff"): """ Compare each layer's output in keras and hls model. Note that the hls_model should not be compiled before using this. Parameters ---------- keras_model : original keras model hls_model : converted ModelGraph, with "Trace:True" in the configuration file. X : array-like Input for the model. plot_type : string different methods to visualize the y_model and y_sim differences. Possible options include: - 'norm_diff' : square root of the sum of the squares of the differences between each output vectors - 'dist_diff' : The normalized distribution of the differences of the elements between two output vectors Returns ------- matplotlib figure plot object of the histogram depicting the difference in each layer's output """ #Take in output from both models #Note that each y is a dictionary with structure {"layer_name": flattened ouput array} ymodel = get_ymodel_keras(keras_model, X) _, ysim = hls_model.trace(X) print("Plotting difference...") f = plt.figure() if plot_type == "norm_diff": f = _norm_diff(ymodel, ysim) elif plot_type == "dist_diff": f = _dist_diff(ymodel, ysim) return f
[ "def", "compare", "(", "keras_model", ",", "hls_model", ",", "X", ",", "plot_type", "=", "\"dist_diff\"", ")", ":", "#Take in output from both models", "#Note that each y is a dictionary with structure {\"layer_name\": flattened ouput array}", "ymodel", "=", "get_ymodel_keras", "(", "keras_model", ",", "X", ")", "_", ",", "ysim", "=", "hls_model", ".", "trace", "(", "X", ")", "print", "(", "\"Plotting difference...\"", ")", "f", "=", "plt", ".", "figure", "(", ")", "if", "plot_type", "==", "\"norm_diff\"", ":", "f", "=", "_norm_diff", "(", "ymodel", ",", "ysim", ")", "elif", "plot_type", "==", "\"dist_diff\"", ":", "f", "=", "_dist_diff", "(", "ymodel", ",", "ysim", ")", "return", "f" ]
https://github.com/fastmachinelearning/hls4ml/blob/58d761006250deed721d85fefea91201708f2165/hls4ml/model/profiling.py#L655-L694
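Usage note: a sketch under the docstring's own assumptions -- keras_model and hls_model already exist, the ModelGraph was converted with Trace:True, and X is illustrative random input matching the model's input shape:

    import numpy as np

    X = np.random.rand(100, 16).astype(np.float32)   # placeholder input shape
    fig = compare(keras_model, hls_model, X, plot_type='norm_diff')
    fig.savefig('keras_vs_hls_diff.png')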
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/keras/engine/training_v1.py
python
Model._handle_per_output_metrics
(self, metrics_dict, y_true, y_pred, mask, weights=None)
return metric_results
Calls metric functions for a single output. Args: metrics_dict: A dict with metric names as keys and metric fns as values. y_true: Target output. y_pred: Predicted output. mask: Computed mask value for the current output. weights: Weights to be applied on the current output. Returns: A list of metric result tensors.
Calls metric functions for a single output.
[ "Calls", "metric", "functions", "for", "a", "single", "output", "." ]
def _handle_per_output_metrics(self, metrics_dict, y_true, y_pred, mask, weights=None): """Calls metric functions for a single output. Args: metrics_dict: A dict with metric names as keys and metric fns as values. y_true: Target output. y_pred: Predicted output. mask: Computed mask value for the current output. weights: Weights to be applied on the current output. Returns: A list of metric result tensors. """ metric_results = [] for metric_name, metric_fn in metrics_dict.items(): with backend.name_scope(metric_name): metric_result = training_utils_v1.call_metric_function( metric_fn, y_true, y_pred, weights=weights, mask=mask) metric_results.append(metric_result) return metric_results
[ "def", "_handle_per_output_metrics", "(", "self", ",", "metrics_dict", ",", "y_true", ",", "y_pred", ",", "mask", ",", "weights", "=", "None", ")", ":", "metric_results", "=", "[", "]", "for", "metric_name", ",", "metric_fn", "in", "metrics_dict", ".", "items", "(", ")", ":", "with", "backend", ".", "name_scope", "(", "metric_name", ")", ":", "metric_result", "=", "training_utils_v1", ".", "call_metric_function", "(", "metric_fn", ",", "y_true", ",", "y_pred", ",", "weights", "=", "weights", ",", "mask", "=", "mask", ")", "metric_results", ".", "append", "(", "metric_result", ")", "return", "metric_results" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/engine/training_v1.py#L1901-L1925
rapidsai/cudf
d5b2448fc69f17509304d594f029d0df56984962
python/cudf/cudf/core/series.py
python
Series.nullmask
(self)
return cudf.Series(self._column.nullmask)
The gpu buffer for the null-mask
The gpu buffer for the null-mask
[ "The", "gpu", "buffer", "for", "the", "null", "-", "mask" ]
def nullmask(self): """The gpu buffer for the null-mask""" return cudf.Series(self._column.nullmask)
[ "def", "nullmask", "(", "self", ")", ":", "return", "cudf", ".", "Series", "(", "self", ".", "_column", ".", "nullmask", ")" ]
https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/core/series.py#L1815-L1817
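Usage note: a short sketch; the property is only meaningful when the column carries a validity mask, so the example includes a null:

    import cudf

    s = cudf.Series([1, None, 3])
    mask = s.nullmask   # Series over the GPU buffer backing the validity bitmask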
livecode/livecode
4606a10ea10b16d5071d0f9f263ccdd7ede8b31d
gyp/pylib/gyp/MSVSSettings.py
python
_Same
(tool, name, setting_type)
Defines a setting that has the same name in MSVS and MSBuild. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. name: the name of the setting. setting_type: the type of this setting.
Defines a setting that has the same name in MSVS and MSBuild.
[ "Defines", "a", "setting", "that", "has", "the", "same", "name", "in", "MSVS", "and", "MSBuild", "." ]
def _Same(tool, name, setting_type): """Defines a setting that has the same name in MSVS and MSBuild. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. name: the name of the setting. setting_type: the type of this setting. """ _Renamed(tool, name, name, setting_type)
[ "def", "_Same", "(", "tool", ",", "name", ",", "setting_type", ")", ":", "_Renamed", "(", "tool", ",", "name", ",", "name", ",", "setting_type", ")" ]
https://github.com/livecode/livecode/blob/4606a10ea10b16d5071d0f9f263ccdd7ede8b31d/gyp/pylib/gyp/MSVSSettings.py#L233-L241
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/sparse/sputils.py
python
downcast_intp_index
(arr)
return arr
Down-cast index array to np.intp dtype if it is of a larger dtype. Raise an error if the array contains a value that is too large for intp.
Down-cast index array to np.intp dtype if it is of a larger dtype.
[ "Down", "-", "cast", "index", "array", "to", "np", ".", "intp", "dtype", "if", "it", "is", "of", "a", "larger", "dtype", "." ]
def downcast_intp_index(arr): """ Down-cast index array to np.intp dtype if it is of a larger dtype. Raise an error if the array contains a value that is too large for intp. """ if arr.dtype.itemsize > np.dtype(np.intp).itemsize: if arr.size == 0: return arr.astype(np.intp) maxval = arr.max() minval = arr.min() if maxval > np.iinfo(np.intp).max or minval < np.iinfo(np.intp).min: raise ValueError("Cannot deal with arrays with indices larger " "than the machine maximum address size " "(e.g. 64-bit indices on 32-bit machine).") return arr.astype(np.intp) return arr
[ "def", "downcast_intp_index", "(", "arr", ")", ":", "if", "arr", ".", "dtype", ".", "itemsize", ">", "np", ".", "dtype", "(", "np", ".", "intp", ")", ".", "itemsize", ":", "if", "arr", ".", "size", "==", "0", ":", "return", "arr", ".", "astype", "(", "np", ".", "intp", ")", "maxval", "=", "arr", ".", "max", "(", ")", "minval", "=", "arr", ".", "min", "(", ")", "if", "maxval", ">", "np", ".", "iinfo", "(", "np", ".", "intp", ")", ".", "max", "or", "minval", "<", "np", ".", "iinfo", "(", "np", ".", "intp", ")", ".", "min", ":", "raise", "ValueError", "(", "\"Cannot deal with arrays with indices larger \"", "\"than the machine maximum address size \"", "\"(e.g. 64-bit indices on 32-bit machine).\"", ")", "return", "arr", ".", "astype", "(", "np", ".", "intp", ")", "return", "arr" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/sparse/sputils.py#L79-L96
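Usage note: a worked sketch. On a 64-bit build the call below is a no-op; on a 32-bit build (where np.intp is 32 bits) the same values cast safely, while out-of-range values would raise ValueError:

    import numpy as np

    idx = np.array([0, 7, 42], dtype=np.int64)
    idx = downcast_intp_index(idx)   # small values: cast (or no-op) to np.intp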
cvxpy/cvxpy
5165b4fb750dfd237de8659383ef24b4b2e33aaf
cvxpy/constraints/exponential.py
python
ExpCone.size
(self)
return 3 * self.num_cones()
The number of entries in the combined cones.
The number of entries in the combined cones.
[ "The", "number", "of", "entries", "in", "the", "combined", "cones", "." ]
def size(self) -> int: """The number of entries in the combined cones. """ return 3 * self.num_cones()
[ "def", "size", "(", "self", ")", "->", "int", ":", "return", "3", "*", "self", ".", "num_cones", "(", ")" ]
https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/constraints/exponential.py#L91-L94
microsoft/checkedc-clang
a173fefde5d7877b7750e7ce96dd08cf18baebf2
clang/tools/scan-build-py/libscanbuild/analyze.py
python
create_global_ctu_extdef_map
(extdef_map_lines)
return mangled_ast_pairs
Takes iterator of individual external definition maps and creates a global map keeping only unique names. We leave conflicting names out of CTU. :param extdef_map_lines: Contains the id of a definition (mangled name) and the originating source (the corresponding AST file) name. :type extdef_map_lines: Iterator of str. :returns: Mangled name - AST file pairs. :rtype: List of (str, str) tuples.
Takes iterator of individual external definition maps and creates a global map keeping only unique names. We leave conflicting names out of CTU.
[ "Takes", "iterator", "of", "individual", "external", "definition", "maps", "and", "creates", "a", "global", "map", "keeping", "only", "unique", "names", ".", "We", "leave", "conflicting", "names", "out", "of", "CTU", "." ]
def create_global_ctu_extdef_map(extdef_map_lines): """ Takes iterator of individual external definition maps and creates a global map keeping only unique names. We leave conflicting names out of CTU. :param extdef_map_lines: Contains the id of a definition (mangled name) and the originating source (the corresponding AST file) name. :type extdef_map_lines: Iterator of str. :returns: Mangled name - AST file pairs. :rtype: List of (str, str) tuples. """ mangled_to_asts = defaultdict(set) for line in extdef_map_lines: mangled_name, ast_file = line.strip().split(' ', 1) mangled_to_asts[mangled_name].add(ast_file) mangled_ast_pairs = [] for mangled_name, ast_files in mangled_to_asts.items(): if len(ast_files) == 1: mangled_ast_pairs.append((mangled_name, next(iter(ast_files)))) return mangled_ast_pairs
[ "def", "create_global_ctu_extdef_map", "(", "extdef_map_lines", ")", ":", "mangled_to_asts", "=", "defaultdict", "(", "set", ")", "for", "line", "in", "extdef_map_lines", ":", "mangled_name", ",", "ast_file", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "' '", ",", "1", ")", "mangled_to_asts", "[", "mangled_name", "]", ".", "add", "(", "ast_file", ")", "mangled_ast_pairs", "=", "[", "]", "for", "mangled_name", ",", "ast_files", "in", "mangled_to_asts", ".", "items", "(", ")", ":", "if", "len", "(", "ast_files", ")", "==", "1", ":", "mangled_ast_pairs", ".", "append", "(", "(", "mangled_name", ",", "next", "(", "iter", "(", "ast_files", ")", ")", ")", ")", "return", "mangled_ast_pairs" ]
https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/clang/tools/scan-build-py/libscanbuild/analyze.py#L136-L160
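Worked example of the de-duplication (the names are illustrative USR-style strings): a mangled name defined in two AST files is left out of CTU, while a unique one is kept:

    lines = [
        'c:@F@foo a.c.ast',   # defined twice ...
        'c:@F@foo b.c.ast',   # ... so excluded
        'c:@F@bar b.c.ast',   # unique, kept
    ]
    create_global_ctu_extdef_map(lines)   # -> [('c:@F@bar', 'b.c.ast')]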
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/ed_stc.py
python
EditraStc.SyntaxOnOff
(self, switch=None)
return 0
Turn Syntax Highlighting on and off @keyword switch: force a particular setting
Turn Syntax Highlighting on and off @keyword switch: force a particular setting
[ "Turn", "Syntax", "Highlighting", "on", "and", "off", "@keyword", "switch", ":", "force", "a", "particular", "setting" ]
def SyntaxOnOff(self, switch=None): """Turn Syntax Highlighting on and off @keyword switch: force a particular setting """ if (switch is None and not self._config['highlight']) or switch: self.LOG("[ed_stc][evt] Syntax Highlighting Turned On") self._config['highlight'] = True self.FindLexer() else: self.LOG("[ed_stc][evt] Syntax Highlighting Turned Off") self._config['highlight'] = False self.SetLexer(wx.stc.STC_LEX_NULL) self.ClearDocumentStyle() self.UpdateBaseStyles() return 0
[ "def", "SyntaxOnOff", "(", "self", ",", "switch", "=", "None", ")", ":", "if", "(", "switch", "is", "None", "and", "not", "self", ".", "_config", "[", "'highlight'", "]", ")", "or", "switch", ":", "self", ".", "LOG", "(", "\"[ed_stc][evt] Syntax Highlighting Turned On\"", ")", "self", ".", "_config", "[", "'highlight'", "]", "=", "True", "self", ".", "FindLexer", "(", ")", "else", ":", "self", ".", "LOG", "(", "\"[ed_stc][evt] Syntax Highlighting Turned Off\"", ")", "self", ".", "_config", "[", "'highlight'", "]", "=", "False", "self", ".", "SetLexer", "(", "wx", ".", "stc", ".", "STC_LEX_NULL", ")", "self", ".", "ClearDocumentStyle", "(", ")", "self", ".", "UpdateBaseStyles", "(", ")", "return", "0" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/ed_stc.py#L1606-L1621
openmm/openmm
cb293447c4fc8b03976dfe11399f107bab70f3d9
wrappers/python/openmm/unit/baseunit.py
python
BaseUnit.conversion_factor_to
(self, other)
return self._conversion_factor_to_by_name[other.name]
Returns a conversion factor from this BaseUnit to another BaseUnit. It does not matter which existing BaseUnit you define the conversion factor to. Conversions for all other known BaseUnits will be computed at the same time. Raises TypeError if dimension does not match. Raises LookupError if no conversion has been defined. (see define_conversion_factor_to).
Returns a conversion factor from this BaseUnit to another BaseUnit.
[ "Returns", "a", "conversion", "factor", "from", "this", "BaseUnit", "to", "another", "BaseUnit", "." ]
def conversion_factor_to(self, other): """Returns a conversion factor from this BaseUnit to another BaseUnit. It does not matter which existing BaseUnit you define the conversion factor to. Conversions for all other known BaseUnits will be computed at the same time. Raises TypeError if dimension does not match. Raises LookupError if no conversion has been defined. (see define_conversion_factor_to). """ if self is other: return 1.0 if self.dimension != other.dimension: raise TypeError('Cannot get conversion for BaseUnits with different dimensions.') if not other.name in self._conversion_factor_to_by_name: raise LookupError('No conversion defined from BaseUnit "%s" to "%s".' % (self, other)) return self._conversion_factor_to_by_name[other.name]
[ "def", "conversion_factor_to", "(", "self", ",", "other", ")", ":", "if", "self", "is", "other", ":", "return", "1.0", "if", "self", ".", "dimension", "!=", "other", ".", "dimension", ":", "raise", "TypeError", "(", "'Cannot get conversion for BaseUnits with different dimensions.'", ")", "if", "not", "other", ".", "name", "in", "self", ".", "_conversion_factor_to_by_name", ":", "raise", "LookupError", "(", "'No conversion defined from BaseUnit \"%s\" to \"%s\".'", "%", "(", "self", ",", "other", ")", ")", "return", "self", ".", "_conversion_factor_to_by_name", "[", "other", ".", "name", "]" ]
https://github.com/openmm/openmm/blob/cb293447c4fc8b03976dfe11399f107bab70f3d9/wrappers/python/openmm/unit/baseunit.py#L149-L164
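Usage note: a self-contained sketch, assuming openmm.unit exports BaseDimension and BaseUnit and that define_conversion_factor_to (referenced by the docstring) takes (other, factor):

    from openmm.unit import BaseDimension, BaseUnit

    length = BaseDimension('length')
    meter = BaseUnit(length, 'meter', 'm')
    centimeter = BaseUnit(length, 'centimeter', 'cm')
    centimeter.define_conversion_factor_to(meter, 0.01)

    meter.conversion_factor_to(centimeter)   # 100.0, derived from the cm->m factor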
oracle/graaljs
36a56e8e993d45fc40939a3a4d9c0c24990720f1
graal-nodejs/deps/v8/third_party/jinja2/parser.py
python
Parser.parse_if
(self)
return result
Parse an if construct.
Parse an if construct.
[ "Parse", "an", "if", "construct", "." ]
def parse_if(self): """Parse an if construct.""" node = result = nodes.If(lineno=self.stream.expect('name:if').lineno) while 1: node.test = self.parse_tuple(with_condexpr=False) node.body = self.parse_statements(('name:elif', 'name:else', 'name:endif')) node.elif_ = [] node.else_ = [] token = next(self.stream) if token.test('name:elif'): node = nodes.If(lineno=self.stream.current.lineno) result.elif_.append(node) continue elif token.test('name:else'): result.else_ = self.parse_statements(('name:endif',), drop_needle=True) break return result
[ "def", "parse_if", "(", "self", ")", ":", "node", "=", "result", "=", "nodes", ".", "If", "(", "lineno", "=", "self", ".", "stream", ".", "expect", "(", "'name:if'", ")", ".", "lineno", ")", "while", "1", ":", "node", ".", "test", "=", "self", ".", "parse_tuple", "(", "with_condexpr", "=", "False", ")", "node", ".", "body", "=", "self", ".", "parse_statements", "(", "(", "'name:elif'", ",", "'name:else'", ",", "'name:endif'", ")", ")", "node", ".", "elif_", "=", "[", "]", "node", ".", "else_", "=", "[", "]", "token", "=", "next", "(", "self", ".", "stream", ")", "if", "token", ".", "test", "(", "'name:elif'", ")", ":", "node", "=", "nodes", ".", "If", "(", "lineno", "=", "self", ".", "stream", ".", "current", ".", "lineno", ")", "result", ".", "elif_", ".", "append", "(", "node", ")", "continue", "elif", "token", ".", "test", "(", "'name:else'", ")", ":", "result", ".", "else_", "=", "self", ".", "parse_statements", "(", "(", "'name:endif'", ",", ")", ",", "drop_needle", "=", "True", ")", "break", "return", "result" ]
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/deps/v8/third_party/jinja2/parser.py#L207-L225
gimli-org/gimli
17aa2160de9b15ababd9ef99e89b1bc3277bbb23
pygimli/meshtools/mesh.py
python
exportHDF5Mesh
(mesh, exportname, group='mesh', indices='cell_indices', pos='coordinates', cells='topology', marker='values')
return True
Writes given :gimliapi:`GIMLI::Mesh` in a hdf5 format file. 3D tetrahedral meshes only! Boundary markers are ignored. Keywords are explained in :py:mod:`pygimli.meshtools.readHDFS`
Writes given :gimliapi:`GIMLI::Mesh` in a hdf5 format file.
[ "Writes", "given", ":", "gimliapi", ":", "GIMLI", "::", "Mesh", "in", "a", "hdf5", "format", "file", "." ]
def exportHDF5Mesh(mesh, exportname, group='mesh', indices='cell_indices', pos='coordinates', cells='topology', marker='values'): """Writes given :gimliapi:`GIMLI::Mesh` in a hdf5 format file. 3D tetrahedral meshes only! Boundary markers are ignored. Keywords are explained in :py:mod:`pygimli.meshtools.readHDFS` """ h5py = pg.optImport('h5py', requiredFor='export mesh in .h5 data format') if not isinstance(mesh, pg.Mesh): mesh = pg.Mesh(mesh) # prepare output for writing in hdf data container pg_pos = mesh.positions() mesh_pos = np.array((np.array(pg.x(pg_pos)), np.array(pg.y(pg_pos)), np.array(pg.z(pg_pos)))).T mesh_cells = np.zeros((mesh.cellCount(), 4)) # hard coded for tetrahedrons for i, cell in enumerate(mesh.cells()): mesh_cells[i] = cell.ids() mesh_indices = np.arange(0, mesh.cellCount() + 1, 1, dtype=np.int64) mesh_markers = np.array(mesh.cellMarkers()) with h5py.File(exportname, 'w') as out: for grp in np.atleast_1d(group): # can use more than one group # writing indices idx_name = '{}/{}'.format(grp, indices) out.create_dataset(idx_name, data=mesh_indices, dtype=int) # writing node positions pos_name = '{}/{}'.format(grp, pos) out.create_dataset(pos_name, data=mesh_pos, dtype=float) # writing cells via indices cells_name = '{}/{}'.format(grp, cells) out.create_dataset(cells_name, data=mesh_cells, dtype=int) # writing marker marker_name = '{}/{}'.format(grp, marker) out.create_dataset(marker_name, data=mesh_markers, dtype=int) out[grp][cells].attrs['celltype'] = np.string_('tetrahedron') out[grp][cells].attrs.create('partition', [0]) return True
[ "def", "exportHDF5Mesh", "(", "mesh", ",", "exportname", ",", "group", "=", "'mesh'", ",", "indices", "=", "'cell_indices'", ",", "pos", "=", "'coordinates'", ",", "cells", "=", "'topology'", ",", "marker", "=", "'values'", ")", ":", "h5py", "=", "pg", ".", "optImport", "(", "'h5py'", ",", "requiredFor", "=", "'export mesh in .h5 data format'", ")", "if", "not", "isinstance", "(", "mesh", ",", "pg", ".", "Mesh", ")", ":", "mesh", "=", "pg", ".", "Mesh", "(", "mesh", ")", "# prepare output for writing in hdf data container", "pg_pos", "=", "mesh", ".", "positions", "(", ")", "mesh_pos", "=", "np", ".", "array", "(", "(", "np", ".", "array", "(", "pg", ".", "x", "(", "pg_pos", ")", ")", ",", "np", ".", "array", "(", "pg", ".", "y", "(", "pg_pos", ")", ")", ",", "np", ".", "array", "(", "pg", ".", "z", "(", "pg_pos", ")", ")", ")", ")", ".", "T", "mesh_cells", "=", "np", ".", "zeros", "(", "(", "mesh", ".", "cellCount", "(", ")", ",", "4", ")", ")", "# hard coded for tetrahedrons", "for", "i", ",", "cell", "in", "enumerate", "(", "mesh", ".", "cells", "(", ")", ")", ":", "mesh_cells", "[", "i", "]", "=", "cell", ".", "ids", "(", ")", "mesh_indices", "=", "np", ".", "arange", "(", "0", ",", "mesh", ".", "cellCount", "(", ")", "+", "1", ",", "1", ",", "dtype", "=", "np", ".", "int64", ")", "mesh_markers", "=", "np", ".", "array", "(", "mesh", ".", "cellMarkers", "(", ")", ")", "with", "h5py", ".", "File", "(", "exportname", ",", "'w'", ")", "as", "out", ":", "for", "grp", "in", "np", ".", "atleast_1d", "(", "group", ")", ":", "# can use more than one group", "# writing indices", "idx_name", "=", "'{}/{}'", ".", "format", "(", "grp", ",", "indices", ")", "out", ".", "create_dataset", "(", "idx_name", ",", "data", "=", "mesh_indices", ",", "dtype", "=", "int", ")", "# writing node positions", "pos_name", "=", "'{}/{}'", ".", "format", "(", "grp", ",", "pos", ")", "out", ".", "create_dataset", "(", "pos_name", ",", "data", "=", "mesh_pos", ",", "dtype", "=", "float", ")", "# writing cells via indices", "cells_name", "=", "'{}/{}'", ".", "format", "(", "grp", ",", "cells", ")", "out", ".", "create_dataset", "(", "cells_name", ",", "data", "=", "mesh_cells", ",", "dtype", "=", "int", ")", "# writing marker", "marker_name", "=", "'{}/{}'", ".", "format", "(", "grp", ",", "marker", ")", "out", ".", "create_dataset", "(", "marker_name", ",", "data", "=", "mesh_markers", ",", "dtype", "=", "int", ")", "out", "[", "grp", "]", "[", "cells", "]", ".", "attrs", "[", "'celltype'", "]", "=", "np", ".", "string_", "(", "'tetrahedron'", ")", "out", "[", "grp", "]", "[", "cells", "]", ".", "attrs", ".", "create", "(", "'partition'", ",", "[", "0", "]", ")", "return", "True" ]
https://github.com/gimli-org/gimli/blob/17aa2160de9b15ababd9ef99e89b1bc3277bbb23/pygimli/meshtools/mesh.py#L1479-L1521
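Usage note: a sketch of exporting an existing mesh; 'tet.bms' is a placeholder for a file holding a purely tetrahedral 3D mesh, and h5py must be installed:

    import pygimli as pg

    mesh = pg.load('tet.bms')                       # must contain tetrahedra only
    exportHDF5Mesh(mesh, 'mesh.h5', group='mesh')   # boundary markers are ignored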
nyuwireless-unipd/ns3-mmwave
4ff9e87e8079764e04cbeccd8e85bff15ae16fb3
src/core/examples/sample-simulator.py
python
MyModel.Start
(self)
Start model execution by scheduling a HandleEvent.
Start model execution by scheduling a HandleEvent.
[ "Start", "model", "execution", "by", "scheduling", "a", "HandleEvent", "." ]
def Start(self): """Start model execution by scheduling a HandleEvent.""" ns.core.Simulator.Schedule(ns.core.Seconds(10.0), self.HandleEvent, ns.core.Simulator.Now().GetSeconds())
[ "def", "Start", "(", "self", ")", ":", "ns", ".", "core", ".", "Simulator", ".", "Schedule", "(", "ns", ".", "core", ".", "Seconds", "(", "10.0", ")", ",", "self", ".", "HandleEvent", ",", "ns", ".", "core", ".", "Simulator", ".", "Now", "(", ")", ".", "GetSeconds", "(", ")", ")" ]
https://github.com/nyuwireless-unipd/ns3-mmwave/blob/4ff9e87e8079764e04cbeccd8e85bff15ae16fb3/src/core/examples/sample-simulator.py#L35-L37
hfinkel/llvm-project-cxxjit
91084ef018240bbb8e24235ff5cd8c355a9c1a1e
llvm/bindings/python/llvm/object.py
python
ObjectFile.get_sections
(self, cache=False)
Obtain the sections in this object file. This is a generator for llvm.object.Section instances. Sections are exposed as limited-use objects. See the module's documentation on iterators for more.
Obtain the sections in this object file.
[ "Obtain", "the", "sections", "in", "this", "object", "file", "." ]
def get_sections(self, cache=False): """Obtain the sections in this object file. This is a generator for llvm.object.Section instances. Sections are exposed as limited-use objects. See the module's documentation on iterators for more. """ sections = lib.LLVMGetSections(self) last = None while True: if lib.LLVMIsSectionIteratorAtEnd(self, sections): break last = Section(sections) if cache: last.cache() yield last lib.LLVMMoveToNextSection(sections) last.expire() if last is not None: last.expire() lib.LLVMDisposeSectionIterator(sections)
[ "def", "get_sections", "(", "self", ",", "cache", "=", "False", ")", ":", "sections", "=", "lib", ".", "LLVMGetSections", "(", "self", ")", "last", "=", "None", "while", "True", ":", "if", "lib", ".", "LLVMIsSectionIteratorAtEnd", "(", "self", ",", "sections", ")", ":", "break", "last", "=", "Section", "(", "sections", ")", "if", "cache", ":", "last", ".", "cache", "(", ")", "yield", "last", "lib", ".", "LLVMMoveToNextSection", "(", "sections", ")", "last", ".", "expire", "(", ")", "if", "last", "is", "not", "None", ":", "last", ".", "expire", "(", ")", "lib", ".", "LLVMDisposeSectionIterator", "(", "sections", ")" ]
https://github.com/hfinkel/llvm-project-cxxjit/blob/91084ef018240bbb8e24235ff5cd8c355a9c1a1e/llvm/bindings/python/llvm/object.py#L122-L148
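Usage note: a short consumer sketch; 'a.out' is a placeholder path. cache=True keeps each yielded Section usable after the iterator advances, per the module's limited-use contract:

    from llvm.object import ObjectFile

    obj = ObjectFile(filename='a.out')
    for section in obj.get_sections(cache=True):
        print(section.name, section.size)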
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py2/scipy/linalg/_interpolative_backend.py
python
idzr_svd
(A, k)
return U, V, S
Compute SVD of a complex matrix to a specified rank. :param A: Matrix. :type A: :class:`numpy.ndarray` :param k: Rank of SVD. :type k: int :return: Left singular vectors. :rtype: :class:`numpy.ndarray` :return: Right singular vectors. :rtype: :class:`numpy.ndarray` :return: Singular values. :rtype: :class:`numpy.ndarray`
Compute SVD of a complex matrix to a specified rank.
[ "Compute", "SVD", "of", "a", "complex", "matrix", "to", "a", "specified", "rank", "." ]
def idzr_svd(A, k): """ Compute SVD of a complex matrix to a specified rank. :param A: Matrix. :type A: :class:`numpy.ndarray` :param k: Rank of SVD. :type k: int :return: Left singular vectors. :rtype: :class:`numpy.ndarray` :return: Right singular vectors. :rtype: :class:`numpy.ndarray` :return: Singular values. :rtype: :class:`numpy.ndarray` """ A = np.asfortranarray(A) U, V, S, ier = _id.idzr_svd(A, k) if ier: raise _RETCODE_ERROR return U, V, S
[ "def", "idzr_svd", "(", "A", ",", "k", ")", ":", "A", "=", "np", ".", "asfortranarray", "(", "A", ")", "U", ",", "V", ",", "S", ",", "ier", "=", "_id", ".", "idzr_svd", "(", "A", ",", "k", ")", "if", "ier", ":", "raise", "_RETCODE_ERROR", "return", "U", ",", "V", ",", "S" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/linalg/_interpolative_backend.py#L1214-L1239
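Usage note: a sketch calling this private backend directly (the supported public entry point is scipy.linalg.interpolative.svd); matrix size and rank are illustrative:

    import numpy as np
    from scipy.linalg import _interpolative_backend as backend

    A = np.random.randn(20, 10) + 1j * np.random.randn(20, 10)
    U, V, S = backend.idzr_svd(A, 5)
    approx = U @ np.diag(S) @ V.conj().T   # rank-5 approximation of A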
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/xrc.py
python
XmlResourceHandler.AddStyle
(*args, **kwargs)
return _xrc.XmlResourceHandler_AddStyle(*args, **kwargs)
AddStyle(self, String name, int value)
AddStyle(self, String name, int value)
[ "AddStyle", "(", "self", "String", "name", "int", "value", ")" ]
def AddStyle(*args, **kwargs): """AddStyle(self, String name, int value)""" return _xrc.XmlResourceHandler_AddStyle(*args, **kwargs)
[ "def", "AddStyle", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_xrc", ".", "XmlResourceHandler_AddStyle", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/xrc.py#L647-L649
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/estimator.py
python
BaseEstimator._get_feature_ops_from_example
(self, examples_batch)
return tensor_signature.create_example_parser_from_signatures( self._features_info, examples_batch)
Returns feature parser for given example batch using features info. This function requires `fit()` has been called. Args: examples_batch: batch of tf.Example Returns: features: `Tensor` or `dict` of `Tensor` objects. Raises: ValueError: If `_features_info` attribute is not available (usually because `fit()` has not been called).
Returns feature parser for given example batch using features info.
[ "Returns", "feature", "parser", "for", "given", "example", "batch", "using", "features", "info", "." ]
def _get_feature_ops_from_example(self, examples_batch): """Returns feature parser for given example batch using features info. This function requires `fit()` has been called. Args: examples_batch: batch of tf.Example Returns: features: `Tensor` or `dict` of `Tensor` objects. Raises: ValueError: If `_features_info` attribute is not available (usually because `fit()` has not been called). """ if self._features_info is None: raise ValueError('Features information missing, was fit() ever called?') return tensor_signature.create_example_parser_from_signatures( self._features_info, examples_batch)
[ "def", "_get_feature_ops_from_example", "(", "self", ",", "examples_batch", ")", ":", "if", "self", ".", "_features_info", "is", "None", ":", "raise", "ValueError", "(", "'Features information missing, was fit() ever called?'", ")", "return", "tensor_signature", ".", "create_example_parser_from_signatures", "(", "self", ".", "_features_info", ",", "examples_batch", ")" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/estimator.py#L400-L418
nyuwireless-unipd/ns3-mmwave
4ff9e87e8079764e04cbeccd8e85bff15ae16fb3
src/visualizer/visualizer/core.py
python
SimulationThread.set_nodes_of_interest
(self, nodes)
! Set nodes of interest function. @param self: class object. @param nodes: class object. @return
! Set nodes of interest function.
[ "!", "Set", "nodes", "of", "interest", "function", "." ]
def set_nodes_of_interest(self, nodes): """! Set nodes of interest function. @param self: class object. @param nodes: class object. @return """ self.lock.acquire() try: self.sim_helper.SetNodesOfInterest(nodes) finally: self.lock.release()
[ "def", "set_nodes_of_interest", "(", "self", ",", "nodes", ")", ":", "self", ".", "lock", ".", "acquire", "(", ")", "try", ":", "self", ".", "sim_helper", ".", "SetNodesOfInterest", "(", "nodes", ")", "finally", ":", "self", ".", "lock", ".", "release", "(", ")" ]
https://github.com/nyuwireless-unipd/ns3-mmwave/blob/4ff9e87e8079764e04cbeccd8e85bff15ae16fb3/src/visualizer/visualizer/core.py#L637-L649
Alexhuszagh/rust-lexical
01fcdcf8efc8850edb35d8fc65fd5f31bd0981a0
lexical-util/etc/div128.py
python
is_valid
(x)
return ( x <= u64_max and (u128_max / (x**2)) < x )
Determine if the power is valid.
Determine if the power is valid.
[ "Determine", "if", "the", "power", "is", "valid", "." ]
def is_valid(x): '''Determine if the power is valid.''' return ( x <= u64_max and (u128_max / (x**2)) < x )
[ "def", "is_valid", "(", "x", ")", ":", "return", "(", "x", "<=", "u64_max", "and", "(", "u128_max", "/", "(", "x", "**", "2", ")", ")", "<", "x", ")" ]
https://github.com/Alexhuszagh/rust-lexical/blob/01fcdcf8efc8850edb35d8fc65fd5f31bd0981a0/lexical-util/etc/div128.py#L21-L26
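Worked check, restating the constants this script defines (u64_max = 2**64 - 1, u128_max = 2**128 - 1):

    u64_max = 2**64 - 1
    u128_max = 2**128 - 1

    is_valid(10**19)   # True: fits in u64, and u128_max / (10**19)**2 ~ 3.4 < 10**19
    is_valid(10**20)   # False: 10**20 exceeds u64_max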
mongodb/mongo
d8ff665343ad29cf286ee2cf4a1960d29371937b
src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/icc.py
python
generate
(env)
Add Builders and construction variables for the OS/2 to an Environment.
Add Builders and construction variables for the OS/2 to an Environment.
[ "Add", "Builders", "and", "construction", "variables", "for", "the", "OS", "/", "2", "to", "an", "Environment", "." ]
def generate(env): """Add Builders and construction variables for the OS/2 to an Environment.""" cc.generate(env) env['CC'] = 'icc' env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET' env['CXXCOM'] = '$CXX $CXXFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET' env['CPPDEFPREFIX'] = '/D' env['CPPDEFSUFFIX'] = '' env['INCPREFIX'] = '/I' env['INCSUFFIX'] = '' env['CFILESUFFIX'] = '.c' env['CXXFILESUFFIX'] = '.cc'
[ "def", "generate", "(", "env", ")", ":", "cc", ".", "generate", "(", "env", ")", "env", "[", "'CC'", "]", "=", "'icc'", "env", "[", "'CCCOM'", "]", "=", "'$CC $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET'", "env", "[", "'CXXCOM'", "]", "=", "'$CXX $CXXFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET'", "env", "[", "'CPPDEFPREFIX'", "]", "=", "'/D'", "env", "[", "'CPPDEFSUFFIX'", "]", "=", "''", "env", "[", "'INCPREFIX'", "]", "=", "'/I'", "env", "[", "'INCSUFFIX'", "]", "=", "''", "env", "[", "'CFILESUFFIX'", "]", "=", "'.c'", "env", "[", "'CXXFILESUFFIX'", "]", "=", "'.cc'" ]
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/icc.py#L38-L50
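Usage note: in an SConstruct this tool is requested by name, which invokes generate(env) above; a minimal sketch (Environment is provided by SCons in that scope):

    env = Environment(tools=['icc'])
    env.Program('hello', ['hello.c'])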
freeorion/freeorion
c266a40eccd3a99a17de8fe57c36ef6ba3771665
default/python/universe_generation/galaxy.py
python
recalc_universe_width
(positions)
return actual_width, new_positions
Recalculates the universe width. This is done by shifting all positions by a delta so too much "extra space" beyond the uppermost, lowermost, leftmost and rightmost positions is cropped, and adjust the universe width accordingly. Returns the new universe width and the recalculated positions.
Recalculates the universe width. This is done by shifting all positions by a delta so too much "extra space" beyond the uppermost, lowermost, leftmost and rightmost positions is cropped, and adjust the universe width accordingly.
[ "Recalculates", "the", "universe", "width", ".", "This", "is", "done", "by", "shifting", "all", "positions", "by", "a", "delta", "so", "too", "much", "extra", "space", "beyond", "the", "uppermost", "lowermost", "leftmost", "and", "rightmost", "positions", "is", "cropped", "and", "adjust", "the", "universe", "width", "accordingly", "." ]
def recalc_universe_width(positions): """ Recalculates the universe width. This is done by shifting all positions by a delta so too much "extra space" beyond the uppermost, lowermost, leftmost and rightmost positions is cropped, and adjust the universe width accordingly. Returns the new universe width and the recalculated positions. """ print("Recalculating universe width...") # first, get the uppermost, lowermost, leftmost and rightmost positions # (these are those with their x or y coordinate closest to or farthest away from the x or y axis) min_x = min(positions, key=lambda p: p[0])[0] min_y = min(positions, key=lambda p: p[1])[1] max_x = max(positions, key=lambda p: p[0])[0] max_y = max(positions, key=lambda p: p[1])[1] print("...the leftmost system position is at x coordinate {}".format(min_x)) print("...the uppermost system position is at y coordinate {}".format(min_y)) print("...the rightmost system position is at x coordinate {}".format(max_x)) print("...the lowermost system position is at y coordinate {}".format(max_y)) # calculate the actual universe width by determining the width and height of an rectangle that encompasses all # positions, and take the greater of the two as the new actual width for the universe # also add a constant value to the width so we have some small space around width = max_x - min_x height = max_y - min_y actual_width = max(width, height) + 20.0 print("...recalculated universe width: {}".format(actual_width)) # shift all positions so the entire map is centered in a quadratic box of the width we just calculated # this box defines the extends of our universe delta_x = ((actual_width - width) / 2) - min_x delta_y = ((actual_width - height) / 2) - min_y print("...shifting all system positions by {}/{}".format(delta_x, delta_y)) new_positions = [(p[0] + delta_x, p[1] + delta_y) for p in positions] print("...the leftmost system position is now at x coordinate {}".format(min(new_positions, key=lambda p: p[0])[0])) print( "...the uppermost system position is now at y coordinate {}".format(min(new_positions, key=lambda p: p[1])[1]) ) print( "...the rightmost system position is now at x coordinate {}".format(max(new_positions, key=lambda p: p[0])[0]) ) print( "...the lowermost system position is now at y coordinate {}".format(max(new_positions, key=lambda p: p[1])[1]) ) return actual_width, new_positions
[ "def", "recalc_universe_width", "(", "positions", ")", ":", "print", "(", "\"Recalculating universe width...\"", ")", "# first, get the uppermost, lowermost, leftmost and rightmost positions", "# (these are those with their x or y coordinate closest to or farthest away from the x or y axis)", "min_x", "=", "min", "(", "positions", ",", "key", "=", "lambda", "p", ":", "p", "[", "0", "]", ")", "[", "0", "]", "min_y", "=", "min", "(", "positions", ",", "key", "=", "lambda", "p", ":", "p", "[", "1", "]", ")", "[", "1", "]", "max_x", "=", "max", "(", "positions", ",", "key", "=", "lambda", "p", ":", "p", "[", "0", "]", ")", "[", "0", "]", "max_y", "=", "max", "(", "positions", ",", "key", "=", "lambda", "p", ":", "p", "[", "1", "]", ")", "[", "1", "]", "print", "(", "\"...the leftmost system position is at x coordinate {}\"", ".", "format", "(", "min_x", ")", ")", "print", "(", "\"...the uppermost system position is at y coordinate {}\"", ".", "format", "(", "min_y", ")", ")", "print", "(", "\"...the rightmost system position is at x coordinate {}\"", ".", "format", "(", "max_x", ")", ")", "print", "(", "\"...the lowermost system position is at y coordinate {}\"", ".", "format", "(", "max_y", ")", ")", "# calculate the actual universe width by determining the width and height of an rectangle that encompasses all", "# positions, and take the greater of the two as the new actual width for the universe", "# also add a constant value to the width so we have some small space around", "width", "=", "max_x", "-", "min_x", "height", "=", "max_y", "-", "min_y", "actual_width", "=", "max", "(", "width", ",", "height", ")", "+", "20.0", "print", "(", "\"...recalculated universe width: {}\"", ".", "format", "(", "actual_width", ")", ")", "# shift all positions so the entire map is centered in a quadratic box of the width we just calculated", "# this box defines the extends of our universe", "delta_x", "=", "(", "(", "actual_width", "-", "width", ")", "/", "2", ")", "-", "min_x", "delta_y", "=", "(", "(", "actual_width", "-", "height", ")", "/", "2", ")", "-", "min_y", "print", "(", "\"...shifting all system positions by {}/{}\"", ".", "format", "(", "delta_x", ",", "delta_y", ")", ")", "new_positions", "=", "[", "(", "p", "[", "0", "]", "+", "delta_x", ",", "p", "[", "1", "]", "+", "delta_y", ")", "for", "p", "in", "positions", "]", "print", "(", "\"...the leftmost system position is now at x coordinate {}\"", ".", "format", "(", "min", "(", "new_positions", ",", "key", "=", "lambda", "p", ":", "p", "[", "0", "]", ")", "[", "0", "]", ")", ")", "print", "(", "\"...the uppermost system position is now at y coordinate {}\"", ".", "format", "(", "min", "(", "new_positions", ",", "key", "=", "lambda", "p", ":", "p", "[", "1", "]", ")", "[", "1", "]", ")", ")", "print", "(", "\"...the rightmost system position is now at x coordinate {}\"", ".", "format", "(", "max", "(", "new_positions", ",", "key", "=", "lambda", "p", ":", "p", "[", "0", "]", ")", "[", "0", "]", ")", ")", "print", "(", "\"...the lowermost system position is now at y coordinate {}\"", ".", "format", "(", "max", "(", "new_positions", ",", "key", "=", "lambda", "p", ":", "p", "[", "1", "]", ")", "[", "1", "]", ")", ")", "return", "actual_width", ",", "new_positions" ]
https://github.com/freeorion/freeorion/blob/c266a40eccd3a99a17de8fe57c36ef6ba3771665/default/python/universe_generation/galaxy.py#L738-L784
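Worked example: width 85 and height 110 give an actual width of 110 + 20 = 130, and every position is shifted to center inside that square box:

    positions = [(35.0, 70.0), (120.0, 40.0), (80.0, 150.0)]
    width, shifted = recalc_universe_width(positions)
    # width == 130.0; shifted spans x in [22.5, 107.5] and y in [10.0, 120.0]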
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/ntpath.py
python
islink
(path)
return False
Test for symbolic link. On WindowsNT/95 and OS/2 always returns false
Test for symbolic link. On WindowsNT/95 and OS/2 always returns false
[ "Test", "for", "symbolic", "link", ".", "On", "WindowsNT", "/", "95", "and", "OS", "/", "2", "always", "returns", "false" ]
def islink(path): """Test for symbolic link. On WindowsNT/95 and OS/2 always returns false """ return False
[ "def", "islink", "(", "path", ")", ":", "return", "False" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/ntpath.py#L210-L214
weolar/miniblink49
1c4678db0594a4abde23d3ebbcc7cd13c3170777
third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/_stream_hybi.py
python
create_length_header
(length, mask)
Creates a length header. Args: length: Frame length. Must be less than 2^63. mask: Mask bit. Must be boolean. Raises: ValueError: when bad data is given.
Creates a length header.
[ "Creates", "a", "length", "header", "." ]
def create_length_header(length, mask): """Creates a length header. Args: length: Frame length. Must be less than 2^63. mask: Mask bit. Must be boolean. Raises: ValueError: when bad data is given. """ if mask: mask_bit = 1 << 7 else: mask_bit = 0 if length < 0: raise ValueError('length must be non negative integer') elif length <= 125: return chr(mask_bit | length) elif length < (1 << 16): return chr(mask_bit | 126) + struct.pack('!H', length) elif length < (1 << 63): return chr(mask_bit | 127) + struct.pack('!Q', length) else: raise ValueError('Payload is too big for one frame')
[ "def", "create_length_header", "(", "length", ",", "mask", ")", ":", "if", "mask", ":", "mask_bit", "=", "1", "<<", "7", "else", ":", "mask_bit", "=", "0", "if", "length", "<", "0", ":", "raise", "ValueError", "(", "'length must be non negative integer'", ")", "elif", "length", "<=", "125", ":", "return", "chr", "(", "mask_bit", "|", "length", ")", "elif", "length", "<", "(", "1", "<<", "16", ")", ":", "return", "chr", "(", "mask_bit", "|", "126", ")", "+", "struct", ".", "pack", "(", "'!H'", ",", "length", ")", "elif", "length", "<", "(", "1", "<<", "63", ")", ":", "return", "chr", "(", "mask_bit", "|", "127", ")", "+", "struct", ".", "pack", "(", "'!Q'", ",", "length", ")", "else", ":", "raise", "ValueError", "(", "'Payload is too big for one frame'", ")" ]
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/_stream_hybi.py#L74-L99
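Worked cases from two of the three length regimes (the chr/str usage marks this as Python 2 code):

    create_length_header(5, True)     # '\x85': mask bit 0x80 OR'd with length 5
    create_length_header(300, False)  # '\x7e\x01\x2c': 126 marker + 16-bit length 300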
microsoft/CNTK
e9396480025b9ca457d26b6f33dd07c474c6aa04
bindings/python/cntk/ops/__init__.py
python
reduce_max
(x, axis=None, keepdims=True, name='')
return reduce_max(x, axis, keepdims, name)
Computes the max of the input tensor's elements across a specified axis or a list of specified axes. Example: >>> # create 3x2x2 matrix in a sequence of length 1 in a batch of one sample >>> data = np.array([[[5,1], [20,2]],[[30,1], [40,2]],[[55,1], [60,2]]], dtype=np.float32) >>> C.reduce_max(data, 0).eval().round(4) array([[[ 55., 1.], [ 60., 2.]]], dtype=float32) >>> C.reduce_max(data, 1).eval().round(4) array([[[ 20., 2.]], <BLANKLINE> [[ 40., 2.]], <BLANKLINE> [[ 60., 2.]]], dtype=float32) >>> C.reduce_max(data, (0,2)).eval().round(4) array([[[ 55.], [ 60.]]], dtype=float32) >>> x = C.input_variable((2,2)) >>> C.reduce_max( x * 1.0, (C.Axis.default_batch_axis(), 1)).eval({x: data}).round(4) array([[ 55.], [ 60.]], dtype=float32) Args: x: input tensor axis (int or :class:`~cntk.axis.Axis` or a :obj:`list` or :obj:`tuple` of int or :class:`~cntk.axis.Axis`): axis along which the reduction will be performed keepdims (boolean): Keep the reduced dimension or not, default True mean keep reduced dimension name (str): the name of the Function instance in the network Returns: :class:`~cntk.ops.functions.Function` Note that CNTK keeps the shape of the resulting tensors when reducing over multiple static axes.
Computes the max of the input tensor's elements across a specified axis or a list of specified axes.
[ "Computes", "the", "max", "of", "the", "input", "tensor", "s", "elements", "across", "a", "specified", "axis", "or", "a", "list", "of", "specified", "axes", "." ]
def reduce_max(x, axis=None, keepdims=True, name=''): ''' Computes the max of the input tensor's elements across a specified axis or a list of specified axes. Example: >>> # create 3x2x2 matrix in a sequence of length 1 in a batch of one sample >>> data = np.array([[[5,1], [20,2]],[[30,1], [40,2]],[[55,1], [60,2]]], dtype=np.float32) >>> C.reduce_max(data, 0).eval().round(4) array([[[ 55., 1.], [ 60., 2.]]], dtype=float32) >>> C.reduce_max(data, 1).eval().round(4) array([[[ 20., 2.]], <BLANKLINE> [[ 40., 2.]], <BLANKLINE> [[ 60., 2.]]], dtype=float32) >>> C.reduce_max(data, (0,2)).eval().round(4) array([[[ 55.], [ 60.]]], dtype=float32) >>> x = C.input_variable((2,2)) >>> C.reduce_max( x * 1.0, (C.Axis.default_batch_axis(), 1)).eval({x: data}).round(4) array([[ 55.], [ 60.]], dtype=float32) Args: x: input tensor axis (int or :class:`~cntk.axis.Axis` or a :obj:`list` or :obj:`tuple` of int or :class:`~cntk.axis.Axis`): axis along which the reduction will be performed keepdims (boolean): Keep the reduced dimension or not, default True mean keep reduced dimension name (str): the name of the Function instance in the network Returns: :class:`~cntk.ops.functions.Function` Note that CNTK keeps the shape of the resulting tensors when reducing over multiple static axes. ''' from cntk.cntk_py import reduce_max x = sanitize_input(x) axis = sanitize_multi_axis_reduction_list(axis) return reduce_max(x, axis, keepdims, name)
[ "def", "reduce_max", "(", "x", ",", "axis", "=", "None", ",", "keepdims", "=", "True", ",", "name", "=", "''", ")", ":", "from", "cntk", ".", "cntk_py", "import", "reduce_max", "x", "=", "sanitize_input", "(", "x", ")", "axis", "=", "sanitize_multi_axis_reduction_list", "(", "axis", ")", "return", "reduce_max", "(", "x", ",", "axis", ",", "keepdims", ",", "name", ")" ]
https://github.com/microsoft/CNTK/blob/e9396480025b9ca457d26b6f33dd07c474c6aa04/bindings/python/cntk/ops/__init__.py#L3106-L3146
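For readers without CNTK installed, the keepdims shape behaviour documented in the reduce_max record can be mirrored with plain numpy; this is only an illustration of the reduction semantics, not the CNTK API:

import numpy as np

data = np.array([[[5, 1], [20, 2]], [[30, 1], [40, 2]], [[55, 1], [60, 2]]],
                dtype=np.float32)

# keepdims=True keeps a size-1 axis, matching the (1, 2, 2) result in the
# docstring example; reducing over two static axes keeps both as size 1.
print(np.max(data, axis=0, keepdims=True).shape)       # (1, 2, 2)
print(np.max(data, axis=(0, 2), keepdims=True).shape)  # (1, 2, 1)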
pristineio/webrtc-mirror
7a5bcdffaab90a05bc1146b2b1ea71c004e54d71
tools_webrtc/sslroots/generate_sslroots.py
python
main
()
The main entrypoint.
The main entrypoint.
[ "The", "main", "entrypoint", "." ]
def main(): """The main entrypoint.""" parser = OptionParser('usage %prog FILE') parser.add_option('-v', '--verbose', dest='verbose', action='store_true') parser.add_option('-f', '--full_cert', dest='full_cert', action='store_true') options, args = parser.parse_args() if len(args) < 1: parser.error('No crt file specified.') return root_dir = _SplitCrt(args[0], options) _GenCFiles(root_dir, options) _Cleanup(root_dir)
[ "def", "main", "(", ")", ":", "parser", "=", "OptionParser", "(", "'usage %prog FILE'", ")", "parser", ".", "add_option", "(", "'-v'", ",", "'--verbose'", ",", "dest", "=", "'verbose'", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_option", "(", "'-f'", ",", "'--full_cert'", ",", "dest", "=", "'full_cert'", ",", "action", "=", "'store_true'", ")", "options", ",", "args", "=", "parser", ".", "parse_args", "(", ")", "if", "len", "(", "args", ")", "<", "1", ":", "parser", ".", "error", "(", "'No crt file specified.'", ")", "return", "root_dir", "=", "_SplitCrt", "(", "args", "[", "0", "]", ",", "options", ")", "_GenCFiles", "(", "root_dir", ",", "options", ")", "_Cleanup", "(", "root_dir", ")" ]
https://github.com/pristineio/webrtc-mirror/blob/7a5bcdffaab90a05bc1146b2b1ea71c004e54d71/tools_webrtc/sslroots/generate_sslroots.py#L43-L54
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/contrib/slim/python/slim/nets/alexnet.py
python
alexnet_v2
(inputs, num_classes=1000, dropout_keep_prob=0.5, is_training=True, spatial_squeeze=True, scope='alexnet_v2')
AlexNet version 2. Described in: http://arxiv.org/pdf/1404.5997v2.pdf Parameters from: github.com/akrizhevsky/cuda-convnet2/blob/master/layers/ layers-imagenet-1gpu.cfg Note: All the fully_connected layers have been transformed to conv2d layers. To use in classification mode, resize input to 224x224. To use in fully convolutional mode, set spatial_squeeze to false. The LRN layers have been removed and the initializers changed from random_normal_initializer to xavier_initializer. Args: inputs: a tensor of size [batch_size, height, width, channels]. num_classes: number of predicted classes. dropout_keep_prob: the probability that activations are kept in the dropout layers during training. is_training: whether or not the model is being trained. spatial_squeeze: whether or not to squeeze the spatial dimensions of the outputs. Useful to remove unnecessary dimensions for classification. scope: Optional scope for the variables. Returns: the last op containing the log predictions and end_points dict.
AlexNet version 2.
[ "AlexNet", "version", "2", "." ]
def alexnet_v2(inputs, num_classes=1000, dropout_keep_prob=0.5, is_training=True, spatial_squeeze=True, scope='alexnet_v2'): """AlexNet version 2. Described in: http://arxiv.org/pdf/1404.5997v2.pdf Parameters from: github.com/akrizhevsky/cuda-convnet2/blob/master/layers/ layers-imagenet-1gpu.cfg Note: All the fully_connected layers have been transformed to conv2d layers. To use in classification mode, resize input to 224x224. To use in fully convolutional mode, set spatial_squeeze to false. The LRN layers have been removed and change the initializers from random_normal_initializer to xavier_initializer. Args: inputs: a tensor of size [batch_size, height, width, channels]. num_classes: number of predicted classes. dropout_keep_prob: the probability that activations are kept in the dropout layers during training. is_training: whether or not the model is being trained. spatial_squeeze: whether or not should squeeze the spatial dimensions of the outputs. Useful to remove unnecessary dimensions for classification. scope: Optional scope for the variables. Returns: the last op containing the log predictions and end_points dict. """ with tf.variable_op_scope([inputs], scope, 'alexnet_v2') as sc: end_points_collection = sc.name + '_end_points' # Collect outputs for conv2d, fully_connected and max_pool2d. with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d], outputs_collections=[end_points_collection]): net = slim.conv2d(inputs, 64, [11, 11], 4, padding='VALID', scope='conv1') net = slim.max_pool2d(net, [3, 3], 2, scope='pool1') net = slim.conv2d(net, 192, [5, 5], scope='conv2') net = slim.max_pool2d(net, [3, 3], 2, scope='pool2') net = slim.conv2d(net, 384, [3, 3], scope='conv3') net = slim.conv2d(net, 384, [3, 3], scope='conv4') net = slim.conv2d(net, 256, [3, 3], scope='conv5') net = slim.max_pool2d(net, [3, 3], 2, scope='pool5') # Use conv2d instead of fully_connected layers. with slim.arg_scope([slim.conv2d], weights_initializer=trunc_normal(0.005), biases_initializer=tf.constant_initializer(0.1)): net = slim.conv2d(net, 4096, [5, 5], padding='VALID', scope='fc6') net = slim.dropout(net, dropout_keep_prob, is_training=is_training, scope='dropout6') net = slim.conv2d(net, 4096, [1, 1], scope='fc7') net = slim.dropout(net, dropout_keep_prob, is_training=is_training, scope='dropout7') net = slim.conv2d(net, num_classes, [1, 1], activation_fn=None, normalizer_fn=None, biases_initializer=tf.zeros_initializer, scope='fc8') # Convert end_points_collection into a end_point dict. end_points = dict(tf.get_collection(end_points_collection)) if spatial_squeeze: net = tf.squeeze(net, [1, 2], name='fc8/squeezed') end_points[sc.name + '/fc8'] = net return net, end_points
[ "def", "alexnet_v2", "(", "inputs", ",", "num_classes", "=", "1000", ",", "dropout_keep_prob", "=", "0.5", ",", "is_training", "=", "True", ",", "spatial_squeeze", "=", "True", ",", "scope", "=", "'alexnet_v2'", ")", ":", "with", "tf", ".", "variable_op_scope", "(", "[", "inputs", "]", ",", "scope", ",", "'alexnet_v2'", ")", "as", "sc", ":", "end_points_collection", "=", "sc", ".", "name", "+", "'_end_points'", "# Collect outputs for conv2d, fully_connected and max_pool2d.", "with", "slim", ".", "arg_scope", "(", "[", "slim", ".", "conv2d", ",", "slim", ".", "fully_connected", ",", "slim", ".", "max_pool2d", "]", ",", "outputs_collections", "=", "[", "end_points_collection", "]", ")", ":", "net", "=", "slim", ".", "conv2d", "(", "inputs", ",", "64", ",", "[", "11", ",", "11", "]", ",", "4", ",", "padding", "=", "'VALID'", ",", "scope", "=", "'conv1'", ")", "net", "=", "slim", ".", "max_pool2d", "(", "net", ",", "[", "3", ",", "3", "]", ",", "2", ",", "scope", "=", "'pool1'", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "192", ",", "[", "5", ",", "5", "]", ",", "scope", "=", "'conv2'", ")", "net", "=", "slim", ".", "max_pool2d", "(", "net", ",", "[", "3", ",", "3", "]", ",", "2", ",", "scope", "=", "'pool2'", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "384", ",", "[", "3", ",", "3", "]", ",", "scope", "=", "'conv3'", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "384", ",", "[", "3", ",", "3", "]", ",", "scope", "=", "'conv4'", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "256", ",", "[", "3", ",", "3", "]", ",", "scope", "=", "'conv5'", ")", "net", "=", "slim", ".", "max_pool2d", "(", "net", ",", "[", "3", ",", "3", "]", ",", "2", ",", "scope", "=", "'pool5'", ")", "# Use conv2d instead of fully_connected layers.", "with", "slim", ".", "arg_scope", "(", "[", "slim", ".", "conv2d", "]", ",", "weights_initializer", "=", "trunc_normal", "(", "0.005", ")", ",", "biases_initializer", "=", "tf", ".", "constant_initializer", "(", "0.1", ")", ")", ":", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "4096", ",", "[", "5", ",", "5", "]", ",", "padding", "=", "'VALID'", ",", "scope", "=", "'fc6'", ")", "net", "=", "slim", ".", "dropout", "(", "net", ",", "dropout_keep_prob", ",", "is_training", "=", "is_training", ",", "scope", "=", "'dropout6'", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "4096", ",", "[", "1", ",", "1", "]", ",", "scope", "=", "'fc7'", ")", "net", "=", "slim", ".", "dropout", "(", "net", ",", "dropout_keep_prob", ",", "is_training", "=", "is_training", ",", "scope", "=", "'dropout7'", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "num_classes", ",", "[", "1", ",", "1", "]", ",", "activation_fn", "=", "None", ",", "normalizer_fn", "=", "None", ",", "biases_initializer", "=", "tf", ".", "zeros_initializer", ",", "scope", "=", "'fc8'", ")", "# Convert end_points_collection into a end_point dict.", "end_points", "=", "dict", "(", "tf", ".", "get_collection", "(", "end_points_collection", ")", ")", "if", "spatial_squeeze", ":", "net", "=", "tf", ".", "squeeze", "(", "net", ",", "[", "1", ",", "2", "]", ",", "name", "=", "'fc8/squeezed'", ")", "end_points", "[", "sc", ".", "name", "+", "'/fc8'", "]", "=", "net", "return", "net", ",", "end_points" ]
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/slim/python/slim/nets/alexnet.py#L51-L120
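The docstring's 224x224 requirement for classification mode follows from fc6 being a 5x5 VALID convolution: only a 224x224 input collapses to a 1x1 spatial map at that point. A quick arithmetic check in plain Python (the helper name is mine; the formulas are the standard TF VALID/SAME output sizes):

def out_size(n, k, s, padding):
    # Spatial output size of a conv/pool layer under TF padding rules.
    return (n - k) // s + 1 if padding == 'VALID' else -(-n // s)

n = 224
n = out_size(n, 11, 4, 'VALID')  # conv1 -> 54
n = out_size(n, 3, 2, 'VALID')   # pool1 -> 26
n = out_size(n, 5, 1, 'SAME')    # conv2 -> 26
n = out_size(n, 3, 2, 'VALID')   # pool2 -> 12
n = out_size(n, 3, 1, 'SAME')    # conv3..conv5 -> 12
n = out_size(n, 3, 2, 'VALID')   # pool5 -> 5
n = out_size(n, 5, 1, 'VALID')   # fc6 -> 1
print(n)  # 1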
Kitware/ParaView
f760af9124ff4634b23ebbeab95a4f56e0261955
ThirdParty/cinema/paraview/tpl/cinema_python/database/file_store.py
python
FileStore.load
(self)
loads an existing filestore
loads an existing filestore
[ "loads", "an", "existing", "filestore" ]
def load(self): """loads an existing filestore""" super(FileStore, self).load() with open(self.__dbfilename, mode="r") as file: info_json = json.load(file) if 'arguments' in info_json: self._set_parameter_list(info_json['arguments']) elif 'parameter_list' in info_json: self._set_parameter_list(info_json['parameter_list']) else: print("Error I can't read that file") exit() self.metadata = info_json['metadata'] self.filename_pattern = info_json['name_pattern'] a = {} if 'associations' in info_json: a = info_json['associations'] elif 'constraints' in info_json: a = info_json['constraints'] self._set_parameter_associations(a)
[ "def", "load", "(", "self", ")", ":", "super", "(", "FileStore", ",", "self", ")", ".", "load", "(", ")", "with", "open", "(", "self", ".", "__dbfilename", ",", "mode", "=", "\"r\"", ")", "as", "file", ":", "info_json", "=", "json", ".", "load", "(", "file", ")", "if", "'arguments'", "in", "info_json", ":", "self", ".", "_set_parameter_list", "(", "info_json", "[", "'arguments'", "]", ")", "elif", "'parameter_list'", "in", "info_json", ":", "self", ".", "_set_parameter_list", "(", "info_json", "[", "'parameter_list'", "]", ")", "else", ":", "print", "(", "\"Error I can't read that file\"", ")", "exit", "(", ")", "self", ".", "metadata", "=", "info_json", "[", "'metadata'", "]", "self", ".", "filename_pattern", "=", "info_json", "[", "'name_pattern'", "]", "a", "=", "{", "}", "if", "'associations'", "in", "info_json", ":", "a", "=", "info_json", "[", "'associations'", "]", "elif", "'constraints'", "in", "info_json", ":", "a", "=", "info_json", "[", "'constraints'", "]", "self", ".", "_set_parameter_associations", "(", "a", ")" ]
https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/ThirdParty/cinema/paraview/tpl/cinema_python/database/file_store.py#L45-L64
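FileStore.load above accepts two generations of keys: 'arguments'/'associations' from older stores and 'parameter_list'/'constraints' as the newer spelling. A minimal info.json that this loader would parse might look like the sketch below (the metadata value and parameter fields are illustrative assumptions, not taken from the cinema spec):

import json

info = {
    "metadata": {"type": "parametric-image-stack"},
    "name_pattern": "{phi}/{theta}/image.png",
    "parameter_list": {
        "phi": {"default": 0, "values": [0, 90, 180]},
        "theta": {"default": 0, "values": [0, 45, 90]},
    },
    "constraints": {},
}
with open("info.json", "w") as f:
    json.dump(info, f, indent=2)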
SequoiaDB/SequoiaDB
2894ed7e5bd6fe57330afc900cf76d0ff0df9f64
tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py
python
xmlNode.freeNodeList
(self)
Free a node and all its siblings; this is a recursive behaviour: all the children are freed too.
Free a node and all its siblings; this is a recursive behaviour: all the children are freed too.
[ "Free", "a", "node", "and", "all", "its", "siblings", "this", "is", "a", "recursive", "behaviour", "all", "the", "children", "are", "freed", "too", "." ]
def freeNodeList(self): """Free a node and all its siblings; this is a recursive behaviour: all the children are freed too. """ libxml2mod.xmlFreeNodeList(self._o)
[ "def", "freeNodeList", "(", "self", ")", ":", "libxml2mod", ".", "xmlFreeNodeList", "(", "self", ".", "_o", ")" ]
https://github.com/SequoiaDB/SequoiaDB/blob/2894ed7e5bd6fe57330afc900cf76d0ff0df9f64/tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py#L3184-L3187
mongodb/mongo
d8ff665343ad29cf286ee2cf4a1960d29371937b
src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Environment.py
python
Base._update
(self, dict)
Update an environment's values directly, bypassing the normal checks that occur when users try to set items.
Update an environment's values directly, bypassing the normal checks that occur when users try to set items.
[ "Update", "an", "environment", "s", "values", "directly", "bypassing", "the", "normal", "checks", "that", "occur", "when", "users", "try", "to", "set", "items", "." ]
def _update(self, dict): """Update an environment's values directly, bypassing the normal checks that occur when users try to set items. """ self._dict.update(dict)
[ "def", "_update", "(", "self", ",", "dict", ")", ":", "self", ".", "_dict", ".", "update", "(", "dict", ")" ]
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Environment.py#L1127-L1131
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/layers/python/layers/regularizers.py
python
l1_l2_regularizer
(scale_l1=1.0, scale_l2=1.0, scope=None)
return sum_regularizer([l1_regularizer(scale_l1), l2_regularizer(scale_l2)], scope=scope)
Returns a function that can be used to apply L1 L2 regularizations. Args: scale_l1: A scalar multiplier `Tensor` for L1 regularization. scale_l2: A scalar multiplier `Tensor` for L2 regularization. scope: An optional scope name. Returns: A function with signature `l1_l2(weights)` that applies a weighted sum of L1 L2 regularization. Raises: ValueError: If scale is negative or if scale is not a float.
Returns a function that can be used to apply L1 L2 regularizations.
[ "Returns", "a", "function", "that", "can", "be", "used", "to", "apply", "L1", "L2", "regularizations", "." ]
def l1_l2_regularizer(scale_l1=1.0, scale_l2=1.0, scope=None): """Returns a function that can be used to apply L1 L2 regularizations. Args: scale_l1: A scalar multiplier `Tensor` for L1 regularization. scale_l2: A scalar multiplier `Tensor` for L2 regularization. scope: An optional scope name. Returns: A function with signature `l1_l2(weights)` that applies a weighted sum of L1 L2 regularization. Raises: ValueError: If scale is negative or if scale is not a float. """ if isinstance(scale_l1, numbers.Integral): raise ValueError('scale_l1 cannot be an integer: %s' % (scale_l1,)) if isinstance(scale_l2, numbers.Integral): raise ValueError('scale_l2 cannot be an integer: %s' % (scale_l2,)) scope = scope or 'l1_l2_regularizer' if scale_l1 == 0.: return l2_regularizer(scale_l2, scope) if scale_l2 == 0.: return l1_regularizer(scale_l1, scope) return sum_regularizer([l1_regularizer(scale_l1), l2_regularizer(scale_l2)], scope=scope)
[ "def", "l1_l2_regularizer", "(", "scale_l1", "=", "1.0", ",", "scale_l2", "=", "1.0", ",", "scope", "=", "None", ")", ":", "if", "isinstance", "(", "scale_l1", ",", "numbers", ".", "Integral", ")", ":", "raise", "ValueError", "(", "'scale_l1 cannot be an integer: %s'", "%", "(", "scale_l1", ",", ")", ")", "if", "isinstance", "(", "scale_l2", ",", "numbers", ".", "Integral", ")", ":", "raise", "ValueError", "(", "'scale_l2 cannot be an integer: %s'", "%", "(", "scale_l2", ",", ")", ")", "scope", "=", "scope", "or", "'l1_l2_regularizer'", "if", "scale_l1", "==", "0.", ":", "return", "l2_regularizer", "(", "scale_l2", ",", "scope", ")", "if", "scale_l2", "==", "0.", ":", "return", "l1_regularizer", "(", "scale_l1", ",", "scope", ")", "return", "sum_regularizer", "(", "[", "l1_regularizer", "(", "scale_l1", ")", ",", "l2_regularizer", "(", "scale_l2", ")", "]", ",", "scope", "=", "scope", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/layers/python/layers/regularizers.py#L112-L138
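Numerically, the callable returned by l1_l2_regularizer adds scale_l1 * sum|w| plus the L2 term to the loss; the sketch below mirrors that with numpy, under the assumption that the underlying l2_regularizer follows tf.nn.l2_loss and uses the one-half convention (worth verifying against your TF version):

import numpy as np

def l1_l2_penalty(weights, scale_l1=1.0, scale_l2=1.0):
    # Approximately the quantity the returned regularizer adds to the loss.
    l1 = scale_l1 * np.sum(np.abs(weights))
    l2 = scale_l2 * 0.5 * np.sum(np.square(weights))  # assumed 1/2 * sum(w^2)
    return l1 + l2

w = np.array([0.5, -1.5, 2.0])
print(l1_l2_penalty(w, scale_l1=0.01, scale_l2=0.01))  # 0.01*4.0 + 0.01*0.5*6.5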
Komnomnomnom/swigibpy
cfd307fdbfaffabc69a2dc037538d7e34a8b8daf
swigibpy.py
python
SwigPyIterator.advance
(self, n)
return _swigibpy.SwigPyIterator_advance(self, n)
advance(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator
advance(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator
[ "advance", "(", "SwigPyIterator", "self", "ptrdiff_t", "n", ")", "-", ">", "SwigPyIterator" ]
def advance(self, n): """advance(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator""" return _swigibpy.SwigPyIterator_advance(self, n)
[ "def", "advance", "(", "self", ",", "n", ")", ":", "return", "_swigibpy", ".", "SwigPyIterator_advance", "(", "self", ",", "n", ")" ]
https://github.com/Komnomnomnom/swigibpy/blob/cfd307fdbfaffabc69a2dc037538d7e34a8b8daf/swigibpy.py#L178-L180
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/compiler-rt/lib/sanitizer_common/scripts/cpplint.py
python
_CppLintState.SetCountingStyle
(self, counting_style)
Sets the module's counting options.
Sets the module's counting options.
[ "Sets", "the", "module", "s", "counting", "options", "." ]
def SetCountingStyle(self, counting_style): """Sets the module's counting options.""" self.counting = counting_style
[ "def", "SetCountingStyle", "(", "self", ",", "counting_style", ")", ":", "self", ".", "counting", "=", "counting_style" ]
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/compiler-rt/lib/sanitizer_common/scripts/cpplint.py#L577-L579
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/signal/windows.py
python
bohman
(M, sym=True)
return _truncate(w, needs_trunc)
Return a Bohman window. Parameters ---------- M : int Number of points in the output window. If zero or less, an empty array is returned. sym : bool, optional When True (default), generates a symmetric window, for use in filter design. When False, generates a periodic window, for use in spectral analysis. Returns ------- w : ndarray The window, with the maximum value normalized to 1 (though the value 1 does not appear if `M` is even and `sym` is True). Examples -------- Plot the window and its frequency response: >>> from scipy import signal >>> from scipy.fftpack import fft, fftshift >>> import matplotlib.pyplot as plt >>> window = signal.bohman(51) >>> plt.plot(window) >>> plt.title("Bohman window") >>> plt.ylabel("Amplitude") >>> plt.xlabel("Sample") >>> plt.figure() >>> A = fft(window, 2048) / (len(window)/2.0) >>> freq = np.linspace(-0.5, 0.5, len(A)) >>> response = 20 * np.log10(np.abs(fftshift(A / abs(A).max()))) >>> plt.plot(freq, response) >>> plt.axis([-0.5, 0.5, -120, 0]) >>> plt.title("Frequency response of the Bohman window") >>> plt.ylabel("Normalized magnitude [dB]") >>> plt.xlabel("Normalized frequency [cycles per sample]")
Return a Bohman window.
[ "Return", "a", "Bohman", "window", "." ]
def bohman(M, sym=True): """Return a Bohman window. Parameters ---------- M : int Number of points in the output window. If zero or less, an empty array is returned. sym : bool, optional When True (default), generates a symmetric window, for use in filter design. When False, generates a periodic window, for use in spectral analysis. Returns ------- w : ndarray The window, with the maximum value normalized to 1 (though the value 1 does not appear if `M` is even and `sym` is True). Examples -------- Plot the window and its frequency response: >>> from scipy import signal >>> from scipy.fftpack import fft, fftshift >>> import matplotlib.pyplot as plt >>> window = signal.bohman(51) >>> plt.plot(window) >>> plt.title("Bohman window") >>> plt.ylabel("Amplitude") >>> plt.xlabel("Sample") >>> plt.figure() >>> A = fft(window, 2048) / (len(window)/2.0) >>> freq = np.linspace(-0.5, 0.5, len(A)) >>> response = 20 * np.log10(np.abs(fftshift(A / abs(A).max()))) >>> plt.plot(freq, response) >>> plt.axis([-0.5, 0.5, -120, 0]) >>> plt.title("Frequency response of the Bohman window") >>> plt.ylabel("Normalized magnitude [dB]") >>> plt.xlabel("Normalized frequency [cycles per sample]") """ if _len_guards(M): return np.ones(M) M, needs_trunc = _extend(M, sym) fac = np.abs(np.linspace(-1, 1, M)[1:-1]) w = (1 - fac) * np.cos(np.pi * fac) + 1.0 / np.pi * np.sin(np.pi * fac) w = np.r_[0, w, 0] return _truncate(w, needs_trunc)
[ "def", "bohman", "(", "M", ",", "sym", "=", "True", ")", ":", "if", "_len_guards", "(", "M", ")", ":", "return", "np", ".", "ones", "(", "M", ")", "M", ",", "needs_trunc", "=", "_extend", "(", "M", ",", "sym", ")", "fac", "=", "np", ".", "abs", "(", "np", ".", "linspace", "(", "-", "1", ",", "1", ",", "M", ")", "[", "1", ":", "-", "1", "]", ")", "w", "=", "(", "1", "-", "fac", ")", "*", "np", ".", "cos", "(", "np", ".", "pi", "*", "fac", ")", "+", "1.0", "/", "np", ".", "pi", "*", "np", ".", "sin", "(", "np", ".", "pi", "*", "fac", ")", "w", "=", "np", ".", "r_", "[", "0", ",", "w", ",", "0", "]", "return", "_truncate", "(", "w", ",", "needs_trunc", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/signal/windows.py#L301-L353
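In closed form, the symmetric window computed by the bohman record is

w(x) = (1 - |x|)\cos(\pi|x|) + \frac{1}{\pi}\sin(\pi|x|), \qquad |x| \le 1,

with the two endpoints forced to zero (the np.r_[0, w, 0] line); fac in the code is |x| sampled on a uniform grid over (-1, 1).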
quarkslab/arybo
89d9a4266fa51c1a560f6c4a66f65d1ffde5f093
arybo/lib/mba_if.py
python
simplify
(e)
return __call_impl_func(simplify_vec, e)
Simplify the expression or variable e.
Simplify the expression or variable e.
[ "Simplify", "the", "expression", "or", "variable", "e", "." ]
def simplify(e): ''' Simplify the expression or variable e. ''' return __call_impl_func(simplify_vec, e)
[ "def", "simplify", "(", "e", ")", ":", "return", "__call_impl_func", "(", "simplify_vec", ",", "e", ")" ]
https://github.com/quarkslab/arybo/blob/89d9a4266fa51c1a560f6c4a66f65d1ffde5f093/arybo/lib/mba_if.py#L60-L62
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
python
SBSourceManager.__init__
(self, *args)
__init__(self, SBSourceManager rhs) -> SBSourceManager
__init__(self, SBSourceManager rhs) -> SBSourceManager
[ "__init__", "(", "self", "SBSourceManager", "rhs", ")", "-", ">", "SBSourceManager" ]
def __init__(self, *args): """__init__(self, SBSourceManager rhs) -> SBSourceManager""" this = _lldb.new_SBSourceManager(*args) try: self.this.append(this) except: self.this = this
[ "def", "__init__", "(", "self", ",", "*", "args", ")", ":", "this", "=", "_lldb", ".", "new_SBSourceManager", "(", "*", "args", ")", "try", ":", "self", ".", "this", ".", "append", "(", "this", ")", "except", ":", "self", ".", "this", "=", "this" ]
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L7848-L7852
deepmodeling/deepmd-kit
159e45d248b0429844fb6a8cb3b3a201987c8d79
deepmd/env.py
python
get_tf_default_nthreads
()
return int(os.environ.get("TF_INTRA_OP_PARALLELISM_THREADS", "0")), int( os.environ.get("TF_INTER_OP_PARALLELISM_THREADS", "0") )
Get TF parallelism settings. Returns ------- Tuple[int, int] number of `TF_INTRA_OP_PARALLELISM_THREADS` and `TF_INTER_OP_PARALLELISM_THREADS`
Get TF parallelism settings.
[ "Get", "TF", "parallelism", "settings", "." ]
def get_tf_default_nthreads() -> Tuple[int, int]: """Get TF parallelism settings. Returns ------- Tuple[int, int] number of `TF_INTRA_OP_PARALLELISM_THREADS` and `TF_INTER_OP_PARALLELISM_THREADS` """ return int(os.environ.get("TF_INTRA_OP_PARALLELISM_THREADS", "0")), int( os.environ.get("TF_INTER_OP_PARALLELISM_THREADS", "0") )
[ "def", "get_tf_default_nthreads", "(", ")", "->", "Tuple", "[", "int", ",", "int", "]", ":", "return", "int", "(", "os", ".", "environ", ".", "get", "(", "\"TF_INTRA_OP_PARALLELISM_THREADS\"", ",", "\"0\"", ")", ")", ",", "int", "(", "os", ".", "environ", ".", "get", "(", "\"TF_INTER_OP_PARALLELISM_THREADS\"", ",", "\"0\"", ")", ")" ]
https://github.com/deepmodeling/deepmd-kit/blob/159e45d248b0429844fb6a8cb3b3a201987c8d79/deepmd/env.py#L97-L108
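Because get_tf_default_nthreads only reads environment variables (with "0" meaning let TensorFlow decide), callers pin the thread pools by exporting the variables before anything imports TensorFlow; a small usage sketch, assuming deepmd is installed:

import os

# Any positive value pins the pool size; "0" lets TensorFlow choose.
os.environ.setdefault("TF_INTRA_OP_PARALLELISM_THREADS", "4")
os.environ.setdefault("TF_INTER_OP_PARALLELISM_THREADS", "2")

from deepmd.env import get_tf_default_nthreads
print(get_tf_default_nthreads())  # (4, 2)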
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/setuptools/_vendor/pyparsing.py
python
withClass
(classname, namespace='')
return withAttribute(**{classattr : classname})
Simplified version of C{L{withAttribute}} when matching on a div class - made difficult because C{class} is a reserved word in Python. Example:: html = ''' <div> Some text <div class="grid">1 4 0 1 0</div> <div class="graph">1,3 2,3 1,1</div> <div>this &lt;div&gt; has no class</div> </div> ''' div,div_end = makeHTMLTags("div") div_grid = div().setParseAction(withClass("grid")) grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) prints:: 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1
Simplified version of C{L{withAttribute}} when matching on a div class - made difficult because C{class} is a reserved word in Python.
[ "Simplified", "version", "of", "C", "{", "L", "{", "withAttribute", "}}", "when", "matching", "on", "a", "div", "class", "-", "made", "difficult", "because", "C", "{", "class", "}", "is", "a", "reserved", "word", "in", "Python", "." ]
def withClass(classname, namespace=''): """ Simplified version of C{L{withAttribute}} when matching on a div class - made difficult because C{class} is a reserved word in Python. Example:: html = ''' <div> Some text <div class="grid">1 4 0 1 0</div> <div class="graph">1,3 2,3 1,1</div> <div>this &lt;div&gt; has no class</div> </div> ''' div,div_end = makeHTMLTags("div") div_grid = div().setParseAction(withClass("grid")) grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) prints:: 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1 """ classattr = "%s:class" % namespace if namespace else "class" return withAttribute(**{classattr : classname})
[ "def", "withClass", "(", "classname", ",", "namespace", "=", "''", ")", ":", "classattr", "=", "\"%s:class\"", "%", "namespace", "if", "namespace", "else", "\"class\"", "return", "withAttribute", "(", "*", "*", "{", "classattr", ":", "classname", "}", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/setuptools/_vendor/pyparsing.py#L4997-L5030
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py
python
readerForDoc
(cur, URL, encoding, options)
return xmlTextReader(_obj=ret)
Create an xmltextReader for an XML in-memory document. The parsing flags @options are a combination of xmlParserOption.
Create an xmltextReader for an XML in-memory document. The parsing flags
[ "Create", "an", "xmltextReader", "for", "an", "XML", "in", "-", "memory", "document", ".", "The", "parsing", "flags" ]
def readerForDoc(cur, URL, encoding, options): """Create an xmltextReader for an XML in-memory document. The parsing flags @options are a combination of xmlParserOption. """ ret = libxml2mod.xmlReaderForDoc(cur, URL, encoding, options) if ret is None:raise treeError('xmlReaderForDoc() failed') return xmlTextReader(_obj=ret)
[ "def", "readerForDoc", "(", "cur", ",", "URL", ",", "encoding", ",", "options", ")", ":", "ret", "=", "libxml2mod", ".", "xmlReaderForDoc", "(", "cur", ",", "URL", ",", "encoding", ",", "options", ")", "if", "ret", "is", "None", ":", "raise", "treeError", "(", "'xmlReaderForDoc() failed'", ")", "return", "xmlTextReader", "(", "_obj", "=", "ret", ")" ]
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py#L1950-L1955
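Typical use of the returned xmlTextReader is a pull-parsing loop. A sketch, assuming the standard libxml2 reader methods Read, Name and NodeType are available (they are not shown in this record):

import libxml2

doc = "<root><child>text</child></root>"
reader = libxml2.readerForDoc(doc, None, None, 0)
while reader.Read() == 1:
    # NodeType 1 is an element start, 3 a text node (libxml2 constants).
    print(reader.NodeType(), reader.Name())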
generalized-intelligence/GAAS
29ab17d3e8a4ba18edef3a57c36d8db6329fac73
deprecated/algorithms/sfm/OpenSfM/opensfm/io.py
python
point_to_json
(point)
return { 'color': list(point.color), 'coordinates': list(point.coordinates), 'reprojection_error': point.reprojection_error }
Write a point to a json object
Write a point to a json object
[ "Write", "a", "point", "to", "a", "json", "object" ]
def point_to_json(point): """ Write a point to a json object """ return { 'color': list(point.color), 'coordinates': list(point.coordinates), 'reprojection_error': point.reprojection_error }
[ "def", "point_to_json", "(", "point", ")", ":", "return", "{", "'color'", ":", "list", "(", "point", ".", "color", ")", ",", "'coordinates'", ":", "list", "(", "point", ".", "coordinates", ")", ",", "'reprojection_error'", ":", "point", ".", "reprojection_error", "}" ]
https://github.com/generalized-intelligence/GAAS/blob/29ab17d3e8a4ba18edef3a57c36d8db6329fac73/deprecated/algorithms/sfm/OpenSfM/opensfm/io.py#L297-L305
apiaryio/snowcrash
b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3
tools/gyp/tools/pretty_gyp.py
python
split_double_braces
(input)
return output
Masks out the quotes and comments, and then splits appropriate lines (lines that match the double_*_brace re's above) before indenting them below. These are used to split lines which have multiple braces on them, so that the indentation looks prettier when all laid out (e.g. closing braces make a nice diagonal line).
Masks out the quotes and comments, and then splits appropriate lines (lines that match the double_*_brace re's above) before indenting them below.
[ "Masks", "out", "the", "quotes", "and", "comments", "and", "then", "splits", "appropriate", "lines", "(", "lines", "that", "match", "the", "double_", "*", "_brace", "re", "s", "above", ")", "before", "indenting", "them", "below", "." ]
def split_double_braces(input): """Masks out the quotes and comments, and then splits appropriate lines (lines that match the double_*_brace re's above) before indenting them below. These are used to split lines which have multiple braces on them, so that the indentation looks prettier when all laid out (e.g. closing braces make a nice diagonal line). """ double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])') double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])') masked_input = mask_quotes(input) masked_input = mask_comments(masked_input) (output, mask_output) = do_split(input, masked_input, double_open_brace_re) (output, mask_output) = do_split(output, mask_output, double_close_brace_re) return output
[ "def", "split_double_braces", "(", "input", ")", ":", "double_open_brace_re", "=", "re", ".", "compile", "(", "r'(.*?[\\[\\{\\(,])(\\s*)([\\[\\{\\(])'", ")", "double_close_brace_re", "=", "re", ".", "compile", "(", "r'(.*?[\\]\\}\\)],?)(\\s*)([\\]\\}\\)])'", ")", "masked_input", "=", "mask_quotes", "(", "input", ")", "masked_input", "=", "mask_comments", "(", "masked_input", ")", "(", "output", ",", "mask_output", ")", "=", "do_split", "(", "input", ",", "masked_input", ",", "double_open_brace_re", ")", "(", "output", ",", "mask_output", ")", "=", "do_split", "(", "output", ",", "mask_output", ",", "double_close_brace_re", ")", "return", "output" ]
https://github.com/apiaryio/snowcrash/blob/b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3/tools/gyp/tools/pretty_gyp.py#L62-L80
epam/Indigo
30e40b4b1eb9bae0207435a26cfcb81ddcc42be1
api/python/indigo/__init__.py
python
IndigoObject.iterateMatches
(self, query)
return self.dispatcher.IndigoObject( self.dispatcher, self.dispatcher._checkResult( Indigo._lib.indigoIterateMatches(self.id, query.id) ), )
Matcher method returns matches iterator Args: query (IndigoObject): query structure Returns: IndigoObject: matches iterator
Matcher method returns matches iterator
[ "Matcher", "method", "returns", "matches", "iterator" ]
def iterateMatches(self, query): """Matcher method returns matches iterator Args: query (IndigoObject): query structure Returns: IndigoObject: matches iterator """ self.dispatcher._setSessionId() return self.dispatcher.IndigoObject( self.dispatcher, self.dispatcher._checkResult( Indigo._lib.indigoIterateMatches(self.id, query.id) ), )
[ "def", "iterateMatches", "(", "self", ",", "query", ")", ":", "self", ".", "dispatcher", ".", "_setSessionId", "(", ")", "return", "self", ".", "dispatcher", ".", "IndigoObject", "(", "self", ".", "dispatcher", ",", "self", ".", "dispatcher", ".", "_checkResult", "(", "Indigo", ".", "_lib", ".", "indigoIterateMatches", "(", "self", ".", "id", ",", "query", ".", "id", ")", ")", ",", ")" ]
https://github.com/epam/Indigo/blob/30e40b4b1eb9bae0207435a26cfcb81ddcc42be1/api/python/indigo/__init__.py#L3896-L3911
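iterateMatches is called on a matcher object rather than on a molecule. A typical flow, assuming the usual Indigo entry points loadMolecule, loadQueryMolecule and substructureMatcher (none of which appear in this record):

from indigo import Indigo

indigo = Indigo()
mol = indigo.loadMolecule("c1ccccc1O")     # phenol
query = indigo.loadQueryMolecule("O")      # any oxygen atom
matcher = indigo.substructureMatcher(mol)
for match in matcher.iterateMatches(query):
    print(match)                           # one IndigoObject per embedding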
cocos-creator/engine-native
984c4c9f5838253313b44ccd429bd8fac4ec8a6a
tools/bindings-generator/clang/cindex.py
python
CursorKind.is_unexposed
(self)
return conf.lib.clang_isUnexposed(self)
Test if this is an unexposed kind.
Test if this is an unexposed kind.
[ "Test", "if", "this", "is", "an", "unexposed", "kind", "." ]
def is_unexposed(self): """Test if this is an unexposed kind.""" return conf.lib.clang_isUnexposed(self)
[ "def", "is_unexposed", "(", "self", ")", ":", "return", "conf", ".", "lib", ".", "clang_isUnexposed", "(", "self", ")" ]
https://github.com/cocos-creator/engine-native/blob/984c4c9f5838253313b44ccd429bd8fac4ec8a6a/tools/bindings-generator/clang/cindex.py#L703-L705
okex/V3-Open-API-SDK
c5abb0db7e2287718e0055e17e57672ce0ec7fd9
okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_internal/vcs/subversion.py
python
Subversion.export
(self, location)
Export the svn repository at the url to the destination location
Export the svn repository at the url to the destination location
[ "Export", "the", "svn", "repository", "at", "the", "url", "to", "the", "destination", "location" ]
def export(self, location): """Export the svn repository at the url to the destination location""" url, rev_options = self.get_url_rev_options(self.url) logger.info('Exporting svn repository %s to %s', url, location) with indent_log(): if os.path.exists(location): # Subversion doesn't like to check out over an existing # directory --force fixes this, but was only added in svn 1.5 rmtree(location) cmd_args = ['export'] + rev_options.to_args() + [url, location] self.run_command(cmd_args, show_stdout=False)
[ "def", "export", "(", "self", ",", "location", ")", ":", "url", ",", "rev_options", "=", "self", ".", "get_url_rev_options", "(", "self", ".", "url", ")", "logger", ".", "info", "(", "'Exporting svn repository %s to %s'", ",", "url", ",", "location", ")", "with", "indent_log", "(", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "location", ")", ":", "# Subversion doesn't like to check out over an existing", "# directory --force fixes this, but was only added in svn 1.5", "rmtree", "(", "location", ")", "cmd_args", "=", "[", "'export'", "]", "+", "rev_options", ".", "to_args", "(", ")", "+", "[", "url", ",", "location", "]", "self", ".", "run_command", "(", "cmd_args", ",", "show_stdout", "=", "False", ")" ]
https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_internal/vcs/subversion.py#L31-L42
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
src/external/boost/boost_1_68_0/tools/build/src/build/targets.py
python
BasicTarget.construct
(self, name, source_targets, properties)
Constructs the virtual targets for this abstract target and the dependency graph. Returns a tuple consisting of the properties and the list of virtual targets. Should be overridden in derived classes.
Constructs the virtual targets for this abstract target and the dependency graph. Returns a tuple consisting of the properties and the list of virtual targets. Should be overridden in derived classes.
[ "Constructs", "the", "virtual", "targets", "for", "this", "abstract", "target", "and", "the", "dependency", "graph", ".", "Returns", "a", "tuple", "consisting", "of", "the", "properties", "and", "the", "list", "of", "virtual", "targets", ".", "Should", "be", "overridden", "in", "derived", "classes", "." ]
def construct (self, name, source_targets, properties): """ Constructs the virtual targets for this abstract target and the dependency graph. Returns a tuple consisting of the properties and the list of virtual targets. Should be overridden in derived classes. """ raise BaseException ("method should be defined in derived classes")
[ "def", "construct", "(", "self", ",", "name", ",", "source_targets", ",", "properties", ")", ":", "raise", "BaseException", "(", "\"method should be defined in derived classes\"", ")" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/boost/boost_1_68_0/tools/build/src/build/targets.py#L1372-L1377
francinexue/xuefu
b6ff79747a42e020588c0c0a921048e08fe4680c
api/ctpx/ctptd.py
python
CtpTd.onRtnFromBankToFutureByFuture
(self, RspTransferField)
Notification of funds transfer from bank to futures initiated by the futures side
Notification of funds transfer from bank to futures initiated by the futures side
[ "Notification", "of", "funds", "transfer", "from", "bank", "to", "futures", "initiated", "by", "the", "futures", "side" ]
def onRtnFromBankToFutureByFuture(self, RspTransferField): """Notification of funds transfer from bank to futures initiated by the futures side""" pass
[ "def", "onRtnFromBankToFutureByFuture", "(", "self", ",", "RspTransferField", ")", ":", "pass" ]
https://github.com/francinexue/xuefu/blob/b6ff79747a42e020588c0c0a921048e08fe4680c/api/ctpx/ctptd.py#L475-L477
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
PyApp.GetTraitsIfExists
(*args, **kwargs)
return _core_.PyApp_GetTraitsIfExists(*args, **kwargs)
GetTraitsIfExists() -> wxAppTraits This function provides safer access to traits object than wx.GetApp().GetTraits() during startup or termination when the global application object itself may be unavailable.
GetTraitsIfExists() -> wxAppTraits
[ "GetTraitsIfExists", "()", "-", ">", "wxAppTraits" ]
def GetTraitsIfExists(*args, **kwargs): """ GetTraitsIfExists() -> wxAppTraits This function provides safer access to traits object than wx.GetApp().GetTraits() during startup or termination when the global application object itself may be unavailable. """ return _core_.PyApp_GetTraitsIfExists(*args, **kwargs)
[ "def", "GetTraitsIfExists", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "PyApp_GetTraitsIfExists", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L7819-L7827
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/google_appengine_cloudstorage/cloudstorage/storage_api.py
python
StreamingBuffer.write
(self, data)
Write some bytes. Args: data: data to write. str. Raises: TypeError: if data is not of type str.
Write some bytes.
[ "Write", "some", "bytes", "." ]
def write(self, data): """Write some bytes. Args: data: data to write. str. Raises: TypeError: if data is not of type str. """ self._check_open() if not isinstance(data, str): raise TypeError('Expected str but got %s.' % type(data)) if not data: return self._buffer.append(data) self._buffered += len(data) self._offset += len(data) if self._buffered >= self._blocksize: self._flush()
[ "def", "write", "(", "self", ",", "data", ")", ":", "self", ".", "_check_open", "(", ")", "if", "not", "isinstance", "(", "data", ",", "str", ")", ":", "raise", "TypeError", "(", "'Expected str but got %s.'", "%", "type", "(", "data", ")", ")", "if", "not", "data", ":", "return", "self", ".", "_buffer", ".", "append", "(", "data", ")", "self", ".", "_buffered", "+=", "len", "(", "data", ")", "self", ".", "_offset", "+=", "len", "(", "data", ")", "if", "self", ".", "_buffered", ">=", "self", ".", "_blocksize", ":", "self", ".", "_flush", "(", ")" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/google_appengine_cloudstorage/cloudstorage/storage_api.py#L683-L701
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/grid.py
python
GridCellBoolEditor_IsTrueValue
(*args, **kwargs)
return _grid.GridCellBoolEditor_IsTrueValue(*args, **kwargs)
GridCellBoolEditor_IsTrueValue(String value) -> bool
GridCellBoolEditor_IsTrueValue(String value) -> bool
[ "GridCellBoolEditor_IsTrueValue", "(", "String", "value", ")", "-", ">", "bool" ]
def GridCellBoolEditor_IsTrueValue(*args, **kwargs): """GridCellBoolEditor_IsTrueValue(String value) -> bool""" return _grid.GridCellBoolEditor_IsTrueValue(*args, **kwargs)
[ "def", "GridCellBoolEditor_IsTrueValue", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_grid", ".", "GridCellBoolEditor_IsTrueValue", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/grid.py#L477-L479
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
CloseEvent.SetCanVeto
(*args, **kwargs)
return _core_.CloseEvent_SetCanVeto(*args, **kwargs)
SetCanVeto(self, bool canVeto) Sets the 'can veto' flag.
SetCanVeto(self, bool canVeto)
[ "SetCanVeto", "(", "self", "bool", "canVeto", ")" ]
def SetCanVeto(*args, **kwargs): """ SetCanVeto(self, bool canVeto) Sets the 'can veto' flag. """ return _core_.CloseEvent_SetCanVeto(*args, **kwargs)
[ "def", "SetCanVeto", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "CloseEvent_SetCanVeto", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L6542-L6548
yyzybb537/libgo
4af17b7c67643c4d54aa354dcc77963ea07847d0
third_party/boost.context/tools/build/src/build/type.py
python
all_derived
(type)
return result
Returns type and all classes that derive from it, in the order of their distance from type.
Returns type and all classes that derive from it, in the order of their distance from type.
[ "Returns", "type", "and", "all", "classes", "that", "derive", "from", "it", "in", "the", "order", "of", "their", "distance", "from", "type", "." ]
def all_derived (type): """ Returns type and all classes that derive from it, in the order of their distance from type. """ assert isinstance(type, basestring) result = [type] for d in __types [type]['derived']: result.extend (all_derived (d)) return result
[ "def", "all_derived", "(", "type", ")", ":", "assert", "isinstance", "(", "type", ",", "basestring", ")", "result", "=", "[", "type", "]", "for", "d", "in", "__types", "[", "type", "]", "[", "'derived'", "]", ":", "result", ".", "extend", "(", "all_derived", "(", "d", ")", ")", "return", "result" ]
https://github.com/yyzybb537/libgo/blob/4af17b7c67643c4d54aa354dcc77963ea07847d0/third_party/boost.context/tools/build/src/build/type.py#L187-L195
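all_derived is a pre-order walk of the derivation tree stored in the module-level __types table. The same shape is easy to see with a plain dict (type names below are hypothetical):

types = {
    "LIB": {"derived": ["STATIC_LIB", "SHARED_LIB"]},
    "STATIC_LIB": {"derived": []},
    "SHARED_LIB": {"derived": []},
}

def all_derived(type_name):
    result = [type_name]
    for d in types[type_name]["derived"]:
        result.extend(all_derived(d))
    return result

print(all_derived("LIB"))  # ['LIB', 'STATIC_LIB', 'SHARED_LIB']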
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/external/bazel_tools/third_party/py/concurrent/futures/_base.py
python
Future.exception
(self, timeout=None)
Return the exception raised by the call that the future represents. Args: timeout: The number of seconds to wait for the exception if the future isn't done. If None, then there is no limit on the wait time. Returns: The exception raised by the call that the future represents or None if the call completed without raising. Raises: CancelledError: If the future was cancelled. TimeoutError: If the future didn't finish executing before the given timeout.
Return the exception raised by the call that the future represents.
[ "Return", "the", "exception", "raised", "by", "the", "call", "that", "the", "future", "represents", "." ]
def exception(self, timeout=None): """Return the exception raised by the call that the future represents. Args: timeout: The number of seconds to wait for the exception if the future isn't done. If None, then there is no limit on the wait time. Returns: The exception raised by the call that the future represents or None if the call completed without raising. Raises: CancelledError: If the future was cancelled. TimeoutError: If the future didn't finish executing before the given timeout. """ with self._condition: if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: raise CancelledError() elif self._state == FINISHED: return self._exception self._condition.wait(timeout) if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: raise CancelledError() elif self._state == FINISHED: return self._exception else: raise TimeoutError()
[ "def", "exception", "(", "self", ",", "timeout", "=", "None", ")", ":", "with", "self", ".", "_condition", ":", "if", "self", ".", "_state", "in", "[", "CANCELLED", ",", "CANCELLED_AND_NOTIFIED", "]", ":", "raise", "CancelledError", "(", ")", "elif", "self", ".", "_state", "==", "FINISHED", ":", "return", "self", ".", "_exception", "self", ".", "_condition", ".", "wait", "(", "timeout", ")", "if", "self", ".", "_state", "in", "[", "CANCELLED", ",", "CANCELLED_AND_NOTIFIED", "]", ":", "raise", "CancelledError", "(", ")", "elif", "self", ".", "_state", "==", "FINISHED", ":", "return", "self", ".", "_exception", "else", ":", "raise", "TimeoutError", "(", ")" ]
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/external/bazel_tools/third_party/py/concurrent/futures/_base.py#L409-L440
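The contract above (block up to the timeout, then return rather than raise the call's exception) is easiest to see with a small executor example using the public concurrent.futures API:

from concurrent.futures import ThreadPoolExecutor

def boom():
    raise ValueError("bad input")

with ThreadPoolExecutor(max_workers=1) as pool:
    fut = pool.submit(boom)
    err = fut.exception(timeout=5)  # returns the ValueError, does not raise it
    print(type(err).__name__, err)  # ValueError bad input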
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/client/session.py
python
BaseSession._make_callable_from_options
(self, callable_options)
return BaseSession._Callable(self, callable_options)
Returns a handle to a "callable" with the given options. Args: callable_options: A `CallableOptions` protocol buffer message describing the computation that will be performed by the callable. Returns: A handle to the new callable.
Returns a handle to a "callable" with the given options.
[ "Returns", "a", "handle", "to", "a", "callable", "with", "the", "given", "options", "." ]
def _make_callable_from_options(self, callable_options): """Returns a handle to a "callable" with the given options. Args: callable_options: A `CallableOptions` protocol buffer message describing the computation that will be performed by the callable. Returns: A handle to the new callable. """ self._extend_graph() return BaseSession._Callable(self, callable_options)
[ "def", "_make_callable_from_options", "(", "self", ",", "callable_options", ")", ":", "self", ".", "_extend_graph", "(", ")", "return", "BaseSession", ".", "_Callable", "(", "self", ",", "callable_options", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/client/session.py#L1502-L1513
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/generalplotview.py
python
GeneralPlotWindow.do_export_plot
(self)
export plot :return:
export plot :return:
[ "export", "plot", ":", "return", ":" ]
def do_export_plot(self): """ export plot :return: """ # get directory file_name = QFileDialog.getSaveFileName(self, caption='File to save the plot', directory=self._work_dir, filter='Data File(*.dat);;All Files(*.*') if not file_name: return if isinstance(file_name, tuple): file_name = file_name[0] self.ui.graphicsView_plotView.save_current_plot(None, file_name)
[ "def", "do_export_plot", "(", "self", ")", ":", "# get directory", "file_name", "=", "QFileDialog", ".", "getSaveFileName", "(", "self", ",", "caption", "=", "'File to save the plot'", ",", "directory", "=", "self", ".", "_work_dir", ",", "filter", "=", "'Data File(*.dat);;All Files(*.*'", ")", "if", "not", "file_name", ":", "return", "if", "isinstance", "(", "file_name", ",", "tuple", ")", ":", "file_name", "=", "file_name", "[", "0", "]", "self", ".", "ui", ".", "graphicsView_plotView", ".", "save_current_plot", "(", "None", ",", "file_name", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/generalplotview.py#L51-L65
oracle/graaljs
36a56e8e993d45fc40939a3a4d9c0c24990720f1
graal-nodejs/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
python
GetJavaJars
(target_list, target_dicts, toplevel_dir)
Generates a sequence of all .jars used as inputs.
Generates a sequence of all .jars used as inputs.
[ "Generates", "a", "sequence", "of", "all", ".", "jars", "used", "as", "inputs", "." ]
def GetJavaJars(target_list, target_dicts, toplevel_dir): """Generates a sequence of all .jars used as inputs.""" for target_name in target_list: target = target_dicts[target_name] for action in target.get("actions", []): for input_ in action["inputs"]: if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"): if os.path.isabs(input_): yield input_ else: yield os.path.join(os.path.dirname(target_name), input_)
[ "def", "GetJavaJars", "(", "target_list", ",", "target_dicts", ",", "toplevel_dir", ")", ":", "for", "target_name", "in", "target_list", ":", "target", "=", "target_dicts", "[", "target_name", "]", "for", "action", "in", "target", ".", "get", "(", "\"actions\"", ",", "[", "]", ")", ":", "for", "input_", "in", "action", "[", "\"inputs\"", "]", ":", "if", "os", ".", "path", ".", "splitext", "(", "input_", ")", "[", "1", "]", "==", "\".jar\"", "and", "not", "input_", ".", "startswith", "(", "\"$\"", ")", ":", "if", "os", ".", "path", ".", "isabs", "(", "input_", ")", ":", "yield", "input_", "else", ":", "yield", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "target_name", ")", ",", "input_", ")" ]
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py#L409-L419
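GetJavaJars scans every action's inputs, keeps paths ending in .jar that are not gyp variables, and resolves relative ones against the target's directory. With hand-built inputs (data below is hypothetical) the resolution step looks like:

import os

target_name = "java/app.gyp:app#target"
inputs = ["libs/guava.jar", "/abs/junit.jar", "src/Main.java", "$(jar_path)/x.jar"]

for input_ in inputs:
    if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"):
        resolved = input_ if os.path.isabs(input_) else os.path.join(
            os.path.dirname(target_name), input_)
        print(resolved)  # java/libs/guava.jar, then /abs/junit.jar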
fifengine/fifengine
4b62c42e85bec19893cef8e63e6855927cff2c47
engine/python/fife/extensions/pychan/__init__.py
python
setupModalExecution
(mainLoop,breakFromMainLoop)
Setup the synchronous dialog execution feature. You can enable synchronous dialog execution by passing two functions to this function. @param mainLoop: Function - This is regarded as the application's main loop, which should be able to be called recursively. It should take no arguments and return the argument passed to the second function (breakFromMainLoop). @param breakFromMainLoop: Function - This function should cause the first function to finish and return the passed argument. With these two functions dialogs can be executed synchronously. See L{widgets.Widget.execute}.
Setup the synchronous dialog execution feature.
[ "Setup", "the", "synchronous", "dialog", "execution", "feature", "." ]
def setupModalExecution(mainLoop,breakFromMainLoop): """ Setup the synchronous dialog execution feature. You can enable synchronous dialog execution by passing two functions to this function. @param mainLoop: Function - This is regarded as the application's main loop, which should be able to be called recursively. It should take no arguments and return the argument passed to the second function (breakFromMainLoop). @param breakFromMainLoop: Function - This function should cause the first function to finish and return the passed argument. With these two functions dialogs can be executed synchronously. See L{widgets.Widget.execute}. """ if not manager: raise InitializationError("PyChan is not initialized yet.") manager.setupModalExecution(mainLoop,breakFromMainLoop)
[ "def", "setupModalExecution", "(", "mainLoop", ",", "breakFromMainLoop", ")", ":", "if", "not", "manager", ":", "raise", "InitializationError", "(", "\"PyChan is not initialized yet.\"", ")", "manager", ".", "setupModalExecution", "(", "mainLoop", ",", "breakFromMainLoop", ")" ]
https://github.com/fifengine/fifengine/blob/4b62c42e85bec19893cef8e63e6855927cff2c47/engine/python/fife/extensions/pychan/__init__.py#L405-L425
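The two callbacks form a small re-entrant protocol: each nested mainLoop call blocks until a matching breakFromMainLoop(value) returns that value to it. A toy queue-based stand-in (illustrative only; a real GUI main loop pumps events instead of blocking on a queue):

import queue

_channels = []

def main_loop():
    chan = queue.Queue()
    _channels.append(chan)   # one channel per nested invocation
    return chan.get()        # blocks until a value is delivered

def break_from_main_loop(value):
    _channels.pop().put(value)  # unblocks the innermost main_loop

# pychan.setupModalExecution(main_loop, break_from_main_loop)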
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/xrc.py
python
XmlResourceHandler.GetDimension
(*args, **kwargs)
return _xrc.XmlResourceHandler_GetDimension(*args, **kwargs)
GetDimension(self, String param, int defaultv=0) -> int
GetDimension(self, String param, int defaultv=0) -> int
[ "GetDimension", "(", "self", "String", "param", "int", "defaultv", "=", "0", ")", "-", ">", "int" ]
def GetDimension(*args, **kwargs): """GetDimension(self, String param, int defaultv=0) -> int""" return _xrc.XmlResourceHandler_GetDimension(*args, **kwargs)
[ "def", "GetDimension", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_xrc", ".", "XmlResourceHandler_GetDimension", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/xrc.py#L691-L693
gromacs/gromacs
7dec3a3f99993cf5687a122de3e12de31c21c399
docs/doxygen/gmxtree.py
python
File.get_used_define_files
(self)
return set(self._used_defines.keys())
Return files like config.h whose defines are used in this file. The return value is empty if find_define_file_uses() has not been called, as well as for headers that declare these defines.
Return files like config.h whose defines are used in this file.
[ "Return", "files", "like", "config", ".", "h", "whose", "defines", "are", "used", "in", "this", "file", "." ]
def get_used_define_files(self): """Return files like config.h whose defines are used in this file. The return value is empty if find_define_file_uses() has not been called, as well as for headers that declare these defines.""" return set(self._used_defines.keys())
[ "def", "get_used_define_files", "(", "self", ")", ":", "return", "set", "(", "self", ".", "_used_defines", ".", "keys", "(", ")", ")" ]
https://github.com/gromacs/gromacs/blob/7dec3a3f99993cf5687a122de3e12de31c21c399/docs/doxygen/gmxtree.py#L353-L358
baidu/lac
3e10dbed9bfd87bea927c84a6627a167c17b5617
python/LAC/ahocorasick.py
python
Ahocorasick.__init__
(self)
Initialize the Ahocorasick root node __root
Initialize the Ahocorasick root node __root
[ "Initialize", "the", "Ahocorasick", "root", "node", "__root" ]
def __init__(self): """Initialize the Ahocorasick root node __root""" self.__root = Node()
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "__root", "=", "Node", "(", ")" ]
https://github.com/baidu/lac/blob/3e10dbed9bfd87bea927c84a6627a167c17b5617/python/LAC/ahocorasick.py#L50-L52
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/linear_optimizer/python/sdca_estimator.py
python
SDCALinearRegressor.__init__
(self, example_id_column, feature_columns, weight_column_name=None, model_dir=None, l1_regularization=0.0, l2_regularization=1.0, num_loss_partitions=None, config=None, feature_engineering_fn=None, partitioner=None)
Construct a `SDCALinearRegressor` estimator object. Args: example_id_column: A string defining the feature column name representing example ids. Used to initialize the underlying SDCA optimizer. feature_columns: An iterable containing all the feature columns used by the model. All items in the iterable should derive from `FeatureColumn`. Note that the order of the items is ignored at model construction time. weight_column_name: A string defining feature column name representing weights. It is used to down weight or boost examples during training. It will be multiplied by the loss of the example. model_dir: Directory to save model parameters, graph etc. This can also be used to load checkpoints from the directory into an estimator to continue training a previously saved model. l1_regularization: L1-regularization parameter. Refers to global L1 regularization (across all examples). l2_regularization: L2-regularization parameter. Refers to global L2 regularization (across all examples). num_loss_partitions: number of partitions of the (global) loss function optimized by the underlying optimizer (SDCAOptimizer). config: `RunConfig` object to configure the runtime settings. feature_engineering_fn: Feature engineering function. Takes features and labels which are the output of `input_fn` and returns features and labels which will be fed into the model. partitioner: Variable partitioner for the primal weights (`div` partitioning strategy will be used). Returns: A `SDCALinearRegressor` estimator.
Construct a `SDCALinearRegressor` estimator object.
[ "Construct", "a", "SDCALinearRegressor", "estimator", "object", "." ]
def __init__(self, example_id_column, feature_columns, weight_column_name=None, model_dir=None, l1_regularization=0.0, l2_regularization=1.0, num_loss_partitions=None, config=None, feature_engineering_fn=None, partitioner=None): """Construct a `SDCALinearRegressor` estimator object. Args: example_id_column: A string defining the feature column name representing example ids. Used to initialize the underlying SDCA optimizer. feature_columns: An iterable containing all the feature columns used by the model. All items in the iterable should derive from `FeatureColumn`. Note that the order of the items is ignored at model construction time. weight_column_name: A string defining feature column name representing weights. It is used to down weight or boost examples during training. It will be multiplied by the loss of the example. model_dir: Directory to save model parameters, graph etc. This can also be used to load checkpoints from the directory into an estimator to continue training a previously saved model. l1_regularization: L1-regularization parameter. Refers to global L1 regularization (across all examples). l2_regularization: L2-regularization parameter. Refers to global L2 regularization (across all examples). num_loss_partitions: number of partitions of the (global) loss function optimized by the underlying optimizer (SDCAOptimizer). config: `RunConfig` object to configure the runtime settings. feature_engineering_fn: Feature engineering function. Takes features and labels which are the output of `input_fn` and returns features and labels which will be fed into the model. partitioner: Variable partitioner for the primal weights (`div` partitioning strategy will be used). Returns: A `SDCALinearRegressor` estimator. """ super(SDCALinearRegressor, self).__init__( example_id_column=example_id_column, feature_columns=feature_columns, weight_column_name=weight_column_name, model_dir=model_dir, head=head_lib.regression_head(weight_column_name=weight_column_name), l1_regularization=l1_regularization, l2_regularization=l2_regularization, num_loss_partitions=num_loss_partitions, config=config, feature_engineering_fn=feature_engineering_fn, partitioner=partitioner)
[ "def", "__init__", "(", "self", ",", "example_id_column", ",", "feature_columns", ",", "weight_column_name", "=", "None", ",", "model_dir", "=", "None", ",", "l1_regularization", "=", "0.0", ",", "l2_regularization", "=", "1.0", ",", "num_loss_partitions", "=", "None", ",", "config", "=", "None", ",", "feature_engineering_fn", "=", "None", ",", "partitioner", "=", "None", ")", ":", "super", "(", "SDCALinearRegressor", ",", "self", ")", ".", "__init__", "(", "example_id_column", "=", "example_id_column", ",", "feature_columns", "=", "feature_columns", ",", "weight_column_name", "=", "weight_column_name", ",", "model_dir", "=", "model_dir", ",", "head", "=", "head_lib", ".", "regression_head", "(", "weight_column_name", "=", "weight_column_name", ")", ",", "l1_regularization", "=", "l1_regularization", ",", "l2_regularization", "=", "l2_regularization", ",", "num_loss_partitions", "=", "num_loss_partitions", ",", "config", "=", "config", ",", "feature_engineering_fn", "=", "feature_engineering_fn", ",", "partitioner", "=", "partitioner", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/linear_optimizer/python/sdca_estimator.py#L468-L521
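A minimal construction sketch, not from the source: it assumes a TensorFlow 1.x build where these `tf.contrib` imports resolve, and the column names are invented. The `input_fn` used for training would then have to emit an `'example_id'` feature alongside the two numeric columns.

```python
# Hypothetical usage of SDCALinearRegressor (TF 1.x contrib; names invented).
from tensorflow.contrib.layers import real_valued_column
from tensorflow.contrib.linear_optimizer import SDCALinearRegressor

price = real_valued_column('price')
sq_footage = real_valued_column('sq_footage')

regressor = SDCALinearRegressor(
    example_id_column='example_id',   # every example needs a unique id string
    feature_columns=[price, sq_footage],
    l2_regularization=1.0,
    num_loss_partitions=1)            # 1 suffices for single-worker training
```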
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/python/ops/linalg_ops.py
python
batch_svd
(tensor, compute_uv=True, full_matrices=False, name=None)
Computes the singular value decompositions of a batch of matrices. Computes the SVD of each inner matrix in `tensor` such that `tensor[..., :, :] = u[..., :, :] * diag(s[..., :]) * transpose(v[..., :, :])` ```prettyprint # a is a tensor. # s is a tensor of singular values. # u is a tensor of left singular vectors. # v is a tensor of right singular vectors. s, u, v = batch_svd(a) s = batch_svd(a, compute_uv=False) ``` Args: tensor: `Tensor` of shape `[..., M, N]`. Let `P` be the minimum of `M` and `N`. compute_uv: If `True` then left and right singular vectors will be computed and returned in `u` and `v`, respectively. Otherwise, only the singular values will be computed, which can be significantly faster. full_matrices: If `True`, compute full-sized `u` and `v`. If `False` (the default), compute only the leading `P` singular vectors. Ignored if `compute_uv` is `False`. name: string, optional name of the operation. Returns: s: Singular values. Shape is `[..., P]`. u: Left singular vectors. If `full_matrices` is `False` (default) then shape is `[..., M, P]`; if `full_matrices` is `True` then shape is `[..., M, M]`. Not returned if `compute_uv` is `False`. v: Right singular vectors. If `full_matrices` is `False` (default) then shape is `[..., N, P]`. If `full_matrices` is `True` then shape is `[..., N, N]`. Not returned if `compute_uv` is `False`.
Computes the singular value decompositions of a batch of matrices.
[ "Computes", "the", "singular", "value", "decompositions", "of", "a", "batch", "of", "matrices", "." ]
def batch_svd(tensor, compute_uv=True, full_matrices=False, name=None): """Computes the singular value decompositions of a batch of matrices. Computes the SVD of each inner matrix in `tensor` such that `tensor[..., :, :] = u[..., :, :] * diag(s[..., :]) * transpose(v[..., :, :])` ```prettyprint # a is a tensor. # s is a tensor of singular values. # u is a tensor of left singular vectors. # v is a tensor of right singular vectors. s, u, v = batch_svd(a) s = batch_svd(a, compute_uv=False) ``` Args: tensor: `Tensor` of shape `[..., M, N]`. Let `P` be the minimum of `M` and `N`. compute_uv: If `True` then left and right singular vectors will be computed and returned in `u` and `v`, respectively. Otherwise, only the singular values will be computed, which can be significantly faster. full_matrices: If `True`, compute full-sized `u` and `v`. If `False` (the default), compute only the leading `P` singular vectors. Ignored if `compute_uv` is `False`. name: string, optional name of the operation. Returns: s: Singular values. Shape is `[..., P]`. u: Left singular vectors. If `full_matrices` is `False` (default) then shape is `[..., M, P]`; if `full_matrices` is `True` then shape is `[..., M, M]`. Not returned if `compute_uv` is `False`. v: Right singular vectors. If `full_matrices` is `False` (default) then shape is `[..., N, P]`. If `full_matrices` is `True` then shape is `[..., N, N]`. Not returned if `compute_uv` is `False`. """ s, u, v = gen_linalg_ops.batch_svd( tensor, compute_uv=compute_uv, full_matrices=full_matrices) if compute_uv: return s, u, v else: return s
[ "def", "batch_svd", "(", "tensor", ",", "compute_uv", "=", "True", ",", "full_matrices", "=", "False", ",", "name", "=", "None", ")", ":", "s", ",", "u", ",", "v", "=", "gen_linalg_ops", ".", "batch_svd", "(", "tensor", ",", "compute_uv", "=", "compute_uv", ",", "full_matrices", "=", "full_matrices", ")", "if", "compute_uv", ":", "return", "s", ",", "u", ",", "v", "else", ":", "return", "s" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/ops/linalg_ops.py#L522-L563
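A short usage sketch, assuming a TensorFlow build old enough to expose `batch_svd` at the top level (later releases fold batching into `tf.svd`); the input is invented.

```python
# Hypothetical batch_svd usage on a batch of four 2x3 matrices.
import numpy as np
import tensorflow as tf

a = tf.constant(np.random.randn(4, 2, 3), dtype=tf.float32)

# P = min(M, N) = 2, so with full_matrices=False (the default):
# s: [4, 2], u: [4, 2, 2], v: [4, 3, 2]
s, u, v = tf.batch_svd(a)

# Noticeably faster when only the singular values are needed.
s_only = tf.batch_svd(a, compute_uv=False)

with tf.Session() as sess:
    print(sess.run(s).shape)  # (4, 2)
```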
larroy/clearskies_core
3574ddf0edc8555454c7044126e786a6c29444dc
tools/gyp/pylib/gyp/generator/ninja.py
python
NinjaWriter.__init__
(self, qualified_target, target_outputs, base_dir, build_dir, output_file, toplevel_build, output_file_name, flavor, toplevel_dir=None)
base_dir: path from source root to directory containing this gyp file; by gyp semantics, all input paths are relative to this. build_dir: path from source root to build output. toplevel_dir: path to the toplevel directory.
base_dir: path from source root to directory containing this gyp file; by gyp semantics, all input paths are relative to this. build_dir: path from source root to build output. toplevel_dir: path to the toplevel directory.
[ "base_dir", ":", "path", "from", "source", "root", "to", "directory", "containing", "this", "gyp", "file", "by", "gyp", "semantics", "all", "input", "paths", "are", "relative", "to", "this", "build_dir", ":", "path", "from", "source", "root", "to", "build", "output", "toplevel_dir", ":", "path", "to", "the", "toplevel", "directory" ]
def __init__(self, qualified_target, target_outputs, base_dir, build_dir, output_file, toplevel_build, output_file_name, flavor, toplevel_dir=None): """ base_dir: path from source root to directory containing this gyp file; by gyp semantics, all input paths are relative to this. build_dir: path from source root to build output. toplevel_dir: path to the toplevel directory. """ self.qualified_target = qualified_target self.target_outputs = target_outputs self.base_dir = base_dir self.build_dir = build_dir self.ninja = ninja_syntax.Writer(output_file) self.toplevel_build = toplevel_build self.output_file_name = output_file_name self.flavor = flavor self.abs_build_dir = None if toplevel_dir is not None: self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir)) self.obj_ext = '.obj' if flavor == 'win' else '.o' if flavor == 'win': # See docstring of msvs_emulation.GenerateEnvironmentFiles(). self.win_env = {} for arch in ('x86', 'x64'): self.win_env[arch] = 'environment.' + arch # Relative path from build output dir to base dir. build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) self.build_to_base = os.path.join(build_to_top, base_dir) # Relative path from base dir to build dir. base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) self.base_to_build = os.path.join(base_to_top, build_dir)
[ "def", "__init__", "(", "self", ",", "qualified_target", ",", "target_outputs", ",", "base_dir", ",", "build_dir", ",", "output_file", ",", "toplevel_build", ",", "output_file_name", ",", "flavor", ",", "toplevel_dir", "=", "None", ")", ":", "self", ".", "qualified_target", "=", "qualified_target", "self", ".", "target_outputs", "=", "target_outputs", "self", ".", "base_dir", "=", "base_dir", "self", ".", "build_dir", "=", "build_dir", "self", ".", "ninja", "=", "ninja_syntax", ".", "Writer", "(", "output_file", ")", "self", ".", "toplevel_build", "=", "toplevel_build", "self", ".", "output_file_name", "=", "output_file_name", "self", ".", "flavor", "=", "flavor", "self", ".", "abs_build_dir", "=", "None", "if", "toplevel_dir", "is", "not", "None", ":", "self", ".", "abs_build_dir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "toplevel_dir", ",", "build_dir", ")", ")", "self", ".", "obj_ext", "=", "'.obj'", "if", "flavor", "==", "'win'", "else", "'.o'", "if", "flavor", "==", "'win'", ":", "# See docstring of msvs_emulation.GenerateEnvironmentFiles().", "self", ".", "win_env", "=", "{", "}", "for", "arch", "in", "(", "'x86'", ",", "'x64'", ")", ":", "self", ".", "win_env", "[", "arch", "]", "=", "'environment.'", "+", "arch", "# Relative path from build output dir to base dir.", "build_to_top", "=", "gyp", ".", "common", ".", "InvertRelativePath", "(", "build_dir", ",", "toplevel_dir", ")", "self", ".", "build_to_base", "=", "os", ".", "path", ".", "join", "(", "build_to_top", ",", "base_dir", ")", "# Relative path from base dir to build dir.", "base_to_top", "=", "gyp", ".", "common", ".", "InvertRelativePath", "(", "base_dir", ",", "toplevel_dir", ")", "self", ".", "base_to_build", "=", "os", ".", "path", ".", "join", "(", "base_to_top", ",", "build_dir", ")" ]
https://github.com/larroy/clearskies_core/blob/3574ddf0edc8555454c7044126e786a6c29444dc/tools/gyp/pylib/gyp/generator/ninja.py#L215-L250
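The non-obvious part of this constructor is the relative-path bookkeeping at the end. A standalone sketch with POSIX paths, using an `os.path.relpath`-based stand-in for `gyp.common.InvertRelativePath` (the real helper may differ in edge cases); directory names are invented.

```python
# Standalone sketch of the relative-path bookkeeping at the end of __init__.
import os

def invert_relative_path(path, toplevel_dir):
    # Path leading from toplevel_dir/path back up to toplevel_dir.
    return os.path.relpath(toplevel_dir, os.path.join(toplevel_dir, path))

toplevel_dir = '/src/project'
base_dir = 'lib/foo'        # directory holding the .gyp file
build_dir = 'out/Debug'     # build output directory

build_to_top = invert_relative_path(build_dir, toplevel_dir)   # '../..'
build_to_base = os.path.join(build_to_top, base_dir)           # '../../lib/foo'
base_to_top = invert_relative_path(base_dir, toplevel_dir)     # '../..'
base_to_build = os.path.join(base_to_top, build_dir)           # '../../out/Debug'
```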
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/animate.py
python
Animation.GetFramePosition
(*args, **kwargs)
return _animate.Animation_GetFramePosition(*args, **kwargs)
GetFramePosition(self, int frame) -> Point
GetFramePosition(self, int frame) -> Point
[ "GetFramePosition", "(", "self", "int", "frame", ")", "-", ">", "Point" ]
def GetFramePosition(*args, **kwargs): """GetFramePosition(self, int frame) -> Point""" return _animate.Animation_GetFramePosition(*args, **kwargs)
[ "def", "GetFramePosition", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_animate", ".", "Animation_GetFramePosition", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/animate.py#L122-L124
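A tiny usage sketch, assuming wxPython Classic and an animation file on disk (the filename is invented).

```python
# Hypothetical usage: where is frame 0 positioned within the animation?
import wx
import wx.animate

app = wx.App(False)               # wx objects generally need an app instance
anim = wx.animate.Animation()
anim.LoadFile('throbber.gif')     # invented filename
pos = anim.GetFramePosition(0)    # -> wx.Point, e.g. (0, 0)
```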
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/dataflow.py
python
DataFlowAnalysis.op_SLICE_2
(self, info, inst)
TOS = TOS1[:TOS]
TOS = TOS1[:TOS]
[ "TOS", "=", "TOS1", "[", ":", "TOS", "]" ]
def op_SLICE_2(self, info, inst): """ TOS = TOS1[:TOS] """ tos = info.pop() tos1 = info.pop() res = info.make_temp() slicevar = info.make_temp() indexvar = info.make_temp() nonevar = info.make_temp() info.append(inst, base=tos1, stop=tos, res=res, slicevar=slicevar, indexvar=indexvar, nonevar=nonevar) info.push(res)
[ "def", "op_SLICE_2", "(", "self", ",", "info", ",", "inst", ")", ":", "tos", "=", "info", ".", "pop", "(", ")", "tos1", "=", "info", ".", "pop", "(", ")", "res", "=", "info", ".", "make_temp", "(", ")", "slicevar", "=", "info", ".", "make_temp", "(", ")", "indexvar", "=", "info", ".", "make_temp", "(", ")", "nonevar", "=", "info", ".", "make_temp", "(", ")", "info", ".", "append", "(", "inst", ",", "base", "=", "tos1", ",", "stop", "=", "tos", ",", "res", "=", "res", ",", "slicevar", "=", "slicevar", ",", "indexvar", "=", "indexvar", ",", "nonevar", "=", "nonevar", ")", "info", ".", "push", "(", "res", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/dataflow.py#L483-L495
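The opcode modeled here is Python 2's `SLICE+2`. A pure-Python rendering of the stack effect the analysis records (stack contents invented):

```python
# What SLICE+2 does at runtime: pop TOS (the stop index) and TOS1 (the
# base sequence), then push base[:stop].
stack = [[10, 20, 30, 40], 2]   # ... TOS1 = base, TOS = stop

stop = stack.pop()              # info.pop() -> tos
base = stack.pop()              # info.pop() -> tos1
stack.append(base[:stop])       # info.push(res)

assert stack == [[10, 20]]
```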
fredakilla/GPlayEngine
ae6b45f4c68f696fcd171ce6996a5a4e80aee09e
thirdparty/freetype/src/tools/docmaker/content.py
python
ContentProcessor.reset
( self )
Reset the content processor for a new block.
Reset the content processor for a new block.
[ "Reset", "the", "content", "processor", "for", "a", "new", "block", "." ]
def reset( self ): """Reset the content processor for a new block.""" self.markups = [] self.markup = None self.markup_lines = []
[ "def", "reset", "(", "self", ")", ":", "self", ".", "markups", "=", "[", "]", "self", ".", "markup", "=", "None", "self", ".", "markup_lines", "=", "[", "]" ]
https://github.com/fredakilla/GPlayEngine/blob/ae6b45f4c68f696fcd171ce6996a5a4e80aee09e/thirdparty/freetype/src/tools/docmaker/content.py#L427-L431
brave/brave-core
ceaa3de4735789d355b6fa80c21d4709e2c1d0e8
script/lib/transifex.py
python
clean_triple_quoted_string
(val)
return val.strip()
Grit parses out first 3 and last 3 single quote chars if they exist.
Grit parses out first 3 and last 3 single quote chars if they exist.
[ "Grit", "parses", "out", "first", "3", "and", "last", "3", "single", "quote", "chars", "if", "they", "exist", "." ]
def clean_triple_quoted_string(val): """Grit parses out first 3 and last 3 single quote chars if they exist.""" val = val.strip() if val.startswith("'''"): val = val[3:] if val.endswith("'''"): val = val[:-3] return val.strip()
[ "def", "clean_triple_quoted_string", "(", "val", ")", ":", "val", "=", "val", ".", "strip", "(", ")", "if", "val", ".", "startswith", "(", "\"'''\"", ")", ":", "val", "=", "val", "[", "3", ":", "]", "if", "val", ".", "endswith", "(", "\"'''\"", ")", ":", "val", "=", "val", "[", ":", "-", "3", "]", "return", "val", ".", "strip", "(", ")" ]
https://github.com/brave/brave-core/blob/ceaa3de4735789d355b6fa80c21d4709e2c1d0e8/script/lib/transifex.py#L321-L328
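A behavior check with invented inputs, assuming the function defined in the record above is in scope:

```python
# Leading/trailing ''' and surrounding whitespace are stripped; anything
# short of a full triple quote is left alone.
assert clean_triple_quoted_string("  '''Hello, world'''  ") == "Hello, world"
assert clean_triple_quoted_string("'''padded '''") == "padded"
assert clean_triple_quoted_string("no quotes") == "no quotes"
assert clean_triple_quoted_string("''two quotes''") == "''two quotes''"
```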
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/vcs/git.py
python
Git.get_url_rev_and_auth
(cls, url)
return url, rev, user_pass
Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. That's required because although they use SSH they sometimes don't work with a ssh:// scheme (e.g. GitHub). But we need a scheme for parsing. Hence we remove it again afterwards and return it as a stub.
[]
def get_url_rev_and_auth(cls, url): # type: (str) -> Tuple[str, Optional[str], AuthInfo] """ Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. That's required because although they use SSH they sometimes don't work with a ssh:// scheme (e.g. GitHub). But we need a scheme for parsing. Hence we remove it again afterwards and return it as a stub. """ # Works around an apparent Git bug # (see https://article.gmane.org/gmane.comp.version-control.git/146500) scheme, netloc, path, query, fragment = urlsplit(url) if scheme.endswith('file'): initial_slashes = path[:-len(path.lstrip('/'))] newpath = ( initial_slashes + urllib.request.url2pathname(path) .replace('\\', '/').lstrip('/') ) after_plus = scheme.find('+') + 1 url = scheme[:after_plus] + urlunsplit( (scheme[after_plus:], netloc, newpath, query, fragment), ) if '://' not in url: assert 'file:' not in url url = url.replace('git+', 'git+ssh://') url, rev, user_pass = super().get_url_rev_and_auth(url) url = url.replace('ssh://', '') else: url, rev, user_pass = super().get_url_rev_and_auth(url) return url, rev, user_pass
[ "def", "get_url_rev_and_auth", "(", "cls", ",", "url", ")", ":", "# type: (str) -> Tuple[str, Optional[str], AuthInfo]", "# Works around an apparent Git bug", "# (see https://article.gmane.org/gmane.comp.version-control.git/146500)", "scheme", ",", "netloc", ",", "path", ",", "query", ",", "fragment", "=", "urlsplit", "(", "url", ")", "if", "scheme", ".", "endswith", "(", "'file'", ")", ":", "initial_slashes", "=", "path", "[", ":", "-", "len", "(", "path", ".", "lstrip", "(", "'/'", ")", ")", "]", "newpath", "=", "(", "initial_slashes", "+", "urllib", ".", "request", ".", "url2pathname", "(", "path", ")", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", ".", "lstrip", "(", "'/'", ")", ")", "after_plus", "=", "scheme", ".", "find", "(", "'+'", ")", "+", "1", "url", "=", "scheme", "[", ":", "after_plus", "]", "+", "urlunsplit", "(", "(", "scheme", "[", "after_plus", ":", "]", ",", "netloc", ",", "newpath", ",", "query", ",", "fragment", ")", ",", ")", "if", "'://'", "not", "in", "url", ":", "assert", "'file:'", "not", "in", "url", "url", "=", "url", ".", "replace", "(", "'git+'", ",", "'git+ssh://'", ")", "url", ",", "rev", ",", "user_pass", "=", "super", "(", ")", ".", "get_url_rev_and_auth", "(", "url", ")", "url", "=", "url", ".", "replace", "(", "'ssh://'", ",", "''", ")", "else", ":", "url", ",", "rev", ",", "user_pass", "=", "super", "(", ")", ".", "get_url_rev_and_auth", "(", "url", ")", "return", "url", ",", "rev", ",", "user_pass" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/vcs/git.py#L777-L839
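A commented walkthrough of the scp-style branch with an invented repository path; the final tuple shape follows the superclass contract `(url, rev, (user, password))`, and the intermediate strings below are derived from the code above rather than captured output.

```python
# Walkthrough of the scp-style branch (repo path invented; steps inferred
# from the method body above, not from executing pip internals).
requested = 'git+git@github.com:example/repo.git@v1.0'

# 1. '://' is absent, so the prefix is rewritten so the URL parser can cope:
step1 = requested.replace('git+', 'git+ssh://')
assert step1 == 'git+ssh://git@github.com:example/repo.git@v1.0'

# 2. The superclass peels off the rev ('v1.0') and any embedded auth (none).
# 3. 'ssh://' is stripped from the parsed URL before returning:
url = 'git+ssh://git@github.com:example/repo.git'.replace('ssh://', '')
assert url == 'git+git@github.com:example/repo.git'
```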
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/unicode.py
python
_slice_span
(typingctx, sliceobj)
return sig, codegen
Compute the span from the given slice object.
Compute the span from the given slice object.
[ "Compute", "the", "span", "from", "the", "given", "slice", "object", "." ]
def _slice_span(typingctx, sliceobj): """Compute the span from the given slice object. """ sig = types.intp(sliceobj) def codegen(context, builder, sig, args): [slicetype] = sig.args [sliceobj] = args slice = context.make_helper(builder, slicetype, sliceobj) result_size = slicing.get_slice_length(builder, slice) return result_size return sig, codegen
[ "def", "_slice_span", "(", "typingctx", ",", "sliceobj", ")", ":", "sig", "=", "types", ".", "intp", "(", "sliceobj", ")", "def", "codegen", "(", "context", ",", "builder", ",", "sig", ",", "args", ")", ":", "[", "slicetype", "]", "=", "sig", ".", "args", "[", "sliceobj", "]", "=", "args", "slice", "=", "context", ".", "make_helper", "(", "builder", ",", "slicetype", ",", "sliceobj", ")", "result_size", "=", "slicing", ".", "get_slice_length", "(", "builder", ",", "slice", ")", "return", "result_size", "return", "sig", ",", "codegen" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/unicode.py#L1537-L1549
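`_slice_span` is a Numba typing-context intrinsic, so it only runs inside compiled code; its contract is easiest to show as the equivalent pure-Python span computation (a sketch of what `slicing.get_slice_length` computes, not the LLVM codegen itself):

```python
# Pure-Python equivalent of the span a slice selects over a given length.
# This mirrors the intrinsic's contract (number of selected elements); the
# formula is the standard slice-length computation.
def slice_span(sl, length):
    start, stop, step = sl.indices(length)  # normalize negatives/None
    if step > 0:
        return max(0, (stop - start + step - 1) // step)
    return max(0, (stop - start + step + 1) // step)

assert slice_span(slice(1, 7, 2), 10) == 3        # picks indices 1, 3, 5
assert slice_span(slice(None, None, -1), 5) == 5  # full reversal
assert slice_span(slice(8, 2), 10) == 0           # empty slice
```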