Dataset schema. Each row describes one Python function; per-field value types and observed length ranges:

  field               type            observed values
  nwo                 stringlengths   5 - 86       (repo "name with owner")
  sha                 stringlengths   40 - 40      (commit hash)
  path                stringlengths   4 - 189
  language            stringclasses   1 value      (python)
  identifier          stringlengths   1 - 94
  parameters          stringlengths   2 - 4.03k
  argument_list       stringclasses   1 value
  return_statement    stringlengths   0 - 11.5k
  docstring           stringlengths   1 - 33.2k
  docstring_summary   stringlengths   0 - 5.15k
  docstring_tokens    list
  function            stringlengths   34 - 151k
  function_tokens     list
  url                 stringlengths   90 - 278
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/stc.py
python
StyledTextCtrl.AutoCompSetCancelAtStart
(*args, **kwargs)
return _stc.StyledTextCtrl_AutoCompSetCancelAtStart(*args, **kwargs)
AutoCompSetCancelAtStart(self, bool cancel) Should the auto-completion list be cancelled if the user backspaces to a position before where the box was created.
AutoCompSetCancelAtStart(self, bool cancel)
[ "AutoCompSetCancelAtStart", "(", "self", "bool", "cancel", ")" ]
def AutoCompSetCancelAtStart(*args, **kwargs):
    """
    AutoCompSetCancelAtStart(self, bool cancel)

    Should the auto-completion list be cancelled if the user backspaces to a
    position before where the box was created.
    """
    return _stc.StyledTextCtrl_AutoCompSetCancelAtStart(*args, **kwargs)
[ "def", "AutoCompSetCancelAtStart", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_stc", ".", "StyledTextCtrl_AutoCompSetCancelAtStart", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/stc.py#L3103-L3110
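A hedged usage sketch for the wrapper above, assuming a minimal wxPython Classic application with a display available; the frame title and layout are illustrative, not part of the dataset row.

# Usage sketch (assumes wxPython Classic is importable and a GUI session exists).
import wx
import wx.stc

app = wx.App(False)
frame = wx.Frame(None, title="STC demo")
editor = wx.stc.StyledTextCtrl(frame)
# Keep the completion list open even if the user backspaces past the
# position where the box was created.
editor.AutoCompSetCancelAtStart(False)
frame.Show()
app.MainLoop()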
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/grid.py
python
Grid.SetDefaultCellAlignment
(*args, **kwargs)
return _grid.Grid_SetDefaultCellAlignment(*args, **kwargs)
SetDefaultCellAlignment(self, int horiz, int vert)
SetDefaultCellAlignment(self, int horiz, int vert)
[ "SetDefaultCellAlignment", "(", "self", "int", "horiz", "int", "vert", ")" ]
def SetDefaultCellAlignment(*args, **kwargs):
    """SetDefaultCellAlignment(self, int horiz, int vert)"""
    return _grid.Grid_SetDefaultCellAlignment(*args, **kwargs)
[ "def", "SetDefaultCellAlignment", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_grid", ".", "Grid_SetDefaultCellAlignment", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/grid.py#L1958-L1960
reverbrain/elliptics
4b4f9b8094d7616c1ec50eb8605edb059b9f228e
bindings/python/src/route.py
python
RouteList.get_address_backend_routes
(self, address, backend_id)
return tuple(route for route in self.routes if (route.address, route.backend_id) == (address, backend_id))
Returns all routes for specified @address and @backend_id
Returns all routes for specified
[ "Returns", "all", "routes", "for", "specified" ]
def get_address_backend_routes(self, address, backend_id):
    """
    Returns all routes for specified @address and @backend_id
    """
    return tuple(route for route in self.routes
                 if (route.address, route.backend_id) == (address, backend_id))
[ "def", "get_address_backend_routes", "(", "self", ",", "address", ",", "backend_id", ")", ":", "return", "tuple", "(", "route", "for", "route", "in", "self", ".", "routes", "if", "(", "route", ".", "address", ",", "route", ".", "backend_id", ")", "==", "(", "address", ",", "backend_id", ")", ")" ]
https://github.com/reverbrain/elliptics/blob/4b4f9b8094d7616c1ec50eb8605edb059b9f228e/bindings/python/src/route.py#L309-L313
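Running the real method needs an elliptics cluster, so here is a self-contained sketch of the same filtering idea with a stand-in Route type; the namedtuple and the sample addresses are illustrative assumptions.

from collections import namedtuple

Route = namedtuple("Route", "address backend_id")

routes = [Route("10.0.0.1:1025", 1),
          Route("10.0.0.1:1025", 2),
          Route("10.0.0.2:1025", 1)]
# Same predicate as get_address_backend_routes above.
match = tuple(r for r in routes
              if (r.address, r.backend_id) == ("10.0.0.1:1025", 1))
print(match)  # (Route(address='10.0.0.1:1025', backend_id=1),)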
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/io_ops.py
python
_restore_slice
(file_pattern, tensor_name, shape_and_slice, tensor_type, name="restore_slice", preferred_shard=-1)
return gen_io_ops.restore_slice( file_pattern, tensor_name, shape_and_slice, base_type, preferred_shard, name=name)
Restore a tensor slice from a set of files with a given pattern.

Example usage:
  RestoreSlice("/foo/bar-?????-of-?????", "w", "10 10 0,2:-", DT_FLOAT)

Args:
  file_pattern: the file pattern used to match a set of checkpoint files.
  tensor_name: the name of the tensor to restore.
  shape_and_slice: the shape-and-slice spec of the slice.
  tensor_type: the type of the tensor to restore.
  name: string. Optional name for the op.
  preferred_shard: Int. Optional shard to open first in the checkpoint file.

Returns:
  A tensor of type "tensor_type".
Restore a tensor slice from a set of files with a given pattern.
[ "Restore", "a", "tensor", "slice", "from", "a", "set", "of", "files", "with", "a", "given", "pattern", "." ]
def _restore_slice(file_pattern, tensor_name, shape_and_slice, tensor_type,
                   name="restore_slice", preferred_shard=-1):
  """Restore a tensor slice from a set of files with a given pattern.

  Example usage:
    RestoreSlice("/foo/bar-?????-of-?????", "w", "10 10 0,2:-", DT_FLOAT)

  Args:
    file_pattern: the file pattern used to match a set of checkpoint files.
    tensor_name: the name of the tensor to restore.
    shape_and_slice: the shape-and-slice spec of the slice.
    tensor_type: the type of the tensor to restore.
    name: string. Optional name for the op.
    preferred_shard: Int. Optional shard to open first in the checkpoint file.

  Returns:
    A tensor of type "tensor_type".
  """
  base_type = dtypes.as_dtype(tensor_type).base_dtype
  return gen_io_ops.restore_slice(
      file_pattern, tensor_name, shape_and_slice, base_type, preferred_shard,
      name=name)
[ "def", "_restore_slice", "(", "file_pattern", ",", "tensor_name", ",", "shape_and_slice", ",", "tensor_type", ",", "name", "=", "\"restore_slice\"", ",", "preferred_shard", "=", "-", "1", ")", ":", "base_type", "=", "dtypes", ".", "as_dtype", "(", "tensor_type", ")", ".", "base_dtype", "return", "gen_io_ops", ".", "restore_slice", "(", "file_pattern", ",", "tensor_name", ",", "shape_and_slice", ",", "base_type", ",", "preferred_shard", ",", "name", "=", "name", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/io_ops.py#L75-L96
greearb/xorp.ct
b8942f69e9cb69a35adcaa58f3dd6b9a16629e6a
xorp/bgp/harness/harness.py
python
stop
()
Stop collecting data into the log file
Stop collecting data into the log file
[ "Stop", "collecting", "data", "into", "the", "log", "file" ]
def stop():
    """
    Stop collecting data into the log file
    """
    coord("peer1 dump sent mrtd ipv4 traffic")
    coord("peer1 dump recv mrtd ipv4 traffic")
[ "def", "stop", "(", ")", ":", "coord", "(", "\"peer1 dump sent mrtd ipv4 traffic\"", ")", "coord", "(", "\"peer1 dump recv mrtd ipv4 traffic\"", ")" ]
https://github.com/greearb/xorp.ct/blob/b8942f69e9cb69a35adcaa58f3dd6b9a16629e6a/xorp/bgp/harness/harness.py#L103-L109
sdhash/sdhash
b9eff63e4e5867e910f41fd69032bbb1c94a2a5e
external/tools/build/v2/build/build_request.py
python
expand_no_defaults
(property_sets)
return [property_set.create(p) for p in product]
Expand the given build request by combining all property_sets which don't specify conflicting non-free features.
Expand the given build request by combining all property_sets which don't specify conflicting non-free features.
[ "Expand", "the", "given", "build", "request", "by", "combining", "all", "property_sets", "which", "don", "t", "specify", "conflicting", "non", "-", "free", "features", "." ]
def expand_no_defaults (property_sets):
    """ Expand the given build request by combining all property_sets which don't
        specify conflicting non-free features.
    """
    # First make all features and subfeatures explicit
    expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets]

    # Now combine all of the expanded property_sets
    product = __x_product (expanded_property_sets)

    return [property_set.create(p) for p in product]
[ "def", "expand_no_defaults", "(", "property_sets", ")", ":", "# First make all features and subfeatures explicit", "expanded_property_sets", "=", "[", "ps", ".", "expand_subfeatures", "(", ")", "for", "ps", "in", "property_sets", "]", "# Now combine all of the expanded property_sets", "product", "=", "__x_product", "(", "expanded_property_sets", ")", "return", "[", "property_set", ".", "create", "(", "p", ")", "for", "p", "in", "product", "]" ]
https://github.com/sdhash/sdhash/blob/b9eff63e4e5867e910f41fd69032bbb1c94a2a5e/external/tools/build/v2/build/build_request.py#L16-L26
neoml-lib/neoml
a0d370fba05269a1b2258cef126f77bbd2054a3e
NeoML/Python/neoml/Blob.py
python
asblob
(math_engine, data, shape=None, copy=False)
return Blob(PythonWrapper.tensor(math_engine._internal, shape, dtype, np_data, bool(copy)))
Organizes the data from a memory buffer into a blob.

:param neoml.MathEngine.MathEngine math_engine: the math engine that works with this blob.

:param data: a pointer to the data.
:type data: object

:param shape: the target blob dimensions. If none are specified,
    a one-element blob will be assumed.
:type shape: array of int, default=None

:param copy: specifies if the data should be copied to another memory block
    or kept in the same place if possible, making the data parameter
    point to the body of the newly-created blob.
    Not copying may be impossible if the blob is in GPU memory.
:type copy: bool, default=False
Organizes the data from a memory buffer into a blob.
[ "Organizes", "the", "data", "from", "a", "memory", "buffer", "into", "a", "blob", "." ]
def asblob(math_engine, data, shape=None, copy=False):
    """Organizes the data from a memory buffer into a blob.

    :param neoml.MathEngine.MathEngine math_engine: the math engine that works with this blob.

    :param data: a pointer to the data.
    :type data: object

    :param shape: the target blob dimensions. If none are specified,
        a one-element blob will be assumed.
    :type shape: array of int, default=None

    :param copy: specifies if the data should be copied to another memory block
        or kept in the same place if possible, making the data parameter
        point to the body of the newly-created blob.
        Not copying may be impossible if the blob is in GPU memory.
    :type copy: bool, default=False
    """
    if shape is None:
        shape = np.ones(7, np.int32)
    else:
        shape = np.array(shape, dtype=np.int32, copy=False)

    if len(shape) != 7:
        raise ValueError('The `shape` must have 7 dimension sizes.')

    np_data = np.array(data, copy=False, order='C')

    if len(np_data.shape) > 7:
        raise ValueError('The `shape` must have not more then 7 dimensions.')

    if np.prod(np_data.shape) != np.prod(shape):
        raise ValueError('The blob must have as many elements as ndarray')

    dtype = 'none'
    if np_data.dtype == np.float32:
        dtype = 'float32'
    elif np_data.dtype == np.int32:
        dtype = 'int32'
    else:
        raise ValueError('The `dtype` must be one of {`float32`, `int32`}.')

    if type(math_engine) is MathEngine.GpuMathEngine:
        copy = True

    return Blob(PythonWrapper.tensor(math_engine._internal, shape, dtype, np_data, bool(copy)))
[ "def", "asblob", "(", "math_engine", ",", "data", ",", "shape", "=", "None", ",", "copy", "=", "False", ")", ":", "if", "shape", "is", "None", ":", "shape", "=", "np", ".", "ones", "(", "7", ",", "np", ".", "int32", ")", "else", ":", "shape", "=", "np", ".", "array", "(", "shape", ",", "dtype", "=", "np", ".", "int32", ",", "copy", "=", "False", ")", "if", "len", "(", "shape", ")", "!=", "7", ":", "raise", "ValueError", "(", "'The `shape` must have 7 dimension sizes.'", ")", "np_data", "=", "np", ".", "array", "(", "data", ",", "copy", "=", "False", ",", "order", "=", "'C'", ")", "if", "len", "(", "np_data", ".", "shape", ")", ">", "7", ":", "raise", "ValueError", "(", "'The `shape` must have not more then 7 dimensions.'", ")", "if", "np", ".", "prod", "(", "np_data", ".", "shape", ")", "!=", "np", ".", "prod", "(", "shape", ")", ":", "raise", "ValueError", "(", "'The blob must have as many elements as ndarray'", ")", "dtype", "=", "'none'", "if", "np_data", ".", "dtype", "==", "np", ".", "float32", ":", "dtype", "=", "'float32'", "elif", "np_data", ".", "dtype", "==", "np", ".", "int32", ":", "dtype", "=", "'int32'", "else", ":", "raise", "ValueError", "(", "'The `dtype` must be one of {`float32`, `int32`}.'", ")", "if", "type", "(", "math_engine", ")", "is", "MathEngine", ".", "GpuMathEngine", ":", "copy", "=", "True", "return", "Blob", "(", "PythonWrapper", ".", "tensor", "(", "math_engine", ".", "_internal", ",", "shape", ",", "dtype", ",", "np_data", ",", "bool", "(", "copy", ")", ")", ")" ]
https://github.com/neoml-lib/neoml/blob/a0d370fba05269a1b2258cef126f77bbd2054a3e/NeoML/Python/neoml/Blob.py#L294-L339
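A hedged usage sketch for asblob. It assumes neoml is installed and that a CPU math engine can be built with the constructor below (treat that name as an assumption about the neoml Python API). The shape must spell out all 7 NeoML blob dimensions, and their product must match the element count.

import numpy as np
import neoml

math_engine = neoml.MathEngine.CpuMathEngine()   # assumed constructor
data = np.arange(12, dtype=np.float32)
# 7 dimension sizes whose product equals the 12 elements above.
blob = neoml.Blob.asblob(math_engine, data, shape=(1, 1, 1, 1, 3, 4, 1))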
kamyu104/LeetCode-Solutions
77605708a927ea3b85aee5a479db733938c7c211
Python/max-chunks-to-make-sorted.py
python
Solution2.maxChunksToSorted
(self, arr)
return len(increasing_stk)
:type arr: List[int] :rtype: int
:type arr: List[int] :rtype: int
[ ":", "type", "arr", ":", "List", "[", "int", "]", ":", "rtype", ":", "int" ]
def maxChunksToSorted(self, arr):
    """
    :type arr: List[int]
    :rtype: int
    """
    result, increasing_stk = 0, []
    for num in arr:
        max_num = num if not increasing_stk else max(increasing_stk[-1], num)
        while increasing_stk and increasing_stk[-1] > num:
            increasing_stk.pop()
        increasing_stk.append(max_num)
    return len(increasing_stk)
[ "def", "maxChunksToSorted", "(", "self", ",", "arr", ")", ":", "result", ",", "increasing_stk", "=", "0", ",", "[", "]", "for", "num", "in", "arr", ":", "max_num", "=", "num", "if", "not", "increasing_stk", "else", "max", "(", "increasing_stk", "[", "-", "1", "]", ",", "num", ")", "while", "increasing_stk", "and", "increasing_stk", "[", "-", "1", "]", ">", "num", ":", "increasing_stk", ".", "pop", "(", ")", "increasing_stk", ".", "append", "(", "max_num", ")", "return", "len", "(", "increasing_stk", ")" ]
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/max-chunks-to-make-sorted.py#L22-L33
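A worked example of the monotonic-stack solution above, assuming the Solution2 class from this row is defined: each surviving stack entry is the maximum of one independently sortable chunk.

arr = [2, 1, 3, 4, 4]
# chunks: [2, 1] | [3] | [4] | [4] -> sorting each chunk and concatenating
# yields the fully sorted array, so the answer is 4.
print(Solution2().maxChunksToSorted(arr))  # 4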
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
Framework/PythonInterface/plugins/algorithms/EnggEstimateFocussedBackground.py
python
EnggEstimateFocussedBackground.doIterativeSmoothing
(self, y, nwindow, maxdepth, depth=0)
Iterative smoothing procedure to estimate the background in powder diffraction
as published in Bruckner J. Appl. Cryst. (2000). 33, 977-979
:param y: signal to smooth
:param n: size of window for convolution
:param maxdepth: max depth of recursion (i.e. number of iterations)
:param depth: current iteration
:return:
Iterative smoothing procedure to estimate the background in powder diffraction as published in Bruckner J. Appl. Cryst. (2000). 33, 977-979 :param y: signal to smooth :param n: size of window for convolution :param maxdepth: max depth of recursion (i.e. number of iterations) :param depth: current iteration :return:
[ "Iterative", "smoothing", "procedure", "to", "estimate", "the", "background", "in", "powder", "diffraction", "as", "published", "in", "Bruckner", "J", ".", "Appl", ".", "Cryst", ".", "(", "2000", ")", ".", "33", "977", "-", "979", ":", "param", "y", ":", "signal", "to", "smooth", ":", "param", "n", ":", "size", "of", "window", "for", "convolution", ":", "param", "maxdepth", ":", "max", "depth", "of", "recursion", "(", "i", ".", "e", ".", "number", "of", "iterations", ")", ":", "param", "depth", ":", "current", "iteration", ":", "return", ":" ]
def doIterativeSmoothing(self, y, nwindow, maxdepth, depth=0):
    """
    Iterative smoothing procedure to estimate the background in powder diffraction as published in
    Bruckner J. Appl. Cryst. (2000). 33, 977-979
    :param y: signal to smooth
    :param n: size of window for convolution
    :param maxdepth: max depth of recursion (i.e. number of iterations)
    :param depth: current iteration
    :return:
    """
    # smooth with hat function
    yy = np.copy(y)
    yy = np.convolve(yy, np.ones(nwindow) / nwindow, mode="same")
    # normalise end values effected by convolution
    ends = np.convolve(np.ones(nwindow), np.ones(nwindow) / nwindow, mode='same')
    yy[0:nwindow // 2] = yy[0:nwindow // 2] / ends[0:nwindow // 2]
    yy[-nwindow // 2:] = yy[-nwindow // 2:] / ends[-nwindow // 2:]
    if depth < maxdepth:
        # compare pt by pt with original and keep lowest
        idx = yy > y
        yy[idx] = y[idx]
        return self.doIterativeSmoothing(yy, nwindow, maxdepth, depth + 1)
    else:
        return yy
[ "def", "doIterativeSmoothing", "(", "self", ",", "y", ",", "nwindow", ",", "maxdepth", ",", "depth", "=", "0", ")", ":", "# smooth with hat function", "yy", "=", "np", ".", "copy", "(", "y", ")", "yy", "=", "np", ".", "convolve", "(", "yy", ",", "np", ".", "ones", "(", "nwindow", ")", "/", "nwindow", ",", "mode", "=", "\"same\"", ")", "# normalise end values effected by convolution", "ends", "=", "np", ".", "convolve", "(", "np", ".", "ones", "(", "nwindow", ")", ",", "np", ".", "ones", "(", "nwindow", ")", "/", "nwindow", ",", "mode", "=", "'same'", ")", "yy", "[", "0", ":", "nwindow", "//", "2", "]", "=", "yy", "[", "0", ":", "nwindow", "//", "2", "]", "/", "ends", "[", "0", ":", "nwindow", "//", "2", "]", "yy", "[", "-", "nwindow", "//", "2", ":", "]", "=", "yy", "[", "-", "nwindow", "//", "2", ":", "]", "/", "ends", "[", "-", "nwindow", "//", "2", ":", "]", "if", "depth", "<", "maxdepth", ":", "# compare pt by pt with original and keep lowest", "idx", "=", "yy", ">", "y", "yy", "[", "idx", "]", "=", "y", "[", "idx", "]", "return", "self", ".", "doIterativeSmoothing", "(", "yy", ",", "nwindow", ",", "maxdepth", ",", "depth", "+", "1", ")", "else", ":", "return", "yy" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/EnggEstimateFocussedBackground.py#L119-L142
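A standalone adaptation of the smoothing loop above: the method is lifted out of its class, and the synthetic peak-plus-background data is an illustrative assumption. The recursion keeps the pointwise minimum of smoothed and original, so the sharp peak is progressively suppressed and only the slowly varying background survives.

import numpy as np

def iterative_smooth(y, nwindow, maxdepth, depth=0):
    # Same hat-function convolution and end correction as the method above.
    yy = np.convolve(y, np.ones(nwindow) / nwindow, mode="same")
    ends = np.convolve(np.ones(nwindow), np.ones(nwindow) / nwindow, mode="same")
    yy[0:nwindow // 2] /= ends[0:nwindow // 2]
    yy[-nwindow // 2:] /= ends[-nwindow // 2:]
    if depth < maxdepth:
        yy = np.minimum(yy, y)   # keep the lower of smoothed vs. original
        return iterative_smooth(yy, nwindow, maxdepth, depth + 1)
    return yy

x = np.linspace(0, 1, 400)
signal = (1.0 + 0.5 * x) + np.exp(-(x - 0.5) ** 2 / 1e-4)  # background + sharp peak
background = iterative_smooth(signal, nwindow=21, maxdepth=20)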
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/signal/signaltools.py
python
decimate
(x, q, n=None, ftype='iir', axis=-1, zero_phase=None)
return y[sl]
Downsample the signal after applying an anti-aliasing filter.

By default, an order 8 Chebyshev type I filter is used. A 30 point FIR
filter with Hamming window is used if `ftype` is 'fir'.

Parameters
----------
x : ndarray
    The signal to be downsampled, as an N-dimensional array.
q : int
    The downsampling factor. For downsampling factors higher than 13, it is
    recommended to call `decimate` multiple times.
n : int, optional
    The order of the filter (1 less than the length for 'fir'). Defaults to
    8 for 'iir' and 30 for 'fir'.
ftype : str {'iir', 'fir'} or ``dlti`` instance, optional
    If 'iir' or 'fir', specifies the type of lowpass filter. If an instance
    of an `dlti` object, uses that object to filter before downsampling.
axis : int, optional
    The axis along which to decimate.
zero_phase : bool, optional
    Prevent phase shift by filtering with `filtfilt` instead of `lfilter`
    when using an IIR filter, and shifting the outputs back by the filter's
    group delay when using an FIR filter. A value of ``True`` is
    recommended, since a phase shift is generally not desired. Using
    ``None`` defaults to ``False`` for backwards compatibility. This
    default will change to ``True`` in a future release, so it is best to
    set this argument explicitly.

    .. versionadded:: 0.18.0

Returns
-------
y : ndarray
    The down-sampled signal.

See Also
--------
resample : Resample up or down using the FFT method.
resample_poly : Resample using polyphase filtering and an FIR filter.

Notes
-----
The ``zero_phase`` keyword was added in 0.18.0.
The possibility to use instances of ``dlti`` as ``ftype`` was added in
0.18.0.
Downsample the signal after applying an anti-aliasing filter.
[ "Downsample", "the", "signal", "after", "applying", "an", "anti", "-", "aliasing", "filter", "." ]
def decimate(x, q, n=None, ftype='iir', axis=-1, zero_phase=None):
    """
    Downsample the signal after applying an anti-aliasing filter.

    By default, an order 8 Chebyshev type I filter is used. A 30 point FIR
    filter with Hamming window is used if `ftype` is 'fir'.

    Parameters
    ----------
    x : ndarray
        The signal to be downsampled, as an N-dimensional array.
    q : int
        The downsampling factor. For downsampling factors higher than 13, it
        is recommended to call `decimate` multiple times.
    n : int, optional
        The order of the filter (1 less than the length for 'fir'). Defaults
        to 8 for 'iir' and 30 for 'fir'.
    ftype : str {'iir', 'fir'} or ``dlti`` instance, optional
        If 'iir' or 'fir', specifies the type of lowpass filter. If an
        instance of an `dlti` object, uses that object to filter before
        downsampling.
    axis : int, optional
        The axis along which to decimate.
    zero_phase : bool, optional
        Prevent phase shift by filtering with `filtfilt` instead of `lfilter`
        when using an IIR filter, and shifting the outputs back by the
        filter's group delay when using an FIR filter. A value of ``True`` is
        recommended, since a phase shift is generally not desired. Using
        ``None`` defaults to ``False`` for backwards compatibility. This
        default will change to ``True`` in a future release, so it is best to
        set this argument explicitly.

        .. versionadded:: 0.18.0

    Returns
    -------
    y : ndarray
        The down-sampled signal.

    See Also
    --------
    resample : Resample up or down using the FFT method.
    resample_poly : Resample using polyphase filtering and an FIR filter.

    Notes
    -----
    The ``zero_phase`` keyword was added in 0.18.0.
    The possibility to use instances of ``dlti`` as ``ftype`` was added in
    0.18.0.
    """

    if not isinstance(q, int):
        raise TypeError("q must be an integer")

    if n is not None and not isinstance(n, int):
        raise TypeError("n must be an integer")

    if ftype == 'fir':
        if n is None:
            n = 30
        system = dlti(firwin(n+1, 1. / q, window='hamming'), 1.)
    elif ftype == 'iir':
        if n is None:
            n = 8
        system = dlti(*cheby1(n, 0.05, 0.8 / q))
    elif isinstance(ftype, dlti):
        system = ftype._as_tf()  # Avoids copying if already in TF form
        n = np.max((system.num.size, system.den.size)) - 1
    else:
        raise ValueError('invalid ftype')

    if zero_phase is None:
        warnings.warn(" Note: Decimate's zero_phase keyword argument will "
                      "default to True in a future release. Until then, "
                      "decimate defaults to one-way filtering for backwards "
                      "compatibility. Ideally, always set this argument "
                      "explicitly.", FutureWarning)
        zero_phase = False

    sl = [slice(None)] * x.ndim

    if len(system.den) == 1:  # FIR case
        if zero_phase:
            y = resample_poly(x, 1, q, axis=axis, window=system.num)
        else:
            # upfirdn is generally faster than lfilter by a factor equal to the
            # downsampling factor, since it only calculates the needed outputs
            n_out = x.shape[axis] // q + bool(x.shape[axis] % q)
            y = upfirdn(system.num, x, up=1, down=q, axis=axis)
            sl[axis] = slice(None, n_out, None)
    else:  # IIR case
        if zero_phase:
            y = filtfilt(system.num, system.den, x, axis=axis)
        else:
            y = lfilter(system.num, system.den, x, axis=axis)
        sl[axis] = slice(None, None, q)

    return y[sl]
[ "def", "decimate", "(", "x", ",", "q", ",", "n", "=", "None", ",", "ftype", "=", "'iir'", ",", "axis", "=", "-", "1", ",", "zero_phase", "=", "None", ")", ":", "if", "not", "isinstance", "(", "q", ",", "int", ")", ":", "raise", "TypeError", "(", "\"q must be an integer\"", ")", "if", "n", "is", "not", "None", "and", "not", "isinstance", "(", "n", ",", "int", ")", ":", "raise", "TypeError", "(", "\"n must be an integer\"", ")", "if", "ftype", "==", "'fir'", ":", "if", "n", "is", "None", ":", "n", "=", "30", "system", "=", "dlti", "(", "firwin", "(", "n", "+", "1", ",", "1.", "/", "q", ",", "window", "=", "'hamming'", ")", ",", "1.", ")", "elif", "ftype", "==", "'iir'", ":", "if", "n", "is", "None", ":", "n", "=", "8", "system", "=", "dlti", "(", "*", "cheby1", "(", "n", ",", "0.05", ",", "0.8", "/", "q", ")", ")", "elif", "isinstance", "(", "ftype", ",", "dlti", ")", ":", "system", "=", "ftype", ".", "_as_tf", "(", ")", "# Avoids copying if already in TF form", "n", "=", "np", ".", "max", "(", "(", "system", ".", "num", ".", "size", ",", "system", ".", "den", ".", "size", ")", ")", "-", "1", "else", ":", "raise", "ValueError", "(", "'invalid ftype'", ")", "if", "zero_phase", "is", "None", ":", "warnings", ".", "warn", "(", "\" Note: Decimate's zero_phase keyword argument will \"", "\"default to True in a future release. Until then, \"", "\"decimate defaults to one-way filtering for backwards \"", "\"compatibility. Ideally, always set this argument \"", "\"explicitly.\"", ",", "FutureWarning", ")", "zero_phase", "=", "False", "sl", "=", "[", "slice", "(", "None", ")", "]", "*", "x", ".", "ndim", "if", "len", "(", "system", ".", "den", ")", "==", "1", ":", "# FIR case", "if", "zero_phase", ":", "y", "=", "resample_poly", "(", "x", ",", "1", ",", "q", ",", "axis", "=", "axis", ",", "window", "=", "system", ".", "num", ")", "else", ":", "# upfirdn is generally faster than lfilter by a factor equal to the", "# downsampling factor, since it only calculates the needed outputs", "n_out", "=", "x", ".", "shape", "[", "axis", "]", "//", "q", "+", "bool", "(", "x", ".", "shape", "[", "axis", "]", "%", "q", ")", "y", "=", "upfirdn", "(", "system", ".", "num", ",", "x", ",", "up", "=", "1", ",", "down", "=", "q", ",", "axis", "=", "axis", ")", "sl", "[", "axis", "]", "=", "slice", "(", "None", ",", "n_out", ",", "None", ")", "else", ":", "# IIR case", "if", "zero_phase", ":", "y", "=", "filtfilt", "(", "system", ".", "num", ",", "system", ".", "den", ",", "x", ",", "axis", "=", "axis", ")", "else", ":", "y", "=", "lfilter", "(", "system", ".", "num", ",", "system", ".", "den", ",", "x", ",", "axis", "=", "axis", ")", "sl", "[", "axis", "]", "=", "slice", "(", "None", ",", "None", ",", "q", ")", "return", "y", "[", "sl", "]" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/signal/signaltools.py#L3314-L3411
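A hedged usage sketch for decimate, assuming a SciPy build that includes the zero_phase keyword (0.18.0 or later, per the notes above):

import numpy as np
from scipy import signal

t = np.linspace(0, 1, 8000, endpoint=False)
x = np.sin(2 * np.pi * 50 * t)               # 50 Hz tone sampled at 8 kHz
y = signal.decimate(x, 4, zero_phase=True)   # anti-alias, keep every 4th sample
print(x.shape, y.shape)                      # (8000,) (2000,)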
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/geographic_msgs/msg/_RouteSegment.py
python
RouteSegment.__init__
(self, *args, **kwds)
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.

The available fields are:
    id,start,end,props

:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments.
[ "Constructor", ".", "Any", "message", "fields", "that", "are", "implicitly", "/", "explicitly", "set", "to", "None", "will", "be", "assigned", "a", "default", "value", ".", "The", "recommend", "use", "is", "keyword", "arguments", "as", "this", "is", "more", "robust", "to", "future", "message", "changes", ".", "You", "cannot", "mix", "in", "-", "order", "arguments", "and", "keyword", "arguments", "." ]
def __init__(self, *args, **kwds):
  """
  Constructor. Any message fields that are implicitly/explicitly
  set to None will be assigned a default value. The recommend
  use is keyword arguments as this is more robust to future message
  changes. You cannot mix in-order arguments and keyword arguments.

  The available fields are:
      id,start,end,props

  :param args: complete set of field values, in .msg order
  :param kwds: use keyword arguments corresponding to message field names
  to set specific fields.
  """
  if args or kwds:
    super(RouteSegment, self).__init__(*args, **kwds)
    #message fields cannot be None, assign default values for those that are
    if self.id is None:
      self.id = uuid_msgs.msg.UniqueID()
    if self.start is None:
      self.start = uuid_msgs.msg.UniqueID()
    if self.end is None:
      self.end = uuid_msgs.msg.UniqueID()
    if self.props is None:
      self.props = []
  else:
    self.id = uuid_msgs.msg.UniqueID()
    self.start = uuid_msgs.msg.UniqueID()
    self.end = uuid_msgs.msg.UniqueID()
    self.props = []
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwds", ")", ":", "if", "args", "or", "kwds", ":", "super", "(", "RouteSegment", ",", "self", ")", ".", "__init__", "(", "*", "args", ",", "*", "*", "kwds", ")", "#message fields cannot be None, assign default values for those that are", "if", "self", ".", "id", "is", "None", ":", "self", ".", "id", "=", "uuid_msgs", ".", "msg", ".", "UniqueID", "(", ")", "if", "self", ".", "start", "is", "None", ":", "self", ".", "start", "=", "uuid_msgs", ".", "msg", ".", "UniqueID", "(", ")", "if", "self", ".", "end", "is", "None", ":", "self", ".", "end", "=", "uuid_msgs", ".", "msg", ".", "UniqueID", "(", ")", "if", "self", ".", "props", "is", "None", ":", "self", ".", "props", "=", "[", "]", "else", ":", "self", ".", "id", "=", "uuid_msgs", ".", "msg", ".", "UniqueID", "(", ")", "self", ".", "start", "=", "uuid_msgs", ".", "msg", ".", "UniqueID", "(", ")", "self", ".", "end", "=", "uuid_msgs", ".", "msg", ".", "UniqueID", "(", ")", "self", ".", "props", "=", "[", "]" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/geographic_msgs/msg/_RouteSegment.py#L51-L80
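A hedged construction sketch, assuming a ROS Python environment where geographic_msgs and uuid_msgs are importable:

from geographic_msgs.msg import RouteSegment

seg = RouteSegment()             # every field gets its default value
seg2 = RouteSegment(props=[])    # keyword form; remaining fields defaulted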
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/PIL/ImageFont.py
python
FreeTypeFont.font_variant
( self, font=None, size=None, index=None, encoding=None, layout_engine=None )
return FreeTypeFont( font=self.path if font is None else font, size=self.size if size is None else size, index=self.index if index is None else index, encoding=self.encoding if encoding is None else encoding, layout_engine=layout_engine or self.layout_engine, )
Create a copy of this FreeTypeFont object,
using any specified arguments to override the settings.

Parameters are identical to the parameters used to initialize this
object.

:return: A FreeTypeFont object.
Create a copy of this FreeTypeFont object, using any specified arguments to override the settings.
[ "Create", "a", "copy", "of", "this", "FreeTypeFont", "object", "using", "any", "specified", "arguments", "to", "override", "the", "settings", "." ]
def font_variant(
    self, font=None, size=None, index=None, encoding=None, layout_engine=None
):
    """
    Create a copy of this FreeTypeFont object,
    using any specified arguments to override the settings.

    Parameters are identical to the parameters used to initialize this
    object.

    :return: A FreeTypeFont object.
    """
    return FreeTypeFont(
        font=self.path if font is None else font,
        size=self.size if size is None else size,
        index=self.index if index is None else index,
        encoding=self.encoding if encoding is None else encoding,
        layout_engine=layout_engine or self.layout_engine,
    )
[ "def", "font_variant", "(", "self", ",", "font", "=", "None", ",", "size", "=", "None", ",", "index", "=", "None", ",", "encoding", "=", "None", ",", "layout_engine", "=", "None", ")", ":", "return", "FreeTypeFont", "(", "font", "=", "self", ".", "path", "if", "font", "is", "None", "else", "font", ",", "size", "=", "self", ".", "size", "if", "size", "is", "None", "else", "size", ",", "index", "=", "self", ".", "index", "if", "index", "is", "None", "else", "index", ",", "encoding", "=", "self", ".", "encoding", "if", "encoding", "is", "None", "else", "encoding", ",", "layout_engine", "=", "layout_engine", "or", "self", ".", "layout_engine", ",", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/PIL/ImageFont.py#L479-L497
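A hedged usage sketch, assuming Pillow with FreeType support; the font filename is a hypothetical path:

from PIL import ImageFont

base = ImageFont.truetype("DejaVuSans.ttf", size=12)  # hypothetical font path
headline = base.font_variant(size=48)   # same face and options, new size
print(headline.size)                    # 48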
scribusproject/scribus
41ec7c775a060912cf251682a8b1437f753f80f4
scribus/plugins/scriptplugin_py2x/scripts/CalendarWizard.py
python
ScClassicCalendar.createMonthCalendar
(self, month, cal)
Create a page and draw one month calendar on it
Create a page and draw one month calendar on it
[ "Create", "a", "page", "and", "draw", "one", "month", "calendar", "on", "it" ]
def createMonthCalendar(self, month, cal):
    """ Create a page and draw one month calendar on it """
    self.createLayout()
    self.createHeader(localization[self.lang][0][month])
    rowCnt = 2
    for week in cal:
        colCnt = 0
        for day in week:
            cel = createText(self.marginl + colCnt * self.colSize,
                             self.calHeight + rowCnt * self.rowSize,
                             self.colSize, self.rowSize)
            setLineColor("Black", cel)  # comment this out if you do not want border to cells
            colCnt += 1
            if day.month == month + 1:
                setText(str(day.day), cel)
                setStyle(self.pStyleDate, cel)
        rowCnt += 1
[ "def", "createMonthCalendar", "(", "self", ",", "month", ",", "cal", ")", ":", "self", ".", "createLayout", "(", ")", "self", ".", "createHeader", "(", "localization", "[", "self", ".", "lang", "]", "[", "0", "]", "[", "month", "]", ")", "rowCnt", "=", "2", "for", "week", "in", "cal", ":", "colCnt", "=", "0", "for", "day", "in", "week", ":", "cel", "=", "createText", "(", "self", ".", "marginl", "+", "colCnt", "*", "self", ".", "colSize", ",", "self", ".", "calHeight", "+", "rowCnt", "*", "self", ".", "rowSize", ",", "self", ".", "colSize", ",", "self", ".", "rowSize", ")", "setLineColor", "(", "\"Black\"", ",", "cel", ")", "# comment this out if you do not want border to cells", "colCnt", "+=", "1", "if", "day", ".", "month", "==", "month", "+", "1", ":", "setText", "(", "str", "(", "day", ".", "day", ")", ",", "cel", ")", "setStyle", "(", "self", ".", "pStyleDate", ",", "cel", ")", "rowCnt", "+=", "1" ]
https://github.com/scribusproject/scribus/blob/41ec7c775a060912cf251682a8b1437f753f80f4/scribus/plugins/scriptplugin_py2x/scripts/CalendarWizard.py#L437-L453
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/AWSPythonSDK/1.5.8/botocore/vendored/requests/cookies.py
python
morsel_to_cookie
(morsel)
return create_cookie( comment=morsel['comment'], comment_url=bool(morsel['comment']), discard=False, domain=morsel['domain'], expires=expires, name=morsel.key, path=morsel['path'], port=None, rest={'HttpOnly': morsel['httponly']}, rfc2109=False, secure=bool(morsel['secure']), value=morsel.value, version=morsel['version'] or 0, )
Convert a Morsel object into a Cookie containing the one k/v pair.
Convert a Morsel object into a Cookie containing the one k/v pair.
[ "Convert", "a", "Morsel", "object", "into", "a", "Cookie", "containing", "the", "one", "k", "/", "v", "pair", "." ]
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""

    expires = None
    if morsel['max-age']:
        expires = time.time() + morsel['max-age']
    elif morsel['expires']:
        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
        expires = time.mktime(
            time.strptime(morsel['expires'], time_template)) - time.timezone
    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )
[ "def", "morsel_to_cookie", "(", "morsel", ")", ":", "expires", "=", "None", "if", "morsel", "[", "'max-age'", "]", ":", "expires", "=", "time", ".", "time", "(", ")", "+", "morsel", "[", "'max-age'", "]", "elif", "morsel", "[", "'expires'", "]", ":", "time_template", "=", "'%a, %d-%b-%Y %H:%M:%S GMT'", "expires", "=", "time", ".", "mktime", "(", "time", ".", "strptime", "(", "morsel", "[", "'expires'", "]", ",", "time_template", ")", ")", "-", "time", ".", "timezone", "return", "create_cookie", "(", "comment", "=", "morsel", "[", "'comment'", "]", ",", "comment_url", "=", "bool", "(", "morsel", "[", "'comment'", "]", ")", ",", "discard", "=", "False", ",", "domain", "=", "morsel", "[", "'domain'", "]", ",", "expires", "=", "expires", ",", "name", "=", "morsel", ".", "key", ",", "path", "=", "morsel", "[", "'path'", "]", ",", "port", "=", "None", ",", "rest", "=", "{", "'HttpOnly'", ":", "morsel", "[", "'httponly'", "]", "}", ",", "rfc2109", "=", "False", ",", "secure", "=", "bool", "(", "morsel", "[", "'secure'", "]", ")", ",", "value", "=", "morsel", ".", "value", ",", "version", "=", "morsel", "[", "'version'", "]", "or", "0", ",", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/botocore/vendored/requests/cookies.py#L413-L437
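A hedged sketch of the conversion, building a Morsel with the stdlib. The import path below is the stock requests location of this helper; the dataset row shows botocore's vendored copy of the same function.

from http.cookies import SimpleCookie
from requests.cookies import morsel_to_cookie  # stock requests location

jar = SimpleCookie()
jar["session"] = "abc123"
cookie = morsel_to_cookie(jar["session"])   # no expiry set -> expires=None
print(cookie.name, cookie.value)            # session abc123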
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/propgrid.py
python
PropertyGrid.GetEditorControl
(*args, **kwargs)
return _propgrid.PropertyGrid_GetEditorControl(*args, **kwargs)
GetEditorControl(self) -> Window
GetEditorControl(self) -> Window
[ "GetEditorControl", "(", "self", ")", "-", ">", "Window" ]
def GetEditorControl(*args, **kwargs):
    """GetEditorControl(self) -> Window"""
    return _propgrid.PropertyGrid_GetEditorControl(*args, **kwargs)
[ "def", "GetEditorControl", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_propgrid", ".", "PropertyGrid_GetEditorControl", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/propgrid.py#L2380-L2382
happynear/caffe-windows
967eedf25009e334b7f6f933bb5e17aaaff5bef6
scripts/cpp_lint.py
python
CheckAccess
(filename, clean_lines, linenum, nesting_state, error)
Checks for improper use of DISALLOW* macros.

Args:
  filename: The name of the current file.
  clean_lines: A CleansedLines instance containing the file.
  linenum: The number of the line to check.
  nesting_state: A _NestingState instance which maintains information about
                 the current stack of nested blocks being parsed.
  error: The function to call with any errors found.
Checks for improper use of DISALLOW* macros.
[ "Checks", "for", "improper", "use", "of", "DISALLOW", "*", "macros", "." ]
def CheckAccess(filename, clean_lines, linenum, nesting_state, error):
  """Checks for improper use of DISALLOW* macros.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    nesting_state: A _NestingState instance which maintains information about
                   the current stack of nested blocks being parsed.
    error: The function to call with any errors found.
  """
  line = clean_lines.elided[linenum]  # get rid of comments and strings

  matched = Match((r'\s*(DISALLOW_COPY_AND_ASSIGN|'
                   r'DISALLOW_EVIL_CONSTRUCTORS|'
                   r'DISALLOW_IMPLICIT_CONSTRUCTORS)'), line)
  if not matched:
    return
  if nesting_state.stack and isinstance(nesting_state.stack[-1], _ClassInfo):
    if nesting_state.stack[-1].access != 'private':
      error(filename, linenum, 'readability/constructors', 3,
            '%s must be in the private: section' % matched.group(1))
  else:
    # Found DISALLOW* macro outside a class declaration, or perhaps it
    # was used inside a function when it should have been part of the
    # class declaration.  We could issue a warning here, but it
    # probably resulted in a compiler error already.
    pass
[ "def", "CheckAccess", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "nesting_state", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "# get rid of comments and strings", "matched", "=", "Match", "(", "(", "r'\\s*(DISALLOW_COPY_AND_ASSIGN|'", "r'DISALLOW_EVIL_CONSTRUCTORS|'", "r'DISALLOW_IMPLICIT_CONSTRUCTORS)'", ")", ",", "line", ")", "if", "not", "matched", ":", "return", "if", "nesting_state", ".", "stack", "and", "isinstance", "(", "nesting_state", ".", "stack", "[", "-", "1", "]", ",", "_ClassInfo", ")", ":", "if", "nesting_state", ".", "stack", "[", "-", "1", "]", ".", "access", "!=", "'private'", ":", "error", "(", "filename", ",", "linenum", ",", "'readability/constructors'", ",", "3", ",", "'%s must be in the private: section'", "%", "matched", ".", "group", "(", "1", ")", ")", "else", ":", "# Found DISALLOW* macro outside a class declaration, or perhaps it", "# was used inside a function when it should have been part of the", "# class declaration. We could issue a warning here, but it", "# probably resulted in a compiler error already.", "pass" ]
https://github.com/happynear/caffe-windows/blob/967eedf25009e334b7f6f933bb5e17aaaff5bef6/scripts/cpp_lint.py#L2490-L2518
hpi-xnor/BMXNet-v2
af2b1859eafc5c721b1397cef02f946aaf2ce20d
python/mxnet/symbol/symbol.py
python
Symbol.argmin
(self, *args, **kwargs)
return op.argmin(self, *args, **kwargs)
Convenience fluent method for :py:func:`argmin`. The arguments are the same as for :py:func:`argmin`, with this array as data.
Convenience fluent method for :py:func:`argmin`.
[ "Convenience", "fluent", "method", "for", ":", "py", ":", "func", ":", "argmin", "." ]
def argmin(self, *args, **kwargs):
    """Convenience fluent method for :py:func:`argmin`.

    The arguments are the same as for :py:func:`argmin`, with
    this array as data.
    """
    return op.argmin(self, *args, **kwargs)
[ "def", "argmin", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "op", ".", "argmin", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/symbol/symbol.py#L2054-L2060
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/itanium_mangler.py
python
mangle_templated_ident
(identifier, parameters)
return mangle_identifier(identifier, template_params)
Mangle templated identifier.
Mangle templated identifier.
[ "Mangle", "templated", "identifier", "." ]
def mangle_templated_ident(identifier, parameters):
    """
    Mangle templated identifier.
    """
    template_params = ('I%sE' % ''.join(map(mangle_type_or_value, parameters))
                       if parameters else '')
    return mangle_identifier(identifier, template_params)
[ "def", "mangle_templated_ident", "(", "identifier", ",", "parameters", ")", ":", "template_params", "=", "(", "'I%sE'", "%", "''", ".", "join", "(", "map", "(", "mangle_type_or_value", ",", "parameters", ")", ")", "if", "parameters", "else", "''", ")", "return", "mangle_identifier", "(", "identifier", ",", "template_params", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/itanium_mangler.py#L183-L189
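A hedged sketch of calling the mangler directly. itanium_mangler is a numba implementation detail, not a public API, and its import path has moved between releases (numba.itanium_mangler in older versions, numba.core.itanium_mangler in newer ones); treat both the path and the call as assumptions.

from numba.core import itanium_mangler  # older releases: numba.itanium_mangler
from numba.core import types

# Mangle "foo<int64>" into an Itanium C++ ABI style identifier.
print(itanium_mangler.mangle_templated_ident("foo", [types.int64]))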
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/difflib.py
python
SequenceMatcher.get_opcodes
(self)
return answer
Return list of 5-tuples describing how to turn a into b.

Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple
has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the
tuple preceding it, and likewise for j1 == the previous j2.

The tags are strings, with these meanings:

'replace':  a[i1:i2] should be replaced by b[j1:j2]
'delete':   a[i1:i2] should be deleted.
            Note that j1==j2 in this case.
'insert':   b[j1:j2] should be inserted at a[i1:i1].
            Note that i1==i2 in this case.
'equal':    a[i1:i2] == b[j1:j2]

>>> a = "qabxcd"
>>> b = "abycdf"
>>> s = SequenceMatcher(None, a, b)
>>> for tag, i1, i2, j1, j2 in s.get_opcodes():
...    print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" %
...           (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2])))
 delete a[0:1] (q) b[0:0] ()
  equal a[1:3] (ab) b[0:2] (ab)
replace a[3:4] (x) b[2:3] (y)
  equal a[4:6] (cd) b[3:5] (cd)
 insert a[6:6] () b[5:6] (f)
Return list of 5-tuples describing how to turn a into b.
[ "Return", "list", "of", "5", "-", "tuples", "describing", "how", "to", "turn", "a", "into", "b", "." ]
def get_opcodes(self):
    """Return list of 5-tuples describing how to turn a into b.

    Each tuple is of the form (tag, i1, i2, j1, j2).  The first tuple
    has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the
    tuple preceding it, and likewise for j1 == the previous j2.

    The tags are strings, with these meanings:

    'replace':  a[i1:i2] should be replaced by b[j1:j2]
    'delete':   a[i1:i2] should be deleted.
                Note that j1==j2 in this case.
    'insert':   b[j1:j2] should be inserted at a[i1:i1].
                Note that i1==i2 in this case.
    'equal':    a[i1:i2] == b[j1:j2]

    >>> a = "qabxcd"
    >>> b = "abycdf"
    >>> s = SequenceMatcher(None, a, b)
    >>> for tag, i1, i2, j1, j2 in s.get_opcodes():
    ...    print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" %
    ...           (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2])))
     delete a[0:1] (q) b[0:0] ()
      equal a[1:3] (ab) b[0:2] (ab)
    replace a[3:4] (x) b[2:3] (y)
      equal a[4:6] (cd) b[3:5] (cd)
     insert a[6:6] () b[5:6] (f)
    """

    if self.opcodes is not None:
        return self.opcodes
    i = j = 0
    self.opcodes = answer = []
    for ai, bj, size in self.get_matching_blocks():
        # invariant:  we've pumped out correct diffs to change
        # a[:i] into b[:j], and the next matching block is
        # a[ai:ai+size] == b[bj:bj+size].  So we need to pump
        # out a diff to change a[i:ai] into b[j:bj], pump out
        # the matching block, and move (i,j) beyond the match
        tag = ''
        if i < ai and j < bj:
            tag = 'replace'
        elif i < ai:
            tag = 'delete'
        elif j < bj:
            tag = 'insert'
        if tag:
            answer.append( (tag, i, ai, j, bj) )
        i, j = ai+size, bj+size
        # the list of matching blocks is terminated by a
        # sentinel with size 0
        if size:
            answer.append( ('equal', ai, i, bj, j) )
    return answer
[ "def", "get_opcodes", "(", "self", ")", ":", "if", "self", ".", "opcodes", "is", "not", "None", ":", "return", "self", ".", "opcodes", "i", "=", "j", "=", "0", "self", ".", "opcodes", "=", "answer", "=", "[", "]", "for", "ai", ",", "bj", ",", "size", "in", "self", ".", "get_matching_blocks", "(", ")", ":", "# invariant: we've pumped out correct diffs to change", "# a[:i] into b[:j], and the next matching block is", "# a[ai:ai+size] == b[bj:bj+size]. So we need to pump", "# out a diff to change a[i:ai] into b[j:bj], pump out", "# the matching block, and move (i,j) beyond the match", "tag", "=", "''", "if", "i", "<", "ai", "and", "j", "<", "bj", ":", "tag", "=", "'replace'", "elif", "i", "<", "ai", ":", "tag", "=", "'delete'", "elif", "j", "<", "bj", ":", "tag", "=", "'insert'", "if", "tag", ":", "answer", ".", "append", "(", "(", "tag", ",", "i", ",", "ai", ",", "j", ",", "bj", ")", ")", "i", ",", "j", "=", "ai", "+", "size", ",", "bj", "+", "size", "# the list of matching blocks is terminated by a", "# sentinel with size 0", "if", "size", ":", "answer", ".", "append", "(", "(", "'equal'", ",", "ai", ",", "i", ",", "bj", ",", "j", ")", ")", "return", "answer" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/difflib.py#L517-L570
trailofbits/sienna-locomotive
09bc1a0bea7d7a33089422c62e0d3c715ecb7ce0
sl2/harness/state.py
python
get_target_dir
(_config)
return dir_name
Gets (or creates) the path to a target directory for the current config file.
Gets (or creates) the path to a target directory for the current config file.
[ "Gets", "(", "or", "creates", ")", "the", "path", "to", "a", "target", "directory", "for", "the", "current", "config", "file", "." ]
def get_target_dir(_config):
    """ Gets (or creates) the path to a target directory for the current config file. """
    slug = get_target_slug(_config)
    dir_name = os.path.join(config.sl2_targets_dir, slug)
    if not os.path.isdir(dir_name):
        os.makedirs(dir_name)

    arg_file = os.path.join(dir_name, "arguments.txt")
    if not os.path.exists(arg_file):
        with open(arg_file, "w") as argfile:
            argfile.write(stringify_program_array(_config["target_application_path"], _config["target_args"]))

    # Primes the db for checksec for this target if it doesn't already exist
    db.TargetConfig.bySlug(slug, _config["target_application_path"])

    return dir_name
[ "def", "get_target_dir", "(", "_config", ")", ":", "slug", "=", "get_target_slug", "(", "_config", ")", "dir_name", "=", "os", ".", "path", ".", "join", "(", "config", ".", "sl2_targets_dir", ",", "slug", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "dir_name", ")", ":", "os", ".", "makedirs", "(", "dir_name", ")", "arg_file", "=", "os", ".", "path", ".", "join", "(", "dir_name", ",", "\"arguments.txt\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "arg_file", ")", ":", "with", "open", "(", "arg_file", ",", "\"w\"", ")", "as", "argfile", ":", "argfile", ".", "write", "(", "stringify_program_array", "(", "_config", "[", "\"target_application_path\"", "]", ",", "_config", "[", "\"target_args\"", "]", ")", ")", "# Primes the db for checksec for this target if it doesn't already exist", "db", ".", "TargetConfig", ".", "bySlug", "(", "slug", ",", "_config", "[", "\"target_application_path\"", "]", ")", "return", "dir_name" ]
https://github.com/trailofbits/sienna-locomotive/blob/09bc1a0bea7d7a33089422c62e0d3c715ecb7ce0/sl2/harness/state.py#L119-L137
SoarGroup/Soar
a1c5e249499137a27da60533c72969eef3b8ab6b
scons/scons-local-4.1.0/SCons/Tool/__init__.py
python
ToolInitializer.apply_tools
(self, env)
Searches the list of associated Tool modules for one that exists, and applies that to the construction environment.
Searches the list of associated Tool modules for one that exists, and applies that to the construction environment.
[ "Searches", "the", "list", "of", "associated", "Tool", "modules", "for", "one", "that", "exists", "and", "applies", "that", "to", "the", "construction", "environment", "." ]
def apply_tools(self, env):
    """
    Searches the list of associated Tool modules for one that
    exists, and applies that to the construction environment.
    """
    for t in self.tools:
        tool = SCons.Tool.Tool(t)
        if tool.exists(env):
            env.Tool(tool)
            return
[ "def", "apply_tools", "(", "self", ",", "env", ")", ":", "for", "t", "in", "self", ".", "tools", ":", "tool", "=", "SCons", ".", "Tool", ".", "Tool", "(", "t", ")", "if", "tool", ".", "exists", "(", "env", ")", ":", "env", ".", "Tool", "(", "tool", ")", "return" ]
https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Tool/__init__.py#L640-L649
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_windows.py
python
ColourData.GetColour
(*args, **kwargs)
return _windows_.ColourData_GetColour(*args, **kwargs)
GetColour(self) -> Colour Gets the colour (pre)selected by the dialog.
GetColour(self) -> Colour
[ "GetColour", "(", "self", ")", "-", ">", "Colour" ]
def GetColour(*args, **kwargs):
    """
    GetColour(self) -> Colour

    Gets the colour (pre)selected by the dialog.
    """
    return _windows_.ColourData_GetColour(*args, **kwargs)
[ "def", "GetColour", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "ColourData_GetColour", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_windows.py#L2941-L2947
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/mapreduce/mapreduce/api/map_job/abstract_datastore_input_reader.py
python
AbstractDatastoreInputReader._get_raw_entity_kind
(cls, entity_kind_or_model_classpath)
return entity_kind_or_model_classpath
Returns the entity kind to use with low level datastore calls.

Args:
  entity_kind_or_model_classpath: user specified entity kind or model
    classpath.

Returns:
  the entity kind in str to use with low level datastore calls.
Returns the entity kind to use with low level datastore calls.
[ "Returns", "the", "entity", "kind", "to", "use", "with", "low", "level", "datastore", "calls", "." ]
def _get_raw_entity_kind(cls, entity_kind_or_model_classpath):
  """Returns the entity kind to use with low level datastore calls.

  Args:
    entity_kind_or_model_classpath: user specified entity kind or model
      classpath.

  Returns:
    the entity kind in str to use with low level datastore calls.
  """
  return entity_kind_or_model_classpath
[ "def", "_get_raw_entity_kind", "(", "cls", ",", "entity_kind_or_model_classpath", ")", ":", "return", "entity_kind_or_model_classpath" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/mapreduce/mapreduce/api/map_job/abstract_datastore_input_reader.py#L311-L321
bairdzhang/smallhardface
76fa1d87a9602d9b13d7a7fe693fc7aec91cab80
caffe/python/caffe/io.py
python
Transformer.set_mean
(self, in_, mean)
Set the mean to subtract for centering the data.

Parameters
----------
in_ : which input to assign this mean.
mean : mean ndarray (input dimensional or broadcastable)
Set the mean to subtract for centering the data.
[ "Set", "the", "mean", "to", "subtract", "for", "centering", "the", "data", "." ]
def set_mean(self, in_, mean):
    """
    Set the mean to subtract for centering the data.

    Parameters
    ----------
    in_ : which input to assign this mean.
    mean : mean ndarray (input dimensional or broadcastable)
    """
    self.__check_input(in_)
    ms = mean.shape
    if mean.ndim == 1:
        # broadcast channels
        if ms[0] != self.inputs[in_][1]:
            raise ValueError('Mean channels incompatible with input.')
        mean = mean[:, np.newaxis, np.newaxis]
    else:
        # elementwise mean
        if len(ms) == 2:
            ms = (1,) + ms
        if len(ms) != 3:
            raise ValueError('Mean shape invalid')
        if ms != self.inputs[in_][1:]:
            raise ValueError('Mean shape incompatible with input shape.')
    self.mean[in_] = mean
[ "def", "set_mean", "(", "self", ",", "in_", ",", "mean", ")", ":", "self", ".", "__check_input", "(", "in_", ")", "ms", "=", "mean", ".", "shape", "if", "mean", ".", "ndim", "==", "1", ":", "# broadcast channels", "if", "ms", "[", "0", "]", "!=", "self", ".", "inputs", "[", "in_", "]", "[", "1", "]", ":", "raise", "ValueError", "(", "'Mean channels incompatible with input.'", ")", "mean", "=", "mean", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "else", ":", "# elementwise mean", "if", "len", "(", "ms", ")", "==", "2", ":", "ms", "=", "(", "1", ",", ")", "+", "ms", "if", "len", "(", "ms", ")", "!=", "3", ":", "raise", "ValueError", "(", "'Mean shape invalid'", ")", "if", "ms", "!=", "self", ".", "inputs", "[", "in_", "]", "[", "1", ":", "]", ":", "raise", "ValueError", "(", "'Mean shape incompatible with input shape.'", ")", "self", ".", "mean", "[", "in_", "]", "=", "mean" ]
https://github.com/bairdzhang/smallhardface/blob/76fa1d87a9602d9b13d7a7fe693fc7aec91cab80/caffe/python/caffe/io.py#L236-L260
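A pure-NumPy illustration of the channel-broadcast branch above: a 1-D per-channel mean is reshaped to (C, 1, 1) so the subtraction broadcasts over height and width. The BGR mean values are illustrative.

import numpy as np

mean = np.array([104.0, 117.0, 123.0], dtype=np.float32)  # per-channel BGR mean
mean = mean[:, np.newaxis, np.newaxis]                    # shape (3, 1, 1)
image = np.ones((3, 224, 224), dtype=np.float32)
centered = image - mean                                   # broadcasts over H, W
print(centered.shape)  # (3, 224, 224)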
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/layers/python/layers/layers.py
python
_sparse_inner_flatten
(inputs, new_rank)
return flattened
Helper function for `inner_flatten`.
Helper function for `inner_flatten`.
[ "Helper", "function", "for", "inner_flatten", "." ]
def _sparse_inner_flatten(inputs, new_rank):
  """Helper function for `inner_flatten`."""
  inputs_rank = inputs.dense_shape.get_shape().as_list()[0]
  if inputs_rank < new_rank:
    raise ValueError(
        'Inputs has rank less than new_rank. {} must have rank at least'
        ' {}. Received rank {}, shape {}'.format(inputs, new_rank, inputs_rank,
                                                 inputs.get_shape()))

  outer_dimensions = inputs.dense_shape[:new_rank - 1]
  inner_dimensions = inputs.dense_shape[new_rank - 1:]
  new_shape = array_ops.concat((outer_dimensions,
                                [math_ops.reduce_prod(inner_dimensions)]), 0)
  flattened = sparse_ops.sparse_reshape(inputs, new_shape)
  return flattened
[ "def", "_sparse_inner_flatten", "(", "inputs", ",", "new_rank", ")", ":", "inputs_rank", "=", "inputs", ".", "dense_shape", ".", "get_shape", "(", ")", ".", "as_list", "(", ")", "[", "0", "]", "if", "inputs_rank", "<", "new_rank", ":", "raise", "ValueError", "(", "'Inputs has rank less than new_rank. {} must have rank at least'", "' {}. Received rank {}, shape {}'", ".", "format", "(", "inputs", ",", "new_rank", ",", "inputs_rank", ",", "inputs", ".", "get_shape", "(", ")", ")", ")", "outer_dimensions", "=", "inputs", ".", "dense_shape", "[", ":", "new_rank", "-", "1", "]", "inner_dimensions", "=", "inputs", ".", "dense_shape", "[", "new_rank", "-", "1", ":", "]", "new_shape", "=", "array_ops", ".", "concat", "(", "(", "outer_dimensions", ",", "[", "math_ops", ".", "reduce_prod", "(", "inner_dimensions", ")", "]", ")", ",", "0", ")", "flattened", "=", "sparse_ops", ".", "sparse_reshape", "(", "inputs", ",", "new_shape", ")", "return", "flattened" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/layers/python/layers/layers.py#L1460-L1474
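A dense-NumPy analogue of the inner-flatten rule above: keep the first new_rank - 1 dimensions and collapse everything after them into one (the sparse version performs the same arithmetic on dense_shape).

import numpy as np

x = np.zeros((2, 3, 4, 5))
new_rank = 2
flat = x.reshape(x.shape[:new_rank - 1] + (-1,))
print(flat.shape)  # (2, 60)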
eldar/deepcut-cnn
928bf2f224fce132f6e4404b4c95fb017297a5e0
python/caffe/detector.py
python
Detector.detect_windows
(self, images_windows)
return detections
Do windowed detection over given images and windows. Windows are
extracted then warped to the input dimensions of the net.

Parameters
----------
images_windows: (image filename, window list) iterable.
context_crop: size of context border to crop in pixels.

Returns
-------
detections: list of {filename: image filename, window: crop coordinates,
    predictions: prediction vector} dicts.
Do windowed detection over given images and windows. Windows are extracted then warped to the input dimensions of the net.
[ "Do", "windowed", "detection", "over", "given", "images", "and", "windows", ".", "Windows", "are", "extracted", "then", "warped", "to", "the", "input", "dimensions", "of", "the", "net", "." ]
def detect_windows(self, images_windows):
    """
    Do windowed detection over given images and windows. Windows are
    extracted then warped to the input dimensions of the net.

    Parameters
    ----------
    images_windows: (image filename, window list) iterable.
    context_crop: size of context border to crop in pixels.

    Returns
    -------
    detections: list of {filename: image filename, window: crop coordinates,
        predictions: prediction vector} dicts.
    """
    # Extract windows.
    window_inputs = []
    for image_fname, windows in images_windows:
        image = caffe.io.load_image(image_fname).astype(np.float32)
        for window in windows:
            window_inputs.append(self.crop(image, window))

    # Run through the net (warping windows to input dimensions).
    in_ = self.inputs[0]
    caffe_in = np.zeros((len(window_inputs), window_inputs[0].shape[2])
                        + self.blobs[in_].data.shape[2:],
                        dtype=np.float32)
    for ix, window_in in enumerate(window_inputs):
        caffe_in[ix] = self.transformer.preprocess(in_, window_in)
    out = self.forward_all(**{in_: caffe_in})
    predictions = out[self.outputs[0]].squeeze(axis=(2, 3))

    # Package predictions with images and windows.
    detections = []
    ix = 0
    for image_fname, windows in images_windows:
        for window in windows:
            detections.append({
                'window': window,
                'prediction': predictions[ix],
                'filename': image_fname
            })
            ix += 1
    return detections
[ "def", "detect_windows", "(", "self", ",", "images_windows", ")", ":", "# Extract windows.", "window_inputs", "=", "[", "]", "for", "image_fname", ",", "windows", "in", "images_windows", ":", "image", "=", "caffe", ".", "io", ".", "load_image", "(", "image_fname", ")", ".", "astype", "(", "np", ".", "float32", ")", "for", "window", "in", "windows", ":", "window_inputs", ".", "append", "(", "self", ".", "crop", "(", "image", ",", "window", ")", ")", "# Run through the net (warping windows to input dimensions).", "in_", "=", "self", ".", "inputs", "[", "0", "]", "caffe_in", "=", "np", ".", "zeros", "(", "(", "len", "(", "window_inputs", ")", ",", "window_inputs", "[", "0", "]", ".", "shape", "[", "2", "]", ")", "+", "self", ".", "blobs", "[", "in_", "]", ".", "data", ".", "shape", "[", "2", ":", "]", ",", "dtype", "=", "np", ".", "float32", ")", "for", "ix", ",", "window_in", "in", "enumerate", "(", "window_inputs", ")", ":", "caffe_in", "[", "ix", "]", "=", "self", ".", "transformer", ".", "preprocess", "(", "in_", ",", "window_in", ")", "out", "=", "self", ".", "forward_all", "(", "*", "*", "{", "in_", ":", "caffe_in", "}", ")", "predictions", "=", "out", "[", "self", ".", "outputs", "[", "0", "]", "]", ".", "squeeze", "(", "axis", "=", "(", "2", ",", "3", ")", ")", "# Package predictions with images and windows.", "detections", "=", "[", "]", "ix", "=", "0", "for", "image_fname", ",", "windows", "in", "images_windows", ":", "for", "window", "in", "windows", ":", "detections", ".", "append", "(", "{", "'window'", ":", "window", ",", "'prediction'", ":", "predictions", "[", "ix", "]", ",", "'filename'", ":", "image_fname", "}", ")", "ix", "+=", "1", "return", "detections" ]
https://github.com/eldar/deepcut-cnn/blob/928bf2f224fce132f6e4404b4c95fb017297a5e0/python/caffe/detector.py#L56-L99
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/framework/convert_to_constants.py
python
_populate_if_op
(output_node, input_node, function_data)
Updates the type attributes and function names of If or StatelessIf.

Args:
  output_node: TensorFlow NodeDef.
  input_node: TensorFlow NodeDef.
  function_data: Map of function names to the list of types and shapes that
    correspond with the function arguments.
Updates the type attributes and function names of If or StatelessIf.
[ "Updates", "the", "type", "attributes", "and", "function", "names", "of", "If", "or", "StatelessIf", "." ]
def _populate_if_op(output_node, input_node, function_data):
  """Updates the type attributes and function names of If or StatelessIf.

  Args:
    output_node: TensorFlow NodeDef.
    input_node: TensorFlow NodeDef.
    function_data: Map of function names to the list of types and shapes that
      correspond with the function arguments.
  """
  output_node.CopyFrom(input_node)
  then_func = input_node.attr["then_branch"].func.name
  output_node.attr["then_branch"].func.name = _get_new_function_name(then_func)
  output_node.attr["else_branch"].func.name = _get_new_function_name(
      input_node.attr["else_branch"].func.name)
  output_node.attr["Tin"].list.CopyFrom(
      attr_value_pb2.AttrValue.ListValue(
          type=function_data[then_func]["types"]))
[ "def", "_populate_if_op", "(", "output_node", ",", "input_node", ",", "function_data", ")", ":", "output_node", ".", "CopyFrom", "(", "input_node", ")", "then_func", "=", "input_node", ".", "attr", "[", "\"then_branch\"", "]", ".", "func", ".", "name", "output_node", ".", "attr", "[", "\"then_branch\"", "]", ".", "func", ".", "name", "=", "_get_new_function_name", "(", "then_func", ")", "output_node", ".", "attr", "[", "\"else_branch\"", "]", ".", "func", ".", "name", "=", "_get_new_function_name", "(", "input_node", ".", "attr", "[", "\"else_branch\"", "]", ".", "func", ".", "name", ")", "output_node", ".", "attr", "[", "\"Tin\"", "]", ".", "list", ".", "CopyFrom", "(", "attr_value_pb2", ".", "AttrValue", ".", "ListValue", "(", "type", "=", "function_data", "[", "then_func", "]", "[", "\"types\"", "]", ")", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/framework/convert_to_constants.py#L303-L319
wallix/redemption
fb4ceefb39e11e1ae250bce17e878e1dc7d195d2
tools/icap_validator/fake_validator.py
python
read_session_msg
(client_socket)
return False, received_message, message_type
Read data received on the session client socket, parse the header, and return the message type read together with True if no error is detected
Read data received on the session client socket, parse the header, and return the message type read together with True if no error is detected
[ "Read", "session", "client", "socket", "received", "data", "parse", "the", "header", "to", "return", "read", "message", "type", "and", "True", "if", "no", "error", "is", "detected" ]
def read_session_msg(client_socket):
    """
    Read data received on the session client socket, parse the header, and
    return the message type read together with True if no error is detected
    """
    message_type = -1
    received_message = b""
    ok, header_received = read_session_socket(client_socket, 5)
    if ok:
        message_type, message_len = struct.unpack_from(">BI", header_received)
        ok, received_message = read_session_socket(client_socket, message_len)
        if ok:
            return True, received_message, message_type
    return False, received_message, message_type
[ "def", "read_session_msg", "(", "client_socket", ")", ":", "message_type", "=", "-", "1", "received_message", "=", "b\"\"", "ok", ",", "header_received", "=", "read_session_socket", "(", "client_socket", ",", "5", ")", "if", "ok", ":", "message_type", ",", "message_len", "=", "struct", ".", "unpack_from", "(", "\">BI\"", ",", "header_received", ")", "ok", ",", "received_message", "=", "read_session_socket", "(", "client_socket", ",", "message_len", ")", "if", "ok", ":", "return", "True", ",", "received_message", ",", "message_type", "return", "False", ",", "received_message", ",", "message_type" ]
https://github.com/wallix/redemption/blob/fb4ceefb39e11e1ae250bce17e878e1dc7d195d2/tools/icap_validator/fake_validator.py#L28-L43
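For reference, the 5-byte header read_session_msg expects can be produced and parsed round-trip with the standard struct module; the type value 7 and payload below are made-up examples:

import struct

payload = b"hello"
header = struct.pack(">BI", 7, len(payload))  # 1-byte type, big-endian 4-byte length
assert struct.calcsize(">BI") == 5            # why the code reads exactly 5 bytes
msg_type, msg_len = struct.unpack_from(">BI", header)
assert (msg_type, msg_len) == (7, len(payload))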
tfwu/FaceDetection-ConvNet-3D
f9251c48eb40c5aec8fba7455115c355466555be
python/mxnet/metric.py
python
CompositeEvalMetric.get_metric
(self, index)
Get a child metric.
Get a child metric.
[ "Get", "a", "child", "metric", "." ]
def get_metric(self, index):
    """Get a child metric."""
    try:
        return self.metrics[index]
    except IndexError:
        raise ValueError("Metric index {} is out of range 0 and {}".format(
            index, len(self.metrics)))
[ "def", "get_metric", "(", "self", ",", "index", ")", ":", "try", ":", "return", "self", ".", "metrics", "[", "index", "]", "except", "IndexError", ":", "return", "ValueError", "(", "\"Metric index {} is out of range 0 and {}\"", ".", "format", "(", "index", ",", "len", "(", "self", ".", "metrics", ")", ")", ")" ]
https://github.com/tfwu/FaceDetection-ConvNet-3D/blob/f9251c48eb40c5aec8fba7455115c355466555be/python/mxnet/metric.py#L95-L101
llvm-mirror/lldb
d01083a850f577b85501a0902b52fd0930de72c7
third_party/Python/module/pexpect-4.6/pexpect/pty_spawn.py
python
spawn.terminate
(self, force=False)
This forces a child process to terminate. It starts nicely with SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This returns True if the child was terminated. This returns False if the child could not be terminated.
This forces a child process to terminate. It starts nicely with SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This returns True if the child was terminated. This returns False if the child could not be terminated.
[ "This", "forces", "a", "child", "process", "to", "terminate", ".", "It", "starts", "nicely", "with", "SIGHUP", "and", "SIGINT", ".", "If", "force", "is", "True", "then", "moves", "onto", "SIGKILL", ".", "This", "returns", "True", "if", "the", "child", "was", "terminated", ".", "This", "returns", "False", "if", "the", "child", "could", "not", "be", "terminated", "." ]
def terminate(self, force=False):
    '''This forces a child process to terminate. It starts nicely with
    SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This
    returns True if the child was terminated. This returns False if the
    child could not be terminated. '''

    if not self.isalive():
        return True
    try:
        self.kill(signal.SIGHUP)
        time.sleep(self.delayafterterminate)
        if not self.isalive():
            return True
        self.kill(signal.SIGCONT)
        time.sleep(self.delayafterterminate)
        if not self.isalive():
            return True
        self.kill(signal.SIGINT)
        time.sleep(self.delayafterterminate)
        if not self.isalive():
            return True
        if force:
            self.kill(signal.SIGKILL)
            time.sleep(self.delayafterterminate)
            if not self.isalive():
                return True
            else:
                return False
        return False
    except OSError:
        # I think there are kernel timing issues that sometimes cause
        # this to happen. I think isalive() reports True, but the
        # process is dead to the kernel.
        # Make one last attempt to see if the kernel is up to date.
        time.sleep(self.delayafterterminate * 10)
        if not self.isalive():
            return True
        else:
            return False
[ "def", "terminate", "(", "self", ",", "force", "=", "False", ")", ":", "if", "not", "self", ".", "isalive", "(", ")", ":", "return", "True", "try", ":", "self", ".", "kill", "(", "signal", ".", "SIGHUP", ")", "time", ".", "sleep", "(", "self", ".", "delayafterterminate", ")", "if", "not", "self", ".", "isalive", "(", ")", ":", "return", "True", "self", ".", "kill", "(", "signal", ".", "SIGCONT", ")", "time", ".", "sleep", "(", "self", ".", "delayafterterminate", ")", "if", "not", "self", ".", "isalive", "(", ")", ":", "return", "True", "self", ".", "kill", "(", "signal", ".", "SIGINT", ")", "time", ".", "sleep", "(", "self", ".", "delayafterterminate", ")", "if", "not", "self", ".", "isalive", "(", ")", ":", "return", "True", "if", "force", ":", "self", ".", "kill", "(", "signal", ".", "SIGKILL", ")", "time", ".", "sleep", "(", "self", ".", "delayafterterminate", ")", "if", "not", "self", ".", "isalive", "(", ")", ":", "return", "True", "else", ":", "return", "False", "return", "False", "except", "OSError", ":", "# I think there are kernel timing issues that sometimes cause", "# this to happen. I think isalive() reports True, but the", "# process is dead to the kernel.", "# Make one last attempt to see if the kernel is up to date.", "time", ".", "sleep", "(", "self", ".", "delayafterterminate", "*", "10", ")", "if", "not", "self", ".", "isalive", "(", ")", ":", "return", "True", "else", ":", "return", "False" ]
https://github.com/llvm-mirror/lldb/blob/d01083a850f577b85501a0902b52fd0930de72c7/third_party/Python/module/pexpect-4.6/pexpect/pty_spawn.py#L609-L647
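A hedged usage sketch of the escalation above with the real pexpect package; 'cat' is just a convenient long-lived child process, and exact timing behavior may vary:

import pexpect

child = pexpect.spawn("cat")
child.sendline("ping")
child.expect("ping")             # cat echoes the line back
if not child.terminate():        # SIGHUP, SIGCONT, SIGINT pass first
    child.terminate(force=True)  # escalate to SIGKILL
print(child.isalive())           # expected: False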
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/textwrap.py
python
TextWrapper.wrap
(self, text)
return self._wrap_chunks(chunks)
wrap(text : string) -> [string]

Reformat the single paragraph in 'text' so it fits in lines of
no more than 'self.width' columns, and return a list of wrapped
lines.  Tabs in 'text' are expanded with string.expandtabs(),
and all other whitespace characters (including newline) are
converted to space.
wrap(text : string) -> [string]
[ "wrap", "(", "text", ":", "string", ")", "-", ">", "[", "string", "]" ]
def wrap(self, text): """wrap(text : string) -> [string] Reformat the single paragraph in 'text' so it fits in lines of no more than 'self.width' columns, and return a list of wrapped lines. Tabs in 'text' are expanded with string.expandtabs(), and all other whitespace characters (including newline) are converted to space. """ text = self._munge_whitespace(text) chunks = self._split(text) if self.fix_sentence_endings: self._fix_sentence_endings(chunks) return self._wrap_chunks(chunks)
[ "def", "wrap", "(", "self", ",", "text", ")", ":", "text", "=", "self", ".", "_munge_whitespace", "(", "text", ")", "chunks", "=", "self", ".", "_split", "(", "text", ")", "if", "self", ".", "fix_sentence_endings", ":", "self", ".", "_fix_sentence_endings", "(", "chunks", ")", "return", "self", ".", "_wrap_chunks", "(", "chunks", ")" ]
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/textwrap.py#L308-L321
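Standard-library usage of the method documented above; the sample sentence is arbitrary:

import textwrap

wrapper = textwrap.TextWrapper(width=24, fix_sentence_endings=True)
for line in wrapper.wrap("Call me Ishmael.  Some years ago, never mind how long."):
    print(line)  # each line is at most 24 columns wide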
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_misc.py
python
PlatformInformation.SetLinuxDistributionInfo
(*args, **kwargs)
return _misc_.PlatformInformation_SetLinuxDistributionInfo(*args, **kwargs)
SetLinuxDistributionInfo(self, LinuxDistributionInfo di)
SetLinuxDistributionInfo(self, LinuxDistributionInfo di)
[ "SetLinuxDistributionInfo", "(", "self", "LinuxDistributionInfo", "di", ")" ]
def SetLinuxDistributionInfo(*args, **kwargs):
    """SetLinuxDistributionInfo(self, LinuxDistributionInfo di)"""
    return _misc_.PlatformInformation_SetLinuxDistributionInfo(*args, **kwargs)
[ "def", "SetLinuxDistributionInfo", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_misc_", ".", "PlatformInformation_SetLinuxDistributionInfo", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_misc.py#L1178-L1180
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/python/framework/errors.py
python
OpError.node_def
(self)
return self._node_def
The `NodeDef` proto representing the op that failed.
The `NodeDef` proto representing the op that failed.
[ "The", "NodeDef", "proto", "representing", "the", "op", "that", "failed", "." ]
def node_def(self):
    """The `NodeDef` proto representing the op that failed."""
    return self._node_def
[ "def", "node_def", "(", "self", ")", ":", "return", "self", ".", "_node_def" ]
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/framework/errors.py#L83-L85
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/ipaddress.py
python
ip_interface
(address)
Take an IP string/int and return an object of the correct type.

Args:
    address: A string or integer, the IP address.  Either IPv4 or
      IPv6 addresses may be supplied; integers less than 2**32 will
      be considered to be IPv4 by default.

Returns:
    An IPv4Interface or IPv6Interface object.

Raises:
    ValueError: if the string passed isn't either a v4 or a v6
      address.

Notes:
    The IPv?Interface classes describe an Address on a particular
    Network, so they're basically a combination of both the Address
    and Network classes.
Take an IP string/int and return an object of the correct type.
[ "Take", "an", "IP", "string", "/", "int", "and", "return", "an", "object", "of", "the", "correct", "type", "." ]
def ip_interface(address):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Interface or IPv6Interface object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address.

    Notes:
        The IPv?Interface classes describe an Address on a particular
        Network, so they're basically a combination of both the Address
        and Network classes.

    """
    try:
        return IPv4Interface(address)
    except (AddressValueError, NetmaskValueError):
        pass

    try:
        return IPv6Interface(address)
    except (AddressValueError, NetmaskValueError):
        pass

    raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
                     address)
[ "def", "ip_interface", "(", "address", ")", ":", "try", ":", "return", "IPv4Interface", "(", "address", ")", "except", "(", "AddressValueError", ",", "NetmaskValueError", ")", ":", "pass", "try", ":", "return", "IPv6Interface", "(", "address", ")", "except", "(", "AddressValueError", ",", "NetmaskValueError", ")", ":", "pass", "raise", "ValueError", "(", "'%r does not appear to be an IPv4 or IPv6 interface'", "%", "address", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/ipaddress.py#L87-L119
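Usage of the stdlib factory documented above (ipaddress ships with Python 3):

import ipaddress

iface = ipaddress.ip_interface("192.0.2.1/24")
print(iface.ip, iface.network)                   # 192.0.2.1 192.0.2.0/24
print(ipaddress.ip_interface("2001:db8::1/64"))  # IPv6Interface('2001:db8::1/64')
try:
    ipaddress.ip_interface("not-an-address")
except ValueError as err:
    print(err)  # neither a v4 nor a v6 interface, per the Raises section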
apache/qpid-proton
6bcdfebb55ea3554bc29b1901422532db331a591
python/proton/_transport.py
python
Transport.pop
(self, size: int)
Removes ``size`` bytes of output from the pending output queue
following the transport's head pointer.

Calls to this function may alter the transport's head pointer as
well as the number of pending bytes reported by :meth:`pending`.

:param size: Number of bytes to remove.
Removes ``size`` bytes of output from the pending output queue following the transport's head pointer.
[ "Removes", "size", "bytes", "of", "output", "from", "the", "pending", "output", "queue", "following", "the", "transport", "s", "head", "pointer", "." ]
def pop(self, size: int) -> None:
    """
    Removes ``size`` bytes of output from the pending output queue
    following the transport's head pointer.

    Calls to this function may alter the transport's head pointer as
    well as the number of pending bytes reported by :meth:`pending`.

    :param size: Number of bytes to remove.
    """
    pn_transport_pop(self._impl, size)
[ "def", "pop", "(", "self", ",", "size", ":", "int", ")", "->", "None", ":", "pn_transport_pop", "(", "self", ".", "_impl", ",", "size", ")" ]
https://github.com/apache/qpid-proton/blob/6bcdfebb55ea3554bc29b1901422532db331a591/python/proton/_transport.py#L345-L356
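A sketch of the head-side write loop the docstring implies. pending() and pop() come from the record itself; peek() and the ready-to-write socket are assumptions about the proton binding and the caller:

def drain_transport(transport, sock):
    # pending() reports bytes queued at the transport's head pointer.
    n = transport.pending()
    if n > 0:
        data = transport.peek(n)  # assumed: read head bytes without consuming
        sent = sock.send(data)
        transport.pop(sent)       # advance the head past what actually went out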
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_internal/vcs/versioncontrol.py
python
VersionControl.make_rev_args
(username, password)
return []
Return the RevOptions "extra arguments" to use in obtain().
Return the RevOptions "extra arguments" to use in obtain().
[ "Return", "the", "RevOptions", "extra", "arguments", "to", "use", "in", "obtain", "()", "." ]
def make_rev_args(username, password):
    # type: (Optional[str], Optional[HiddenText]) -> CommandArgs
    """
    Return the RevOptions "extra arguments" to use in obtain().
    """
    return []
[ "def", "make_rev_args", "(", "username", ",", "password", ")", ":", "# type: (Optional[str], Optional[HiddenText]) -> CommandArgs", "return", "[", "]" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_internal/vcs/versioncontrol.py#L449-L454
borglab/gtsam
a5bee157efce6a0563704bce6a5d188c29817f39
wrap/gtwrap/template_instantiator/helpers.py
python
instantiate_name
(original_name: str, instantiations: Sequence[parser.Typename])
return "{}{}".format(original_name, "".join(instantiated_names))
Concatenate instantiated types with `original_name` to form a new
instantiated name.

NOTE: To avoid conflicts, we should include the instantiation's
namespaces, but that is too verbose.
Concatenate instantiated types with `original_name` to form a new instantiated name.
[ "Concatenate", "instantiated", "types", "with", "original_name", "to", "form", "a", "new", "instantiated", "name", "." ]
def instantiate_name(original_name: str,
                     instantiations: Sequence[parser.Typename]):
    """
    Concatenate instantiated types with `original_name` to form a new
    instantiated name.

    NOTE: To avoid conflicts, we should include the instantiation's
    namespaces, but that is too verbose.
    """
    instantiated_names = []
    for inst in instantiations:
        # Ensure the first character of the type is capitalized
        name = inst.instantiated_name()
        # Using `capitalize` on the complete name causes other caps to be lower case
        instantiated_names.append(name.replace(name[0], name[0].capitalize()))

    return "{}{}".format(original_name, "".join(instantiated_names))
[ "def", "instantiate_name", "(", "original_name", ":", "str", ",", "instantiations", ":", "Sequence", "[", "parser", ".", "Typename", "]", ")", ":", "instantiated_names", "=", "[", "]", "for", "inst", "in", "instantiations", ":", "# Ensure the first character of the type is capitalized", "name", "=", "inst", ".", "instantiated_name", "(", ")", "# Using `capitalize` on the complete name causes other caps to be lower case", "instantiated_names", ".", "append", "(", "name", ".", "replace", "(", "name", "[", "0", "]", ",", "name", "[", "0", "]", ".", "capitalize", "(", ")", ")", ")", "return", "\"{}{}\"", ".", "format", "(", "original_name", ",", "\"\"", ".", "join", "(", "instantiated_names", ")", ")" ]
https://github.com/borglab/gtsam/blob/a5bee157efce6a0563704bce6a5d188c29817f39/wrap/gtwrap/template_instantiator/helpers.py#L196-L212
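A standalone re-run of the concatenation rule above, with plain strings standing in for parser.Typename (the names are hypothetical):

def demo_instantiate_name(original_name, type_names):
    parts = []
    for name in type_names:
        # Same quirk as the source: replace() swaps every occurrence of the
        # first character, not only the leading one.
        parts.append(name.replace(name[0], name[0].capitalize()))
    return "{}{}".format(original_name, "".join(parts))

print(demo_instantiate_name("Values", ["point2", "pose2"]))  # ValuesPoint2Pose2
print(demo_instantiate_name("Graph", ["dedup"]))             # GraphDeDup (the quirk)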
maidsafe-archive/MaidSafe
defd65e1c8cfb6a1cbdeaaa0eee31d065421792d
tools/cpplint.py
python
CheckEmptyLoopBody
(filename, clean_lines, linenum, error)
Look for an empty loop body with only a single semicolon.

Args:
  filename: The name of the current file.
  clean_lines: A CleansedLines instance containing the file.
  linenum: The number of the line to check.
  error: The function to call with any errors found.
Look for an empty loop body with only a single semicolon.
[ "Loop", "for", "empty", "loop", "body", "with", "only", "a", "single", "semicolon", "." ]
def CheckEmptyLoopBody(filename, clean_lines, linenum, error):
  """Look for an empty loop body with only a single semicolon.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """

  # Search for loop keywords at the beginning of the line. Because only
  # whitespaces are allowed before the keywords, this will also ignore most
  # do-while-loops, since those lines should start with closing brace.
  line = clean_lines.elided[linenum]
  if Match(r'\s*(for|while)\s*\(', line):
    # Find the end of the conditional expression
    (end_line, end_linenum, end_pos) = CloseExpression(
        clean_lines, linenum, line.find('('))

    # Output warning if what follows the condition expression is a semicolon.
    # No warning for all other cases, including whitespace or newline, since we
    # have a separate check for semicolons preceded by whitespace.
    if end_pos >= 0 and Match(r';', end_line[end_pos:]):
      error(filename, end_linenum, 'whitespace/empty_loop_body', 5,
            'Empty loop bodies should use {} or continue')
[ "def", "CheckEmptyLoopBody", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "# Search for loop keywords at the beginning of the line. Because only", "# whitespaces are allowed before the keywords, this will also ignore most", "# do-while-loops, since those lines should start with closing brace.", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "if", "Match", "(", "r'\\s*(for|while)\\s*\\('", ",", "line", ")", ":", "# Find the end of the conditional expression", "(", "end_line", ",", "end_linenum", ",", "end_pos", ")", "=", "CloseExpression", "(", "clean_lines", ",", "linenum", ",", "line", ".", "find", "(", "'('", ")", ")", "# Output warning if what follows the condition expression is a semicolon.", "# No warning for all other cases, including whitespace or newline, since we", "# have a separate check for semicolons preceded by whitespace.", "if", "end_pos", ">=", "0", "and", "Match", "(", "r';'", ",", "end_line", "[", "end_pos", ":", "]", ")", ":", "error", "(", "filename", ",", "end_linenum", ",", "'whitespace/empty_loop_body'", ",", "5", ",", "'Empty loop bodies should use {} or continue'", ")" ]
https://github.com/maidsafe-archive/MaidSafe/blob/defd65e1c8cfb6a1cbdeaaa0eee31d065421792d/tools/cpplint.py#L2652-L2676
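The two regular expressions above, exercised on a toy line with the stdlib re module; the simple rfind(')') is a crude stand-in for cpplint's CloseExpression helper:

import re

line = "  while (keep_going(x));"
if re.match(r"\s*(for|while)\s*\(", line):
    end_pos = line.rfind(")") + 1  # position just past the condition's ')'
    if re.match(r";", line[end_pos:]):
        print("Empty loop bodies should use {} or continue")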
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/setuptools/py2/setuptools/command/egg_info.py
python
egg_info.write_or_delete_file
(self, what, filename, data, force=False)
Write `data` to `filename` or delete if empty

If `data` is non-empty, this routine is the same as ``write_file()``.
If `data` is empty but not ``None``, this is the same as calling
``delete_file(filename)``.  If `data` is ``None``, then this is a no-op
unless `filename` exists, in which case a warning is issued about the
orphaned file (if `force` is false), or deleted (if `force` is true).
Write `data` to `filename` or delete if empty
[ "Write", "data", "to", "filename", "or", "delete", "if", "empty" ]
def write_or_delete_file(self, what, filename, data, force=False): """Write `data` to `filename` or delete if empty If `data` is non-empty, this routine is the same as ``write_file()``. If `data` is empty but not ``None``, this is the same as calling ``delete_file(filename)`. If `data` is ``None``, then this is a no-op unless `filename` exists, in which case a warning is issued about the orphaned file (if `force` is false), or deleted (if `force` is true). """ if data: self.write_file(what, filename, data) elif os.path.exists(filename): if data is None and not force: log.warn( "%s not set in setup(), but %s exists", what, filename ) return else: self.delete_file(filename)
[ "def", "write_or_delete_file", "(", "self", ",", "what", ",", "filename", ",", "data", ",", "force", "=", "False", ")", ":", "if", "data", ":", "self", ".", "write_file", "(", "what", ",", "filename", ",", "data", ")", "elif", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "if", "data", "is", "None", "and", "not", "force", ":", "log", ".", "warn", "(", "\"%s not set in setup(), but %s exists\"", ",", "what", ",", "filename", ")", "return", "else", ":", "self", ".", "delete_file", "(", "filename", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py2/setuptools/command/egg_info.py#L242-L260
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/vpc/__init__.py
python
VPCConnection.get_all_route_tables
(self, route_table_ids=None, filters=None, dry_run=False)
return self.get_list('DescribeRouteTables', params, [('item', RouteTable)])
Retrieve information about your routing tables. You can filter results
to return information only about those route tables that match your
search parameters. Otherwise, all route tables associated with your
account are returned.

:type route_table_ids: list
:param route_table_ids: A list of strings with the desired route table IDs.

:type filters: list of tuples or dict
:param filters: A list of tuples or dict containing filters. Each tuple
    or dict item consists of a filter key and a filter value.

:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.

:rtype: list
:return: A list of :class:`boto.vpc.routetable.RouteTable`
Retrieve information about your routing tables. You can filter results to return information only about those route tables that match your search parameters. Otherwise, all route tables associated with your account are returned.
[ "Retrieve", "information", "about", "your", "routing", "tables", ".", "You", "can", "filter", "results", "to", "return", "information", "only", "about", "those", "route", "tables", "that", "match", "your", "search", "parameters", ".", "Otherwise", "all", "route", "tables", "associated", "with", "your", "account", "are", "returned", "." ]
def get_all_route_tables(self, route_table_ids=None, filters=None,
                         dry_run=False):
    """
    Retrieve information about your routing tables. You can filter results
    to return information only about those route tables that match your
    search parameters. Otherwise, all route tables associated with your
    account are returned.

    :type route_table_ids: list
    :param route_table_ids: A list of strings with the desired route table IDs.

    :type filters: list of tuples or dict
    :param filters: A list of tuples or dict containing filters. Each tuple
        or dict item consists of a filter key and a filter value.

    :type dry_run: bool
    :param dry_run: Set to True if the operation should not actually run.

    :rtype: list
    :return: A list of :class:`boto.vpc.routetable.RouteTable`
    """
    params = {}
    if route_table_ids:
        self.build_list_params(params, route_table_ids, "RouteTableId")
    if filters:
        self.build_filter_params(params, filters)
    if dry_run:
        params['DryRun'] = 'true'
    return self.get_list('DescribeRouteTables', params,
                         [('item', RouteTable)])
[ "def", "get_all_route_tables", "(", "self", ",", "route_table_ids", "=", "None", ",", "filters", "=", "None", ",", "dry_run", "=", "False", ")", ":", "params", "=", "{", "}", "if", "route_table_ids", ":", "self", ".", "build_list_params", "(", "params", ",", "route_table_ids", ",", "\"RouteTableId\"", ")", "if", "filters", ":", "self", ".", "build_filter_params", "(", "params", ",", "filters", ")", "if", "dry_run", ":", "params", "[", "'DryRun'", "]", "=", "'true'", "return", "self", ".", "get_list", "(", "'DescribeRouteTables'", ",", "params", ",", "[", "(", "'item'", ",", "RouteTable", ")", "]", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/vpc/__init__.py#L196-L226
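A hedged usage sketch against the classic boto 2.x API this record comes from; the region name, VPC id, and the route attributes printed are assumptions about that library's objects:

import boto.vpc

conn = boto.vpc.connect_to_region("us-east-1")
tables = conn.get_all_route_tables(filters=[("vpc-id", "vpc-12345678")])
for rt in tables:
    # RouteTable objects are assumed to expose .id and a .routes list
    print(rt.id, [r.destination_cidr_block for r in rt.routes])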
etternagame/etterna
8775f74ac9c353320128609d4b4150672e9a6d04
extern/fmt/support/docopt.py
python
transform
(pattern)
return Either(*[Required(*e) for e in result])
Expand pattern into an (almost) equivalent one, but with single Either.

Example: ((-a | -b) (-c | -d)) => (-a -c | -a -d | -b -c | -b -d)
Quirks: [-a] => (-a), (-a...) => (-a -a)
Expand pattern into an (almost) equivalent one, but with single Either.
[ "Expand", "pattern", "into", "an", "(", "almost", ")", "equivalent", "one", "but", "with", "single", "Either", "." ]
def transform(pattern):
    """Expand pattern into an (almost) equivalent one, but with single Either.

    Example: ((-a | -b) (-c | -d)) => (-a -c | -a -d | -b -c | -b -d)
    Quirks: [-a] => (-a), (-a...) => (-a -a)

    """
    result = []
    groups = [[pattern]]
    while groups:
        children = groups.pop(0)
        parents = [Required, Optional, OptionsShortcut, Either, OneOrMore]
        if any(t in map(type, children) for t in parents):
            child = [c for c in children if type(c) in parents][0]
            children.remove(child)
            if type(child) is Either:
                for c in child.children:
                    groups.append([c] + children)
            elif type(child) is OneOrMore:
                groups.append(child.children * 2 + children)
            else:
                groups.append(child.children + children)
        else:
            result.append(children)
    return Either(*[Required(*e) for e in result])
[ "def", "transform", "(", "pattern", ")", ":", "result", "=", "[", "]", "groups", "=", "[", "[", "pattern", "]", "]", "while", "groups", ":", "children", "=", "groups", ".", "pop", "(", "0", ")", "parents", "=", "[", "Required", ",", "Optional", ",", "OptionsShortcut", ",", "Either", ",", "OneOrMore", "]", "if", "any", "(", "t", "in", "map", "(", "type", ",", "children", ")", "for", "t", "in", "parents", ")", ":", "child", "=", "[", "c", "for", "c", "in", "children", "if", "type", "(", "c", ")", "in", "parents", "]", "[", "0", "]", "children", ".", "remove", "(", "child", ")", "if", "type", "(", "child", ")", "is", "Either", ":", "for", "c", "in", "child", ".", "children", ":", "groups", ".", "append", "(", "[", "c", "]", "+", "children", ")", "elif", "type", "(", "child", ")", "is", "OneOrMore", ":", "groups", ".", "append", "(", "child", ".", "children", "*", "2", "+", "children", ")", "else", ":", "groups", ".", "append", "(", "child", ".", "children", "+", "children", ")", "else", ":", "result", ".", "append", "(", "children", ")", "return", "Either", "(", "*", "[", "Required", "(", "*", "e", ")", "for", "e", "in", "result", "]", ")" ]
https://github.com/etternagame/etterna/blob/8775f74ac9c353320128609d4b4150672e9a6d04/extern/fmt/support/docopt.py#L72-L96
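The distribution step transform() performs, shown on plain strings with itertools.product: every alternative of each Either group pairs with every alternative of the others, matching the docstring's example:

from itertools import product

groups = [["-a", "-b"], ["-c", "-d"]]  # ((-a | -b) (-c | -d))
print([list(combo) for combo in product(*groups)])
# [['-a', '-c'], ['-a', '-d'], ['-b', '-c'], ['-b', '-d']]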
BitMEX/api-connectors
37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812
auto-generated/python/swagger_client/models/margin.py
python
Margin.taxable_margin
(self)
return self._taxable_margin
Gets the taxable_margin of this Margin.  # noqa: E501


:return: The taxable_margin of this Margin.  # noqa: E501
:rtype: float
Gets the taxable_margin of this Margin. # noqa: E501
[ "Gets", "the", "taxable_margin", "of", "this", "Margin", ".", "#", "noqa", ":", "E501" ]
def taxable_margin(self):
    """Gets the taxable_margin of this Margin.  # noqa: E501


    :return: The taxable_margin of this Margin.  # noqa: E501
    :rtype: float
    """
    return self._taxable_margin
[ "def", "taxable_margin", "(", "self", ")", ":", "return", "self", ".", "_taxable_margin" ]
https://github.com/BitMEX/api-connectors/blob/37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812/auto-generated/python/swagger_client/models/margin.py#L631-L638
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/stc.py
python
StyledTextCtrl.AutoCompSetFillUps
(*args, **kwargs)
return _stc.StyledTextCtrl_AutoCompSetFillUps(*args, **kwargs)
AutoCompSetFillUps(self, String characterSet)

Define a set of characters that when typed will cause the autocompletion
to choose the selected item.
AutoCompSetFillUps(self, String characterSet)
[ "AutoCompSetFillUps", "(", "self", "String", "characterSet", ")" ]
def AutoCompSetFillUps(*args, **kwargs):
    """
    AutoCompSetFillUps(self, String characterSet)

    Define a set of characters that when typed will cause the autocompletion
    to choose the selected item.
    """
    return _stc.StyledTextCtrl_AutoCompSetFillUps(*args, **kwargs)
[ "def", "AutoCompSetFillUps", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_stc", ".", "StyledTextCtrl_AutoCompSetFillUps", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/stc.py#L3120-L3127
deepmind/open_spiel
4ca53bea32bb2875c7385d215424048ae92f78c8
open_spiel/python/algorithms/cfr_br.py
python
CFRBRSolver._compute_best_responses
(self)
Computes each player best-response against the pool of other players.
Computes each player best-response against the pool of other players.
[ "Computes", "each", "player", "best", "-", "response", "against", "the", "pool", "of", "other", "players", "." ]
def _compute_best_responses(self):
  """Computes each player best-response against the pool of other players."""

  def policy_fn(state):
    key = state.information_state_string()
    return self._get_infostate_policy(key)

  current_policy = policy.tabular_policy_from_callable(self._game, policy_fn)

  for player_id in range(self._game.num_players()):
    self._best_responses[player_id] = exploitability.best_response(
        self._game, current_policy, player_id)
[ "def", "_compute_best_responses", "(", "self", ")", ":", "def", "policy_fn", "(", "state", ")", ":", "key", "=", "state", ".", "information_state_string", "(", ")", "return", "self", ".", "_get_infostate_policy", "(", "key", ")", "current_policy", "=", "policy", ".", "tabular_policy_from_callable", "(", "self", ".", "_game", ",", "policy_fn", ")", "for", "player_id", "in", "range", "(", "self", ".", "_game", ".", "num_players", "(", ")", ")", ":", "self", ".", "_best_responses", "[", "player_id", "]", "=", "exploitability", ".", "best_response", "(", "self", ".", "_game", ",", "current_policy", ",", "player_id", ")" ]
https://github.com/deepmind/open_spiel/blob/4ca53bea32bb2875c7385d215424048ae92f78c8/open_spiel/python/algorithms/cfr_br.py#L99-L110
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_core.py
python
Window.GetFont
(*args, **kwargs)
return _core_.Window_GetFont(*args, **kwargs)
GetFont(self) -> Font

Returns the default font used for this window.
GetFont(self) -> Font
[ "GetFont", "(", "self", ")", "-", ">", "Font" ]
def GetFont(*args, **kwargs):
    """
    GetFont(self) -> Font

    Returns the default font used for this window.
    """
    return _core_.Window_GetFont(*args, **kwargs)
[ "def", "GetFont", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Window_GetFont", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L10990-L10996
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/py/py/_io/terminalwriter.py
python
TerminalWriter.width_of_current_line
(self)
return self._width_of_current_line
Return an estimate of the width so far in the current line.

.. versionadded:: 1.6.0

:rtype: int
Return an estimate of the width so far in the current line.
[ "Return", "an", "estimate", "of", "the", "width", "so", "far", "in", "the", "current", "line", "." ]
def width_of_current_line(self):
    """Return an estimate of the width so far in the current line.

    .. versionadded:: 1.6.0

    :rtype: int
    """
    return self._width_of_current_line
[ "def", "width_of_current_line", "(", "self", ")", ":", "return", "self", ".", "_width_of_current_line" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/py/py/_io/terminalwriter.py#L192-L199
grpc/grpc
27bc6fe7797e43298dc931b96dc57322d0852a9f
src/python/grpcio/grpc/_channel.py
python
_separate_channel_options
(options)
return python_options, core_options
Separates core channel options from Python channel options.
Separates core channel options from Python channel options.
[ "Separates", "core", "channel", "options", "from", "Python", "channel", "options", "." ]
def _separate_channel_options(options):
    """Separates core channel options from Python channel options."""
    core_options = []
    python_options = []
    for pair in options:
        if pair[0] == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream:
            python_options.append(pair)
        else:
            core_options.append(pair)
    return python_options, core_options
[ "def", "_separate_channel_options", "(", "options", ")", ":", "core_options", "=", "[", "]", "python_options", "=", "[", "]", "for", "pair", "in", "options", ":", "if", "pair", "[", "0", "]", "==", "grpc", ".", "experimental", ".", "ChannelOptions", ".", "SingleThreadedUnaryStream", ":", "python_options", ".", "append", "(", "pair", ")", "else", ":", "core_options", ".", "append", "(", "pair", ")", "return", "python_options", ",", "core_options" ]
https://github.com/grpc/grpc/blob/27bc6fe7797e43298dc931b96dc57322d0852a9f/src/python/grpcio/grpc/_channel.py#L1450-L1459
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
tools/auto_bisect/source_control.py
python
GetCommitTime
(git_revision, cwd=None)
return int(output)
Returns commit time for the given revision in UNIX timestamp.
Returns commit time for the given revision in UNIX timestamp.
[ "Returns", "commit", "time", "for", "the", "given", "revision", "in", "UNIX", "timestamp", "." ]
def GetCommitTime(git_revision, cwd=None):
  """Returns commit time for the given revision in UNIX timestamp."""
  cmd = ['log', '--format=%ct', '-1', git_revision]
  output = bisect_utils.CheckRunGit(cmd, cwd=cwd)
  return int(output)
[ "def", "GetCommitTime", "(", "git_revision", ",", "cwd", "=", "None", ")", ":", "cmd", "=", "[", "'log'", ",", "'--format=%ct'", ",", "'-1'", ",", "git_revision", "]", "output", "=", "bisect_utils", ".", "CheckRunGit", "(", "cmd", ",", "cwd", "=", "cwd", ")", "return", "int", "(", "output", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/auto_bisect/source_control.py#L151-L155
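The same query as a standalone subprocess call, without the bisect_utils wrapper (any commit-ish works as the revision):

import subprocess

def get_commit_time(git_revision, cwd=None):
    out = subprocess.check_output(
        ["git", "log", "--format=%ct", "-1", git_revision], cwd=cwd)
    return int(out)  # int() tolerates the trailing newline

# e.g. get_commit_time("HEAD") -> seconds since the Unix epoch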
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/protobuf/py3/google/protobuf/internal/python_message.py
python
_AddSerializePartialToStringMethod
(message_descriptor, cls)
Helper for _AddMessageMethods().
Helper for _AddMessageMethods().
[ "Helper", "for", "_AddMessageMethods", "()", "." ]
def _AddSerializePartialToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""

  def SerializePartialToString(self, **kwargs):
    out = BytesIO()
    self._InternalSerialize(out.write, **kwargs)
    return out.getvalue()
  cls.SerializePartialToString = SerializePartialToString

  def InternalSerialize(self, write_bytes, deterministic=None):
    if deterministic is None:
      deterministic = (
          api_implementation.IsPythonDefaultSerializationDeterministic())
    else:
      deterministic = bool(deterministic)

    descriptor = self.DESCRIPTOR
    if descriptor.GetOptions().map_entry:
      # Fields of map entry should always be serialized.
      descriptor.fields_by_name['key']._encoder(
          write_bytes, self.key, deterministic)
      descriptor.fields_by_name['value']._encoder(
          write_bytes, self.value, deterministic)
    else:
      for field_descriptor, field_value in self.ListFields():
        field_descriptor._encoder(write_bytes, field_value, deterministic)
      for tag_bytes, value_bytes in self._unknown_fields:
        write_bytes(tag_bytes)
        write_bytes(value_bytes)
  cls._InternalSerialize = InternalSerialize
[ "def", "_AddSerializePartialToStringMethod", "(", "message_descriptor", ",", "cls", ")", ":", "def", "SerializePartialToString", "(", "self", ",", "*", "*", "kwargs", ")", ":", "out", "=", "BytesIO", "(", ")", "self", ".", "_InternalSerialize", "(", "out", ".", "write", ",", "*", "*", "kwargs", ")", "return", "out", ".", "getvalue", "(", ")", "cls", ".", "SerializePartialToString", "=", "SerializePartialToString", "def", "InternalSerialize", "(", "self", ",", "write_bytes", ",", "deterministic", "=", "None", ")", ":", "if", "deterministic", "is", "None", ":", "deterministic", "=", "(", "api_implementation", ".", "IsPythonDefaultSerializationDeterministic", "(", ")", ")", "else", ":", "deterministic", "=", "bool", "(", "deterministic", ")", "descriptor", "=", "self", ".", "DESCRIPTOR", "if", "descriptor", ".", "GetOptions", "(", ")", ".", "map_entry", ":", "# Fields of map entry should always be serialized.", "descriptor", ".", "fields_by_name", "[", "'key'", "]", ".", "_encoder", "(", "write_bytes", ",", "self", ".", "key", ",", "deterministic", ")", "descriptor", ".", "fields_by_name", "[", "'value'", "]", ".", "_encoder", "(", "write_bytes", ",", "self", ".", "value", ",", "deterministic", ")", "else", ":", "for", "field_descriptor", ",", "field_value", "in", "self", ".", "ListFields", "(", ")", ":", "field_descriptor", ".", "_encoder", "(", "write_bytes", ",", "field_value", ",", "deterministic", ")", "for", "tag_bytes", ",", "value_bytes", "in", "self", ".", "_unknown_fields", ":", "write_bytes", "(", "tag_bytes", ")", "write_bytes", "(", "value_bytes", ")", "cls", ".", "_InternalSerialize", "=", "InternalSerialize" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/protobuf/py3/google/protobuf/internal/python_message.py#L1101-L1130
idaholab/moose
9eeebc65e098b4c30f8205fb41591fd5b61eb6ff
python/peacock/utils/TextSubWindow.py
python
TextSubWindow.sizeHint
(self, *args)
Return the saved size.
Return the saved size.
[ "Return", "the", "saved", "size", "." ]
def sizeHint(self, *args):
    """
    Return the saved size.
    """
    if self._size:
        return self._size
    else:
        return super(TextSubWindow, self).size()
[ "def", "sizeHint", "(", "self", ",", "*", "args", ")", ":", "if", "self", ".", "_size", ":", "return", "self", ".", "_size", "else", ":", "return", "super", "(", "TextSubWindow", ",", "self", ")", ".", "size", "(", ")" ]
https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/peacock/utils/TextSubWindow.py#L22-L29
psi4/psi4
be533f7f426b6ccc263904e55122899b16663395
psi4/driver/procrouting/response/scf_products.py
python
TDRSCFEngine._combine_H1_H2
(self, Fx, Jx, Kx=None)
return H1X, H2X
Build the combinations:
Singlet:
   H1 X = [(Ea - Ei) + 4J - K - K^T]X
   H2 X = [(Ea - Ei) - K + K^T]X
Triplet:
   H1 X = [(Ea - Ei) - K - K^T]X
   H2 X = [(Ea - Ei) - K + K^T]X
Build the combinations: Singlet: H1 X = [(Ea - Ei) + 4J - K - K^T]X H2 X = [(Ea - Ei) - K + K^T]X
[ "Build", "the", "combinations", ":", "Singlet", ":", "H1", "X", "=", "[", "(", "Ea", "-", "Ei", ")", "+", "4J", "-", "K", "-", "K^T", "]", "X", "H2", "X", "=", "[", "(", "Ea", "-", "Ei", ")", "-", "K", "+", "K^T", "]", "X" ]
def _combine_H1_H2(self, Fx, Jx, Kx=None):
    """Build the combinations:
    Singlet:
       H1 X = [(Ea - Ei) + 4J - K - K^T]X
       H2 X = [(Ea - Ei) - K + K^T]X
    Triplet:
       H1 X = [(Ea - Ei) - K - K^T]X
       H2 X = [(Ea - Ei) - K + K^T]X
    """
    H1X = []
    H2X = []
    if Kx is not None:
        for Fxi, Jxi, Kxi in zip(Fx, Jx, Kx):
            Kxit = self.vector_transpose(Kxi)
            # H1x = -K singlet/triplet
            H1X_so = self.vector_copy(Kxi)
            H1X_so = self.vector_scale(-1.0, H1X_so)
            # H1X -= K^T singlet/triplet
            H1X_so = self.vector_axpy(-1.0, Kxit, H1X_so)
            # H2x = K^T - K singlet/triplet
            H2X_so = self.vector_axpy(-1.0, Kxi, Kxit)
            if self.singlet:
                # H1x += 4*J (singlet only)
                H1X_so = self.vector_axpy(4.0, Jxi, H1X_so)
            # transform + add Ea-Ei
            H1X.append(self.vector_axpy(1.0, Fxi, self._so_to_mo(H1X_so)))
            H2X.append(self.vector_axpy(1.0, Fxi, self._so_to_mo(H2X_so)))
    else:
        for Fxi, Jxi in zip(Fx, Jx):
            if self.singlet:
                H1X_so = self.vector_scale(4.0, Jxi)
                H1X.append(self.vector_axpy(1.0, Fxi, self._so_to_mo(H1X_so)))
            else:
                H1X.append(self.vector_copy(Fxi))
            H2X.append(self.vector_copy(Fxi))
    return H1X, H2X
[ "def", "_combine_H1_H2", "(", "self", ",", "Fx", ",", "Jx", ",", "Kx", "=", "None", ")", ":", "H1X", "=", "[", "]", "H2X", "=", "[", "]", "if", "Kx", "is", "not", "None", ":", "for", "Fxi", ",", "Jxi", ",", "Kxi", "in", "zip", "(", "Fx", ",", "Jx", ",", "Kx", ")", ":", "Kxit", "=", "self", ".", "vector_transpose", "(", "Kxi", ")", "# H1x = -K singlet/triplet", "H1X_so", "=", "self", ".", "vector_copy", "(", "Kxi", ")", "H1X_so", "=", "self", ".", "vector_scale", "(", "-", "1.0", ",", "H1X_so", ")", "# H1X -= K^T singlet/triplet", "H1X_so", "=", "self", ".", "vector_axpy", "(", "-", "1.0", ",", "Kxit", ",", "H1X_so", ")", "# H2x = K^T - K singlet/triplet", "H2X_so", "=", "self", ".", "vector_axpy", "(", "-", "1.0", ",", "Kxi", ",", "Kxit", ")", "if", "self", ".", "singlet", ":", "# H1x += 4*J (singlet only)", "H1X_so", "=", "self", ".", "vector_axpy", "(", "4.0", ",", "Jxi", ",", "H1X_so", ")", "# transform + add Ea-Ei", "H1X", ".", "append", "(", "self", ".", "vector_axpy", "(", "1.0", ",", "Fxi", ",", "self", ".", "_so_to_mo", "(", "H1X_so", ")", ")", ")", "H2X", ".", "append", "(", "self", ".", "vector_axpy", "(", "1.0", ",", "Fxi", ",", "self", ".", "_so_to_mo", "(", "H2X_so", ")", ")", ")", "else", ":", "for", "Fxi", ",", "Jxi", "in", "zip", "(", "Fx", ",", "Jx", ")", ":", "if", "self", ".", "singlet", ":", "H1X_so", "=", "self", ".", "vector_scale", "(", "4.0", ",", "Jxi", ")", "H1X", ".", "append", "(", "self", ".", "vector_axpy", "(", "1.0", ",", "Fxi", ",", "self", ".", "_so_to_mo", "(", "H1X_so", ")", ")", ")", "else", ":", "H1X", ".", "append", "(", "self", ".", "vector_copy", "(", "Fxi", ")", ")", "H2X", ".", "append", "(", "self", ".", "vector_copy", "(", "Fxi", ")", ")", "return", "H1X", ",", "H2X" ]
https://github.com/psi4/psi4/blob/be533f7f426b6ccc263904e55122899b16663395/psi4/driver/procrouting/response/scf_products.py#L325-L364
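The docstring's combinations written out in LaTeX for readability; Ea - Ei is the orbital-energy difference applied in the MO basis after the SO-basis pieces are assembled:

\begin{aligned}
\text{singlet:}\quad H_1 X &= \bigl[(E_a - E_i) + 4J - K - K^{T}\bigr]X, &
H_2 X &= \bigl[(E_a - E_i) - K + K^{T}\bigr]X,\\
\text{triplet:}\quad H_1 X &= \bigl[(E_a - E_i) - K - K^{T}\bigr]X, &
H_2 X &= \bigl[(E_a - E_i) - K + K^{T}\bigr]X.
\end{aligned}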
apache/incubator-mxnet
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
python/mxnet/ndarray/numpy/_op.py
python
tril
(m, k=0)
return _api_internal.tril(m, k)
r""" Lower triangle of an array. Return a copy of an array with elements above the `k`-th diagonal zeroed. Parameters ---------- m : ndarray, shape (M, N) Input array. k : int, optional Diagonal above which to zero elements. `k = 0` (the default) is the main diagonal, `k < 0` is below it and `k > 0` is above. Returns ------- tril : ndarray, shape (M, N) Lower triangle of `m`, of same shape and data-type as `m`. See Also -------- triu : same thing, only for the upper triangle Examples -------- >>> a = np.array([[1,2,3],[4,5,6],[7,8,9],[10,11,12]]) >>> np.tril(a, -1) array([[ 0., 0., 0.], [ 4., 0., 0.], [ 7., 8., 0.], [10., 11., 12.]])
r""" Lower triangle of an array.
[ "r", "Lower", "triangle", "of", "an", "array", "." ]
def tril(m, k=0):
    r"""
    Lower triangle of an array.

    Return a copy of an array with elements above the `k`-th diagonal zeroed.

    Parameters
    ----------
    m : ndarray, shape (M, N)
        Input array.
    k : int, optional
        Diagonal above which to zero elements.  `k = 0` (the default) is the
        main diagonal, `k < 0` is below it and `k > 0` is above.

    Returns
    -------
    tril : ndarray, shape (M, N)
        Lower triangle of `m`, of same shape and data-type as `m`.

    See Also
    --------
    triu : same thing, only for the upper triangle

    Examples
    --------
    >>> a = np.array([[1,2,3],[4,5,6],[7,8,9],[10,11,12]])
    >>> np.tril(a, -1)
    array([[ 0.,  0.,  0.],
           [ 4.,  0.,  0.],
           [ 7.,  8.,  0.],
           [10., 11., 12.]])
    """
    return _api_internal.tril(m, k)
[ "def", "tril", "(", "m", ",", "k", "=", "0", ")", ":", "return", "_api_internal", ".", "tril", "(", "m", ",", "k", ")" ]
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/ndarray/numpy/_op.py#L2233-L2265
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/index/package_finder.py
python
PackageFinder.create
(
    cls,
    link_collector,  # type: LinkCollector
    selection_prefs,  # type: SelectionPreferences
    target_python=None,  # type: Optional[TargetPython]
)
return cls( candidate_prefs=candidate_prefs, link_collector=link_collector, target_python=target_python, allow_yanked=selection_prefs.allow_yanked, format_control=selection_prefs.format_control, ignore_requires_python=selection_prefs.ignore_requires_python, )
Create a PackageFinder.

:param selection_prefs: The candidate selection preferences, as a
    SelectionPreferences object.
:param target_python: The target Python interpreter to use when
    checking compatibility. If None (the default), a TargetPython
    object will be constructed from the running Python.
Create a PackageFinder.
[ "Create", "a", "PackageFinder", "." ]
def create(
    cls,
    link_collector,  # type: LinkCollector
    selection_prefs,  # type: SelectionPreferences
    target_python=None,  # type: Optional[TargetPython]
):
    # type: (...) -> PackageFinder
    """Create a PackageFinder.

    :param selection_prefs: The candidate selection preferences, as a
        SelectionPreferences object.
    :param target_python: The target Python interpreter to use when
        checking compatibility. If None (the default), a TargetPython
        object will be constructed from the running Python.
    """
    if target_python is None:
        target_python = TargetPython()

    candidate_prefs = CandidatePreferences(
        prefer_binary=selection_prefs.prefer_binary,
        allow_all_prereleases=selection_prefs.allow_all_prereleases,
    )

    return cls(
        candidate_prefs=candidate_prefs,
        link_collector=link_collector,
        target_python=target_python,
        allow_yanked=selection_prefs.allow_yanked,
        format_control=selection_prefs.format_control,
        ignore_requires_python=selection_prefs.ignore_requires_python,
    )
[ "def", "create", "(", "cls", ",", "link_collector", ",", "# type: LinkCollector", "selection_prefs", ",", "# type: SelectionPreferences", "target_python", "=", "None", ",", "# type: Optional[TargetPython]", ")", ":", "# type: (...) -> PackageFinder", "if", "target_python", "is", "None", ":", "target_python", "=", "TargetPython", "(", ")", "candidate_prefs", "=", "CandidatePreferences", "(", "prefer_binary", "=", "selection_prefs", ".", "prefer_binary", ",", "allow_all_prereleases", "=", "selection_prefs", ".", "allow_all_prereleases", ",", ")", "return", "cls", "(", "candidate_prefs", "=", "candidate_prefs", ",", "link_collector", "=", "link_collector", ",", "target_python", "=", "target_python", ",", "allow_yanked", "=", "selection_prefs", ".", "allow_yanked", ",", "format_control", "=", "selection_prefs", ".", "format_control", ",", "ignore_requires_python", "=", "selection_prefs", ".", "ignore_requires_python", ",", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/index/package_finder.py#L1245-L1305
RobotLocomotion/drake
0e18a34604c45ed65bc9018a54f7610f91cdad5b
bindings/pydrake/common/jupyter.py
python
process_ipywidget_events
(num_events_to_process=1)
Allows the kernel to process GUI events. This is required in order to process ipywidget updates inside a simulation loop.
Allows the kernel to process GUI events. This is required in order to process ipywidget updates inside a simulation loop.
[ "Allows", "the", "kernel", "to", "process", "GUI", "events", ".", "This", "is", "required", "in", "order", "to", "process", "ipywidget", "updates", "inside", "a", "simulation", "loop", "." ]
def process_ipywidget_events(num_events_to_process=1):
    """
    Allows the kernel to process GUI events. This is required in order to
    process ipywidget updates inside a simulation loop.
    """
    shell = get_ipython()
    # Ok to do nothing if running from console.
    if shell is None or not hasattr(shell, 'kernel'):
        return
    kernel = shell.kernel
    events = []
    old_handler = kernel.shell_handlers['execute_request']
    kernel.shell_handlers['execute_request'] = lambda *e: events.append(e)
    current_parent = (kernel._parent_ident, kernel._parent_header)

    for _ in range(num_events_to_process):
        # Ensure stdout still happens in the same cell.
        kernel.set_parent(*current_parent)
        kernel.do_one_iteration()
        kernel.set_parent(*current_parent)

    kernel.shell_handlers['execute_request'] = old_handler

    def _replay_events(shell, events):
        kernel = shell.kernel
        sys.stdout.flush()
        sys.stderr.flush()
        for stream, ident, parent in events:
            kernel.set_parent(ident, parent)
            kernel.execute_request(stream, ident, parent)

    if len(events) > 0:
        loop = asyncio.get_event_loop()
        if loop.is_running():
            loop.call_soon(lambda: _replay_events(shell, events))
        else:
            warn("One of your components is attempting to use pydrake's "
                 "process_ipywidget_events function. However, this IPython "
                 "kernel is not asyncio-based. This means the following:\n"
                 "  (1) Once your block cell is done executing, future cells "
                 "will *not* execute, but it may appear like they are still "
                 "executing ([*]).\n"
                 "  (2) Your Jupyter UI may break. If you find your UI to be "
                 "unresponsive, you may need to restart the UI itself.\n"
                 "To avoid this behavior, avoid requesting execution of "
                 "future cells before or during execution of this cell.")
[ "def", "process_ipywidget_events", "(", "num_events_to_process", "=", "1", ")", ":", "shell", "=", "get_ipython", "(", ")", "# Ok to do nothing if running from console.", "if", "shell", "is", "None", "or", "not", "hasattr", "(", "shell", ",", "'kernel'", ")", ":", "return", "kernel", "=", "shell", ".", "kernel", "events", "=", "[", "]", "old_handler", "=", "kernel", ".", "shell_handlers", "[", "'execute_request'", "]", "kernel", ".", "shell_handlers", "[", "'execute_request'", "]", "=", "lambda", "*", "e", ":", "events", ".", "append", "(", "e", ")", "current_parent", "=", "(", "kernel", ".", "_parent_ident", ",", "kernel", ".", "_parent_header", ")", "for", "_", "in", "range", "(", "num_events_to_process", ")", ":", "# Ensure stdout still happens in the same cell.", "kernel", ".", "set_parent", "(", "*", "current_parent", ")", "kernel", ".", "do_one_iteration", "(", ")", "kernel", ".", "set_parent", "(", "*", "current_parent", ")", "kernel", ".", "shell_handlers", "[", "'execute_request'", "]", "=", "old_handler", "def", "_replay_events", "(", "shell", ",", "events", ")", ":", "kernel", "=", "shell", ".", "kernel", "sys", ".", "stdout", ".", "flush", "(", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "for", "stream", ",", "ident", ",", "parent", "in", "events", ":", "kernel", ".", "set_parent", "(", "ident", ",", "parent", ")", "kernel", ".", "execute_request", "(", "stream", ",", "ident", ",", "parent", ")", "if", "len", "(", "events", ")", ">", "0", ":", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "if", "loop", ".", "is_running", "(", ")", ":", "loop", ".", "call_soon", "(", "lambda", ":", "_replay_events", "(", "shell", ",", "events", ")", ")", "else", ":", "warn", "(", "\"One of your components is attempting to use pydrake's \"", "\"process_ipywidget_events function. However, this IPython \"", "\"kernel is not asyncio-based. This means the following:\\n\"", "\" (1) Once your block cell is done executing, future cells \"", "\"will *not* execute, but it may appear like they are still \"", "\"executing ([*]).\\n\"", "\" (2) Your Jupyter UI may break. If you find your UI to be \"", "\"unresponsive, you may need to restart the UI itself.\\n\"", "\"To avoid this behavior, avoid requesting execution of \"", "\"future cells before or during execution of this cell.\"", ")" ]
https://github.com/RobotLocomotion/drake/blob/0e18a34604c45ed65bc9018a54f7610f91cdad5b/bindings/pydrake/common/jupyter.py#L25-L71
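A hedged sketch of calling the documented function from a pydrake simulation loop; the Simulator argument, its methods, and the step size are assumptions about the caller's setup:

from pydrake.common.jupyter import process_ipywidget_events

def advance_with_ui(simulator, t_final, dt=0.1):
    # Advance a (hypothetical) pydrake Simulator while keeping any
    # ipywidgets sliders/buttons responsive between steps.
    while simulator.get_context().get_time() < t_final:
        simulator.AdvanceTo(simulator.get_context().get_time() + dt)
        process_ipywidget_events()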
martinmoene/string-view-lite
6c2ba8672db54a355a6d76c820dd46ecda254038
script/create-vcpkg.py
python
createControl
( args )
Create vcpkg CONTROL file
Create vcpkg CONTROL file
[ "Create", "vcpkg", "CONTROL", "file" ]
def createControl( args ):
    """Create vcpkg CONTROL file"""
    output = tpl_vcpkg_control.format(
        prj=args.project, ver=args.version, desc=args.description )
    if args.verbose:
        print( "Creating control file '{f}':".format( f=control_path( args ) ) )
    if args.verbose > 1:
        print( output )
    os.makedirs( os.path.dirname( control_path( args ) ), exist_ok=True )
    with open( control_path( args ), 'w') as f:
        print( output, file=f )
[ "def", "createControl", "(", "args", ")", ":", "output", "=", "tpl_vcpkg_control", ".", "format", "(", "prj", "=", "args", ".", "project", ",", "ver", "=", "args", ".", "version", ",", "desc", "=", "args", ".", "description", ")", "if", "args", ".", "verbose", ":", "print", "(", "\"Creating control file '{f}':\"", ".", "format", "(", "f", "=", "control_path", "(", "args", ")", ")", ")", "if", "args", ".", "verbose", ">", "1", ":", "print", "(", "output", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "control_path", "(", "args", ")", ")", ",", "exist_ok", "=", "True", ")", "with", "open", "(", "control_path", "(", "args", ")", ",", "'w'", ")", "as", "f", ":", "print", "(", "output", ",", "file", "=", "f", ")" ]
https://github.com/martinmoene/string-view-lite/blob/6c2ba8672db54a355a6d76c820dd46ecda254038/script/create-vcpkg.py#L122-L132
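A minimal driving sketch for the record above, with the module's tpl_vcpkg_control template and control_path helper replaced by simplified stand-ins (the template text, port layout, and version string here are illustrative, not string-view-lite's real values):

```python
import os
from types import SimpleNamespace

# Simplified stand-ins for the script's module-level template and path helper.
tpl_vcpkg_control = (
    "Source: {prj}\n"
    "Version: {ver}\n"
    "Description: {desc}\n"
)

def control_path(args):
    # Hypothetical ports/<project>/CONTROL layout, as vcpkg expects.
    return os.path.join("ports", args.project, "CONTROL")

def create_control(args):
    output = tpl_vcpkg_control.format(
        prj=args.project, ver=args.version, desc=args.description)
    os.makedirs(os.path.dirname(control_path(args)), exist_ok=True)
    with open(control_path(args), "w") as f:
        print(output, file=f)

create_control(SimpleNamespace(
    project="string-view-lite",
    version="1.0.0",                      # illustrative version
    description="string_view for C++98 and later"))
```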
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/graph_editor/subgraph.py
python
SubGraphView.__bool__
(self)
return self._graph is not None
Allows for implicit boolean conversion.
Allows for implicit boolean conversion.
[ "Allows", "for", "implicit", "boolean", "conversion", "." ]
def __bool__(self): """Allows for implicit boolean conversion.""" return self._graph is not None
[ "def", "__bool__", "(", "self", ")", ":", "return", "self", ".", "_graph", "is", "not", "None" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/graph_editor/subgraph.py#L518-L520
larroy/clearskies_core
3574ddf0edc8555454c7044126e786a6c29444dc
tools/gyp/pylib/gyp/generator/ninja.py
python
NinjaWriter.WriteNewNinjaRule
(self, name, args, description, is_cygwin, env)
return rule_name, args
Write out a new ninja "rule" statement for a given command. Returns the name of the new rule, and a copy of |args| with variables expanded.
Write out a new ninja "rule" statement for a given command.
[ "Write", "out", "a", "new", "ninja", "rule", "statement", "for", "a", "given", "command", "." ]
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env): """Write out a new ninja "rule" statement for a given command. Returns the name of the new rule, and a copy of |args| with variables expanded.""" if self.flavor == 'win': args = [self.msvs_settings.ConvertVSMacros( arg, self.base_to_build, config=self.config_name) for arg in args] description = self.msvs_settings.ConvertVSMacros( description, config=self.config_name) elif self.flavor == 'mac': # |env| is an empty list on non-mac. args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args] description = gyp.xcode_emulation.ExpandEnvVars(description, env) # TODO: we shouldn't need to qualify names; we do it because # currently the ninja rule namespace is global, but it really # should be scoped to the subninja. rule_name = self.name if self.toolset == 'target': rule_name += '.' + self.toolset rule_name += '.' + name rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name) # Remove variable references, but not if they refer to the magic rule # variables. This is not quite right, as it also protects these for # actions, not just for rules where they are valid. Good enough. protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ] protect = '(?!' + '|'.join(map(re.escape, protect)) + ')' description = re.sub(protect + r'\$', '_', description) # gyp dictates that commands are run from the base directory. # cd into the directory before running, and adjust paths in # the arguments to point to the proper locations. rspfile = None rspfile_content = None args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args] if self.flavor == 'win': rspfile = rule_name + '.$unique_name.rsp' # The cygwin case handles this inside the bash sub-shell. run_in = '' if is_cygwin else ' ' + self.build_to_base if is_cygwin: rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine( args, self.build_to_base) else: rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args) command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable + rspfile + run_in) else: env = self.ComputeExportEnvString(env) command = gyp.common.EncodePOSIXShellList(args) command = 'cd %s; ' % self.build_to_base + env + command # GYP rules/actions express being no-ops by not touching their outputs. # Avoid executing downstream dependencies in this case by specifying # restat=1 to ninja. self.ninja.rule(rule_name, command, description, restat=True, rspfile=rspfile, rspfile_content=rspfile_content) self.ninja.newline() return rule_name, args
[ "def", "WriteNewNinjaRule", "(", "self", ",", "name", ",", "args", ",", "description", ",", "is_cygwin", ",", "env", ")", ":", "if", "self", ".", "flavor", "==", "'win'", ":", "args", "=", "[", "self", ".", "msvs_settings", ".", "ConvertVSMacros", "(", "arg", ",", "self", ".", "base_to_build", ",", "config", "=", "self", ".", "config_name", ")", "for", "arg", "in", "args", "]", "description", "=", "self", ".", "msvs_settings", ".", "ConvertVSMacros", "(", "description", ",", "config", "=", "self", ".", "config_name", ")", "elif", "self", ".", "flavor", "==", "'mac'", ":", "# |env| is an empty list on non-mac.", "args", "=", "[", "gyp", ".", "xcode_emulation", ".", "ExpandEnvVars", "(", "arg", ",", "env", ")", "for", "arg", "in", "args", "]", "description", "=", "gyp", ".", "xcode_emulation", ".", "ExpandEnvVars", "(", "description", ",", "env", ")", "# TODO: we shouldn't need to qualify names; we do it because", "# currently the ninja rule namespace is global, but it really", "# should be scoped to the subninja.", "rule_name", "=", "self", ".", "name", "if", "self", ".", "toolset", "==", "'target'", ":", "rule_name", "+=", "'.'", "+", "self", ".", "toolset", "rule_name", "+=", "'.'", "+", "name", "rule_name", "=", "re", ".", "sub", "(", "'[^a-zA-Z0-9_]'", ",", "'_'", ",", "rule_name", ")", "# Remove variable references, but not if they refer to the magic rule", "# variables. This is not quite right, as it also protects these for", "# actions, not just for rules where they are valid. Good enough.", "protect", "=", "[", "'${root}'", ",", "'${dirname}'", ",", "'${source}'", ",", "'${ext}'", ",", "'${name}'", "]", "protect", "=", "'(?!'", "+", "'|'", ".", "join", "(", "map", "(", "re", ".", "escape", ",", "protect", ")", ")", "+", "')'", "description", "=", "re", ".", "sub", "(", "protect", "+", "r'\\$'", ",", "'_'", ",", "description", ")", "# gyp dictates that commands are run from the base directory.", "# cd into the directory before running, and adjust paths in", "# the arguments to point to the proper locations.", "rspfile", "=", "None", "rspfile_content", "=", "None", "args", "=", "[", "self", ".", "ExpandSpecial", "(", "arg", ",", "self", ".", "base_to_build", ")", "for", "arg", "in", "args", "]", "if", "self", ".", "flavor", "==", "'win'", ":", "rspfile", "=", "rule_name", "+", "'.$unique_name.rsp'", "# The cygwin case handles this inside the bash sub-shell.", "run_in", "=", "''", "if", "is_cygwin", "else", "' '", "+", "self", ".", "build_to_base", "if", "is_cygwin", ":", "rspfile_content", "=", "self", ".", "msvs_settings", ".", "BuildCygwinBashCommandLine", "(", "args", ",", "self", ".", "build_to_base", ")", "else", ":", "rspfile_content", "=", "gyp", ".", "msvs_emulation", ".", "EncodeRspFileList", "(", "args", ")", "command", "=", "(", "'%s gyp-win-tool action-wrapper $arch '", "%", "sys", ".", "executable", "+", "rspfile", "+", "run_in", ")", "else", ":", "env", "=", "self", ".", "ComputeExportEnvString", "(", "env", ")", "command", "=", "gyp", ".", "common", ".", "EncodePOSIXShellList", "(", "args", ")", "command", "=", "'cd %s; '", "%", "self", ".", "build_to_base", "+", "env", "+", "command", "# GYP rules/actions express being no-ops by not touching their outputs.", "# Avoid executing downstream dependencies in this case by specifying", "# restat=1 to ninja.", "self", ".", "ninja", ".", "rule", "(", "rule_name", ",", "command", ",", "description", ",", "restat", "=", "True", ",", "rspfile", "=", "rspfile", ",", "rspfile_content", "=", "rspfile_content", ")", "self", ".", 
"ninja", ".", "newline", "(", ")", "return", "rule_name", ",", "args" ]
https://github.com/larroy/clearskies_core/blob/3574ddf0edc8555454c7044126e786a6c29444dc/tools/gyp/pylib/gyp/generator/ninja.py#L1383-L1445
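Two of the transformations this method applies are easy to isolate: qualifying and sanitizing the rule name, and neutralizing '$' references in the description except for the magic rule variables. A standalone sketch using the same regexes as the code above:

```python
import re

def make_rule_name(target_name, toolset, rule):
    # Qualify the name as WriteNewNinjaRule does, then sanitize it.
    name = target_name
    if toolset == 'target':
        name += '.' + toolset
    name += '.' + rule
    return re.sub('[^a-zA-Z0-9_]', '_', name)

# Replace '$' in the description unless it starts a protected rule variable.
protect = ['${root}', '${dirname}', '${source}', '${ext}', '${name}']
protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'

print(make_rule_name('my-lib', 'target', 'idl'))          # my_lib_target_idl
print(re.sub(protect + r'\$', '_', 'RULE ${name} $out'))  # RULE ${name} _out
```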
MythTV/mythtv
d282a209cb8be85d036f85a62a8ec971b67d45f4
mythtv/programs/scripts/internetcontent/nv_python_libs/vimeo/vimeo_api.py
python
SimpleOAuthClient._fetch_token
(self, oauth_request)
return oauth.OAuthToken.from_string(ans)
Sends a request and interprets the result as a token string.
Sends a request and interprets the result as a token string.
[ "Sends", "a", "request", "and", "interprets", "the", "result", "as", "a", "token", "string", "." ]
def _fetch_token(self, oauth_request): """ Sends a request and interprets the result as a token string. """ ans = self.curly.do_request(oauth_request.to_url()) return oauth.OAuthToken.from_string(ans)
[ "def", "_fetch_token", "(", "self", ",", "oauth_request", ")", ":", "ans", "=", "self", ".", "curly", ".", "do_request", "(", "oauth_request", ".", "to_url", "(", ")", ")", "return", "oauth", ".", "OAuthToken", ".", "from_string", "(", "ans", ")" ]
https://github.com/MythTV/mythtv/blob/d282a209cb8be85d036f85a62a8ec971b67d45f4/mythtv/programs/scripts/internetcontent/nv_python_libs/vimeo/vimeo_api.py#L278-L283
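For context, the legacy oauth module's OAuthToken.from_string simply parses a urlencoded response body; what that amounts to, sketched with only the standard library (the response string is illustrative):

```python
from urllib.parse import parse_qs

ans = "oauth_token=abc123&oauth_token_secret=s3cret"  # illustrative response body
params = parse_qs(ans)
token_key = params["oauth_token"][0]
token_secret = params["oauth_token_secret"][0]
print(token_key, token_secret)  # abc123 s3cret
```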
bundy-dns/bundy
3d41934996b82b0cd2fe22dd74d2abc1daba835d
src/lib/python/bundy/notify/notify_out.py
python
NotifyOut._prepare_select_info
(self)
return (block_timeout, valid_socks, notifying_zones)
Prepare the information for select(); the returned value is a tuple (block_timeout, valid_socks, notifying_zones). block_timeout: the timeout for select(). valid_socks: the list of sockets to wait on for reading. notifying_zones: the zones for which notify has been triggered.
Prepare the information for select(); the returned value is a tuple (block_timeout, valid_socks, notifying_zones). block_timeout: the timeout for select(). valid_socks: the list of sockets to wait on for reading. notifying_zones: the zones for which notify has been triggered.
[ "Prepare", "the", "information", "for", "select", "()", ";", "the", "returned", "value", "is", "a", "tuple", "(", "block_timeout", "valid_socks", "notifying_zones", ")", ".", "block_timeout", ":", "the", "timeout", "for", "select", "()", ".", "valid_socks", ":", "the", "list", "of", "sockets", "to", "wait", "on", "for", "reading", ".", "notifying_zones", ":", "the", "zones", "for", "which", "notify", "has", "been", "triggered", "." ]
def _prepare_select_info(self): ''' Prepare the information for select(), returned value is one tuple (block_timeout, valid_socks, notifying_zones) block_timeout: the timeout for select() valid_socks: sockets list for waiting ready reading. notifying_zones: the zones which have been triggered for notify. ''' valid_socks = [] notifying_zones = {} min_timeout = None for info in self._notify_infos: sock = self._notify_infos[info].get_socket() if sock: valid_socks.append(sock) # If a non null timeout is specified notify has been scheduled # (in which case socket is still None) or sent (with a valid # socket). In either case we need add the zone to notifying_zones # so that we can invoke the appropriate event for the zone after # select. tmp_timeout = self._notify_infos[info].notify_timeout if tmp_timeout is not None: notifying_zones[info] = self._notify_infos[info] if min_timeout is not None: if tmp_timeout < min_timeout: min_timeout = tmp_timeout else: min_timeout = tmp_timeout block_timeout = None if min_timeout is not None: block_timeout = min_timeout - time.time() if block_timeout < 0: block_timeout = 0 return (block_timeout, valid_socks, notifying_zones)
[ "def", "_prepare_select_info", "(", "self", ")", ":", "valid_socks", "=", "[", "]", "notifying_zones", "=", "{", "}", "min_timeout", "=", "None", "for", "info", "in", "self", ".", "_notify_infos", ":", "sock", "=", "self", ".", "_notify_infos", "[", "info", "]", ".", "get_socket", "(", ")", "if", "sock", ":", "valid_socks", ".", "append", "(", "sock", ")", "# If a non null timeout is specified notify has been scheduled", "# (in which case socket is still None) or sent (with a valid", "# socket). In either case we need add the zone to notifying_zones", "# so that we can invoke the appropriate event for the zone after", "# select.", "tmp_timeout", "=", "self", ".", "_notify_infos", "[", "info", "]", ".", "notify_timeout", "if", "tmp_timeout", "is", "not", "None", ":", "notifying_zones", "[", "info", "]", "=", "self", ".", "_notify_infos", "[", "info", "]", "if", "min_timeout", "is", "not", "None", ":", "if", "tmp_timeout", "<", "min_timeout", ":", "min_timeout", "=", "tmp_timeout", "else", ":", "min_timeout", "=", "tmp_timeout", "block_timeout", "=", "None", "if", "min_timeout", "is", "not", "None", ":", "block_timeout", "=", "min_timeout", "-", "time", ".", "time", "(", ")", "if", "block_timeout", "<", "0", ":", "block_timeout", "=", "0", "return", "(", "block_timeout", ",", "valid_socks", ",", "notifying_zones", ")" ]
https://github.com/bundy-dns/bundy/blob/3d41934996b82b0cd2fe22dd74d2abc1daba835d/src/lib/python/bundy/notify/notify_out.py#L335-L373
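The timeout arithmetic above reduces to: take the earliest scheduled notify time, convert it to a relative timeout for select(), and clamp at zero once overdue. A self-contained sketch (the sample timeouts are illustrative):

```python
import time

# Scheduled notify times (absolute); None means nothing scheduled for that zone.
notify_timeouts = [time.time() + 5.0, time.time() + 1.5, None]

min_timeout = None
for t in notify_timeouts:
    if t is not None and (min_timeout is None or t < min_timeout):
        min_timeout = t

block_timeout = None
if min_timeout is not None:
    # Relative timeout for select(), clamped at zero when already overdue.
    block_timeout = max(min_timeout - time.time(), 0)

print(block_timeout)  # roughly 1.5
```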
TimoSaemann/caffe-segnet-cudnn5
abcf30dca449245e101bf4ced519f716177f0885
scripts/cpp_lint.py
python
CloseExpression
(clean_lines, linenum, pos)
return (line, clean_lines.NumLines(), -1)
If input points to ( or { or [ or <, finds the position that closes it. If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the linenum/pos that correspond to the closing of the expression. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *past* the closing brace, or (line, len(lines), -1) if we never find a close. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum.
If input points to ( or { or [ or <, finds the position that closes it.
[ "If", "input", "points", "to", "(", "or", "{", "or", "[", "or", "<", "finds", "the", "position", "that", "closes", "it", "." ]
def CloseExpression(clean_lines, linenum, pos): """If input points to ( or { or [ or <, finds the position that closes it. If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the linenum/pos that correspond to the closing of the expression. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *past* the closing brace, or (line, len(lines), -1) if we never find a close. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum. """ line = clean_lines.elided[linenum] startchar = line[pos] if startchar not in '({[<': return (line, clean_lines.NumLines(), -1) if startchar == '(': endchar = ')' if startchar == '[': endchar = ']' if startchar == '{': endchar = '}' if startchar == '<': endchar = '>' # Check first line (end_pos, num_open) = FindEndOfExpressionInLine( line, pos, 0, startchar, endchar) if end_pos > -1: return (line, linenum, end_pos) # Continue scanning forward while linenum < clean_lines.NumLines() - 1: linenum += 1 line = clean_lines.elided[linenum] (end_pos, num_open) = FindEndOfExpressionInLine( line, 0, num_open, startchar, endchar) if end_pos > -1: return (line, linenum, end_pos) # Did not find endchar before end of file, give up return (line, clean_lines.NumLines(), -1)
[ "def", "CloseExpression", "(", "clean_lines", ",", "linenum", ",", "pos", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "startchar", "=", "line", "[", "pos", "]", "if", "startchar", "not", "in", "'({[<'", ":", "return", "(", "line", ",", "clean_lines", ".", "NumLines", "(", ")", ",", "-", "1", ")", "if", "startchar", "==", "'('", ":", "endchar", "=", "')'", "if", "startchar", "==", "'['", ":", "endchar", "=", "']'", "if", "startchar", "==", "'{'", ":", "endchar", "=", "'}'", "if", "startchar", "==", "'<'", ":", "endchar", "=", "'>'", "# Check first line", "(", "end_pos", ",", "num_open", ")", "=", "FindEndOfExpressionInLine", "(", "line", ",", "pos", ",", "0", ",", "startchar", ",", "endchar", ")", "if", "end_pos", ">", "-", "1", ":", "return", "(", "line", ",", "linenum", ",", "end_pos", ")", "# Continue scanning forward", "while", "linenum", "<", "clean_lines", ".", "NumLines", "(", ")", "-", "1", ":", "linenum", "+=", "1", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "(", "end_pos", ",", "num_open", ")", "=", "FindEndOfExpressionInLine", "(", "line", ",", "0", ",", "num_open", ",", "startchar", ",", "endchar", ")", "if", "end_pos", ">", "-", "1", ":", "return", "(", "line", ",", "linenum", ",", "end_pos", ")", "# Did not find endchar before end of file, give up", "return", "(", "line", ",", "clean_lines", ".", "NumLines", "(", ")", ",", "-", "1", ")" ]
https://github.com/TimoSaemann/caffe-segnet-cudnn5/blob/abcf30dca449245e101bf4ced519f716177f0885/scripts/cpp_lint.py#L1254-L1297
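A simplified, self-contained version of the same forward scan, without cpplint's CleansedLines wrapper or its string/comment elision (a sketch of the algorithm, not a drop-in replacement):

```python
def close_expression(lines, linenum, pos):
    # Walk forward from an opener, tracking nesting depth until it closes.
    pairs = {'(': ')', '[': ']', '{': '}', '<': '>'}
    startchar = lines[linenum][pos]
    endchar = pairs[startchar]
    depth = 0
    for ln in range(linenum, len(lines)):
        start = pos if ln == linenum else 0
        for col in range(start, len(lines[ln])):
            c = lines[ln][col]
            if c == startchar:
                depth += 1
            elif c == endchar:
                depth -= 1
                if depth == 0:
                    return (ln, col + 1)  # position *past* the closer
    return (len(lines), -1)               # never closed

print(close_expression(["foo(bar,", "  baz(1))"], 0, 3))  # (1, 9)
```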
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
lts/deps/v8/tools/stats-viewer.py
python
ChromeCounter.__init__
(self, data, name_offset, value_offset)
Create a new instance. Args: data: the shared data access object containing the counter name_offset: the byte offset of the start of this counter's name value_offset: the byte offset of the start of this counter's value
Create a new instance.
[ "Create", "a", "new", "instance", "." ]
def __init__(self, data, name_offset, value_offset): """Create a new instance. Args: data: the shared data access object containing the counter name_offset: the byte offset of the start of this counter's name value_offset: the byte offset of the start of this counter's value """ self.data = data self.name_offset = name_offset self.value_offset = value_offset
[ "def", "__init__", "(", "self", ",", "data", ",", "name_offset", ",", "value_offset", ")", ":", "self", ".", "data", "=", "data", "self", ".", "name_offset", "=", "name_offset", "self", ".", "value_offset", "=", "value_offset" ]
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/deps/v8/tools/stats-viewer.py#L389-L399
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
tools/cr/cr/actions/builder.py
python
Builder.Clean
(self, context, targets, arguments)
Clean temporary files built by a target.
Clean temporary files built by a target.
[ "Clean", "temporary", "files", "built", "by", "a", "target", "." ]
def Clean(self, context, targets, arguments): """Clean temporary files built by a target.""" raise NotImplementedError('Must be overridden.')
[ "def", "Clean", "(", "self", ",", "context", ",", "targets", ",", "arguments", ")", ":", "raise", "NotImplementedError", "(", "'Must be overridden.'", ")" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/cr/cr/actions/builder.py#L33-L35
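Clean is an abstract hook; concrete builders override it. A hypothetical sketch (NinjaBuilder and its clean command are illustrative, not cr's actual subclass):

```python
import subprocess

class Builder:
    def Clean(self, context, targets, arguments):
        raise NotImplementedError('Must be overridden.')

class NinjaBuilder(Builder):
    def Clean(self, context, targets, arguments):
        # 'ninja -t clean <target>' removes the outputs a target produced.
        for target in targets:
            subprocess.check_call(['ninja', '-t', 'clean', target])
```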
v8mips/v8mips
f0c9cc0bbfd461c7f516799d9a58e9a7395f737e
build/landmine_utils.py
python
platform
()
Returns a string representing the platform this build is targeted for. Possible values: 'win', 'mac', 'linux', 'ios', 'android'
Returns a string representing the platform this build is targeted for. Possible values: 'win', 'mac', 'linux', 'ios', 'android'
[ "Returns", "a", "string", "representing", "the", "platform", "this", "build", "is", "targeted", "for", ".", "Possible", "values", ":", "win", "mac", "linux", "ios", "android" ]
def platform(): """ Returns a string representing the platform this build is targeted for. Possible values: 'win', 'mac', 'linux', 'ios', 'android' """ if 'OS' in gyp_defines(): if 'android' in gyp_defines()['OS']: return 'android' else: return gyp_defines()['OS'] elif IsWindows(): return 'win' elif IsLinux(): return 'linux' else: return 'mac'
[ "def", "platform", "(", ")", ":", "if", "'OS'", "in", "gyp_defines", "(", ")", ":", "if", "'android'", "in", "gyp_defines", "(", ")", "[", "'OS'", "]", ":", "return", "'android'", "else", ":", "return", "gyp_defines", "(", ")", "[", "'OS'", "]", "elif", "IsWindows", "(", ")", ":", "return", "'win'", "elif", "IsLinux", "(", ")", ":", "return", "'linux'", "else", ":", "return", "'mac'" ]
https://github.com/v8mips/v8mips/blob/f0c9cc0bbfd461c7f516799d9a58e9a7395f737e/build/landmine_utils.py#L68-L83
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILLReduction.py
python
DirectILLReduction.summary
(self)
return 'Data reduction workflow for the direct geometry time-of-flight spectrometers at ILL.'
Return a summary of the algorithm.
Return a summary of the algorithm.
[ "Return", "a", "summary", "of", "the", "algorithm", "." ]
def summary(self): """Return a summary of the algorithm.""" return 'Data reduction workflow for the direct geometry time-of-flight spectrometers at ILL.'
[ "def", "summary", "(", "self", ")", ":", "return", "'Data reduction workflow for the direct geometry time-of-flight spectrometers at ILL.'" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILLReduction.py#L246-L248
hfinkel/llvm-project-cxxjit
91084ef018240bbb8e24235ff5cd8c355a9c1a1e
clang/bindings/python/clang/cindex.py
python
Cursor.availability
(self)
return AvailabilityKind.from_id(self._availability)
Retrieves the availability of the entity pointed at by the cursor.
Retrieves the availability of the entity pointed at by the cursor.
[ "Retrieves", "the", "availability", "of", "the", "entity", "pointed", "at", "by", "the", "cursor", "." ]
def availability(self): """ Retrieves the availability of the entity pointed at by the cursor. """ if not hasattr(self, '_availability'): self._availability = conf.lib.clang_getCursorAvailability(self) return AvailabilityKind.from_id(self._availability)
[ "def", "availability", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_availability'", ")", ":", "self", ".", "_availability", "=", "conf", ".", "lib", ".", "clang_getCursorAvailability", "(", "self", ")", "return", "AvailabilityKind", ".", "from_id", "(", "self", ".", "_availability", ")" ]
https://github.com/hfinkel/llvm-project-cxxjit/blob/91084ef018240bbb8e24235ff5cd8c355a9c1a1e/clang/bindings/python/clang/cindex.py#L1620-L1627
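Hedged usage sketch: with libclang installed and discoverable by the bindings, availability can be read off any cursor (the source snippet is illustrative):

```python
import clang.cindex  # needs libclang discoverable, e.g. via clang.cindex.Config

index = clang.cindex.Index.create()
tu = index.parse('example.c', unsaved_files=[('example.c',
    'int live(void); int dead(void) __attribute__((unavailable));')])
for cursor in tu.cursor.walk_preorder():
    if cursor.kind == clang.cindex.CursorKind.FUNCTION_DECL:
        print(cursor.spelling, cursor.availability)
```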
RobotLocomotion/drake
0e18a34604c45ed65bc9018a54f7610f91cdad5b
doc/pydrake/pydrake_sphinx_extension.py
python
repair_naive_name_split
(objpath)
return out
Rejoins any strings with braces that were naively split across '.'.
Rejoins any strings with braces that were naively split across '.'.
[ "Rejoins", "any", "strings", "with", "braces", "that", "were", "naively", "split", "across", ".", "." ]
def repair_naive_name_split(objpath): """Rejoins any strings with braces that were naively split across '.'. """ num_open = 0 out = [] cur = '' for p in objpath: num_open += p.count('[') - p.count(']') assert num_open >= 0 if cur: cur += '.' cur += p if num_open == 0: out.append(cur) cur = '' assert len(cur) == 0, (objpath, cur, out) return out
[ "def", "repair_naive_name_split", "(", "objpath", ")", ":", "num_open", "=", "0", "out", "=", "[", "]", "cur", "=", "''", "for", "p", "in", "objpath", ":", "num_open", "+=", "p", ".", "count", "(", "'['", ")", "-", "p", ".", "count", "(", "']'", ")", "assert", "num_open", ">=", "0", "if", "cur", ":", "cur", "+=", "'.'", "cur", "+=", "p", "if", "num_open", "==", "0", ":", "out", ".", "append", "(", "cur", ")", "cur", "=", "''", "assert", "len", "(", "cur", ")", "==", "0", ",", "(", "objpath", ",", "cur", ",", "out", ")", "return", "out" ]
https://github.com/RobotLocomotion/drake/blob/0e18a34604c45ed65bc9018a54f7610f91cdad5b/doc/pydrake/pydrake_sphinx_extension.py#L49-L65
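A worked example of the rejoin: a templated name split naively on '.' gets its bracketed segment, dots and all, reassembled (the input string is illustrative):

```python
def repair_naive_name_split(objpath):
    num_open = 0
    out, cur = [], ''
    for p in objpath:
        num_open += p.count('[') - p.count(']')
        assert num_open >= 0
        if cur:
            cur += '.'
        cur += p
        if num_open == 0:
            out.append(cur)
            cur = ''
    assert not cur
    return out

pieces = 'pydrake.systems.framework.LeafSystem_[numpy.float64].DoPublish'.split('.')
print(repair_naive_name_split(pieces))
# ['pydrake', 'systems', 'framework', 'LeafSystem_[numpy.float64]', 'DoPublish']
```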
CNevd/Difacto_DMLC
f16862e35062707b1cf7e37d04d9b6ae34bbfd28
dmlc-core/scripts/lint3.py
python
LintHelper.process_cpp
(self, path, suffix)
Process a cpp file.
Process a cpp file.
[ "Process", "a", "cpp", "file", "." ]
def process_cpp(self, path, suffix): """Process a cpp file.""" _cpplint_state.ResetErrorCounts() cpplint.ProcessFile(str(path), _cpplint_state.verbose_level) _cpplint_state.PrintErrorCounts() errors = _cpplint_state.errors_by_category.copy() if suffix == 'h': self.cpp_header_map[str(path)] = errors else: self.cpp_src_map[str(path)] = errors
[ "def", "process_cpp", "(", "self", ",", "path", ",", "suffix", ")", ":", "_cpplint_state", ".", "ResetErrorCounts", "(", ")", "cpplint", ".", "ProcessFile", "(", "str", "(", "path", ")", ",", "_cpplint_state", ".", "verbose_level", ")", "_cpplint_state", ".", "PrintErrorCounts", "(", ")", "errors", "=", "_cpplint_state", ".", "errors_by_category", ".", "copy", "(", ")", "if", "suffix", "==", "'h'", ":", "self", ".", "cpp_header_map", "[", "str", "(", "path", ")", "]", "=", "errors", "else", ":", "self", ".", "cpp_src_map", "[", "str", "(", "path", ")", "]", "=", "errors" ]
https://github.com/CNevd/Difacto_DMLC/blob/f16862e35062707b1cf7e37d04d9b6ae34bbfd28/dmlc-core/scripts/lint3.py#L59-L68
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/beautifulsoup4/bs4/element.py
python
PageElement.find_previous
(self, name=None, attrs={}, text=None, **kwargs)
return self._find_one( self.find_all_previous, name, attrs, text, **kwargs)
Returns the first item that matches the given criteria and appears before this Tag in the document.
Returns the first item that matches the given criteria and appears before this Tag in the document.
[ "Returns", "the", "first", "item", "that", "matches", "the", "given", "criteria", "and", "appears", "before", "this", "Tag", "in", "the", "document", "." ]
def find_previous(self, name=None, attrs={}, text=None, **kwargs): """Returns the first item that matches the given criteria and appears before this Tag in the document.""" return self._find_one( self.find_all_previous, name, attrs, text, **kwargs)
[ "def", "find_previous", "(", "self", ",", "name", "=", "None", ",", "attrs", "=", "{", "}", ",", "text", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_find_one", "(", "self", ".", "find_all_previous", ",", "name", ",", "attrs", ",", "text", ",", "*", "*", "kwargs", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/beautifulsoup4/bs4/element.py#L408-L412
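Typical find_previous usage with beautifulsoup4, searching backwards from a located tag (the markup is illustrative):

```python
from bs4 import BeautifulSoup

soup = BeautifulSoup(
    '<h1>Title</h1><p>intro</p><h2>Section</h2><p id="x">body</p>',
    'html.parser')
p = soup.find('p', id='x')
print(p.find_previous('h2').text)  # Section
print(p.find_previous('h1').text)  # Title
```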
RamadhanAmizudin/malware
2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1
Fuzzbunch/fuzzbunch/redirection.py
python
RedirectionManager.redirect_local
(self, l, plugin, session_data)
targetip = Destination IP (on the target) targetport = Destination Port (on the target) redirip = IP of the LP redirport = Port on the LP
targetip = Destination IP (on the target) targetport = Destination Port (on the target) redirip = IP of the LP redirport = Port on the LP
[ "targetip", "=", "Destination", "IP", "(", "on", "the", "target", ")", "targetport", "=", "Destination", "Port", "(", "on", "the", "target", ")", "redirip", "=", "IP", "of", "the", "LP", "redirport", "=", "Port", "on", "the", "LP" ]
def redirect_local(self, l, plugin, session_data): """ targetip = Destination IP (on the target) targetport = Destination Port (on the target) redirip = IP of the LP redirport = Port on the LP """ # listenaddr - name of variable containing the LP IP # listenport - name of variable containing the LP Port # destaddr - name of variable containing the Target IP # destport - name of variable containing the Target Port # targetip - IP of the target # targetport - Port of the target # redirip - IP of the LP # redirport - Port of the LP params = iDict(plugin.getParameters()) # Get the defaults for the user prompt laddr = self.conv_param(l.listenaddr, params, session_data['params']) lport = self.conv_param(l.listenport, params, session_data['params']) daddr = self.conv_param(l.destaddr, params, session_data['params']) dport = self.conv_param(l.destport, params, session_data['params']) if None in (laddr, lport, daddr, dport): for p,n in (laddr, l.listenaddr), (lport, l.listenport), (daddr, l.destaddr), (dport, l.destport): if p == None: self.io.print_warning("Parameter %s referenced by tunnel %s not found. This tunnel will " "be ignored" % (n, l.name)) return self.io.print_success("Local Tunnel - %s" % l.name) # # Get the destination IP and port for the target # targetip = self.prompt_redir_fake('Destination IP', daddr) targetport = self.prompt_redir_fake('Destination Port', dport) # # Get the redirection addresses # redirip = self.prompt_redir(plugin, l.listenaddr, 'Listen IP', '127.0.0.1') if not dport: dport = targetport redirport = self.prompt_redir(plugin, l.listenport, 'Listen Port', lport) # # # session_data['params'][l.listenaddr] = targetip session_data['params'][l.listenport] = targetport # # Record the redirection tunnel # session_data['local'].append(LocalRedirection(l.protocol, redirip, redirport, targetip, targetport, name=l.name)) self.io.print_success("(%s) Local %s:%s -> %s:%s" % (l.protocol, redirip, redirport, targetip, targetport))
[ "def", "redirect_local", "(", "self", ",", "l", ",", "plugin", ",", "session_data", ")", ":", "# listenaddr - name of variable containing the LP IP", "# listenport - name of variable containing the LP Port", "# destaddr - name of variable containing the Target IP", "# destport - name of variable containing the Target Port", "# targetip - IP of the target", "# targetport - Port of the target ", "# redirip - IP of the LP", "# redirport - Port of the LP", "params", "=", "iDict", "(", "plugin", ".", "getParameters", "(", ")", ")", "# Get the defaults for the user prompt", "laddr", "=", "self", ".", "conv_param", "(", "l", ".", "listenaddr", ",", "params", ",", "session_data", "[", "'params'", "]", ")", "lport", "=", "self", ".", "conv_param", "(", "l", ".", "listenport", ",", "params", ",", "session_data", "[", "'params'", "]", ")", "daddr", "=", "self", ".", "conv_param", "(", "l", ".", "destaddr", ",", "params", ",", "session_data", "[", "'params'", "]", ")", "dport", "=", "self", ".", "conv_param", "(", "l", ".", "destport", ",", "params", ",", "session_data", "[", "'params'", "]", ")", "if", "None", "in", "(", "laddr", ",", "lport", ",", "daddr", ",", "dport", ")", ":", "for", "p", ",", "n", "in", "(", "laddr", ",", "l", ".", "listenaddr", ")", ",", "(", "lport", ",", "l", ".", "listenport", ")", ",", "(", "daddr", ",", "l", ".", "destaddr", ")", ",", "(", "dport", ",", "l", ".", "destport", ")", ":", "if", "p", "==", "None", ":", "self", ".", "io", ".", "print_warning", "(", "\"Parameter %s referenced by tunnel %s not found. This tunnel will \"", "\"be ignored\"", "%", "(", "n", ",", "l", ".", "name", ")", ")", "return", "self", ".", "io", ".", "print_success", "(", "\"Local Tunnel - %s\"", "%", "l", ".", "name", ")", "#", "# Get the destination IP and port for the target", "#", "targetip", "=", "self", ".", "prompt_redir_fake", "(", "'Destination IP'", ",", "daddr", ")", "targetport", "=", "self", ".", "prompt_redir_fake", "(", "'Destination Port'", ",", "dport", ")", "#", "# Get the redirection addresses", "# ", "redirip", "=", "self", ".", "prompt_redir", "(", "plugin", ",", "l", ".", "listenaddr", ",", "'Listen IP'", ",", "'127.0.0.1'", ")", "if", "not", "dport", ":", "dport", "=", "targetport", "redirport", "=", "self", ".", "prompt_redir", "(", "plugin", ",", "l", ".", "listenport", ",", "'Listen Port'", ",", "lport", ")", "#", "# ", "#", "session_data", "[", "'params'", "]", "[", "l", ".", "listenaddr", "]", "=", "targetip", "session_data", "[", "'params'", "]", "[", "l", ".", "listenport", "]", "=", "targetport", "# ", "# Record the redirection tunnel", "# ", "session_data", "[", "'local'", "]", ".", "append", "(", "LocalRedirection", "(", "l", ".", "protocol", ",", "redirip", ",", "redirport", ",", "targetip", ",", "targetport", ",", "name", "=", "l", ".", "name", ")", ")", "self", ".", "io", ".", "print_success", "(", "\"(%s) Local %s:%s -> %s:%s\"", "%", "(", "l", ".", "protocol", ",", "redirip", ",", "redirport", ",", "targetip", ",", "targetport", ")", ")" ]
https://github.com/RamadhanAmizudin/malware/blob/2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1/Fuzzbunch/fuzzbunch/redirection.py#L401-L464
jiangxiluning/FOTS.PyTorch
b1851c170b4f1ad18406766352cb5171648ce603
FOTS/utils/util.py
python
StringLabelConverter.encode
(self, text)
Support batch or single str. Args: text (str or list of str): texts to convert. Returns: torch.IntTensor [length_0 + length_1 + ... length_{n - 1}]: encoded texts. torch.IntTensor [n]: length of each text.
Support batch or single str. Args: text (str or list of str): texts to convert. Returns: torch.IntTensor [length_0 + length_1 + ... length_{n - 1}]: encoded texts. torch.IntTensor [n]: length of each text.
[ "Support", "batch", "or", "single", "str", ".", "Args", ":", "text", "(", "str", "or", "list", "of", "str", ")", ":", "texts", "to", "convert", ".", "Returns", ":", "torch", ".", "IntTensor", "[", "length_0", "+", "length_1", "+", "...", "length_", "{", "n", "-", "1", "}", "]", ":", "encoded", "texts", ".", "torch", ".", "IntTensor", "[", "n", "]", ":", "length", "of", "each", "text", "." ]
def encode(self, text): """Support batch or single str. Args: text (str or list of str): texts to convert. Returns: torch.IntTensor [length_0 + length_1 + ... length_{n - 1}]: encoded texts. torch.IntTensor [n]: length of each text. """ if isinstance(text, str): text = [ self.dict.get(char.lower() if self._ignore_case else char, self.dict['<other>']) for char in text ] length = [len(text)] return text, length elif isinstance(text, collections.abc.Iterable): length = [len(s) for s in text] texts = [] for s in text: text = self.encode(s)[0] if len(text) > self.max_length: if self.raise_over_length: raise ValueError('{} is over length {}'.format(text, self.max_length)) else: text = text[:self.max_length] else: text = text + [len(self.dict) + 1] * (self.max_length - len(text)) texts.append(text) text = torch.tensor(texts, dtype=torch.long) return text, length
[ "def", "encode", "(", "self", ",", "text", ")", ":", "if", "isinstance", "(", "text", ",", "str", ")", ":", "text", "=", "[", "self", ".", "dict", ".", "get", "(", "char", ".", "lower", "(", ")", "if", "self", ".", "_ignore_case", "else", "char", ",", "self", ".", "dict", "[", "'<other>'", "]", ")", "for", "char", "in", "text", "]", "length", "=", "[", "len", "(", "text", ")", "]", "return", "text", ",", "length", "elif", "isinstance", "(", "text", ",", "collections", ".", "abc", ".", "Iterable", ")", ":", "length", "=", "[", "len", "(", "s", ")", "for", "s", "in", "text", "]", "texts", "=", "[", "]", "for", "s", "in", "text", ":", "text", "=", "self", ".", "encode", "(", "s", ")", "[", "0", "]", "if", "len", "(", "text", ")", ">", "self", ".", "max_length", ":", "if", "self", ".", "raise_over_length", ":", "raise", "ValueError", "(", "'{} is over length {}'", ".", "format", "(", "text", ",", "self", ".", "max_length", ")", ")", "else", ":", "text", "=", "text", "[", ":", "self", ".", "max_length", "]", "else", ":", "text", "=", "text", "+", "[", "len", "(", "self", ".", "dict", ")", "+", "1", "]", "*", "(", "self", ".", "max_length", "-", "len", "(", "text", ")", ")", "texts", ".", "append", "(", "text", ")", "text", "=", "torch", ".", "tensor", "(", "texts", ",", "dtype", "=", "torch", ".", "long", ")", "return", "text", ",", "length" ]
https://github.com/jiangxiluning/FOTS.PyTorch/blob/b1851c170b4f1ad18406766352cb5171648ce603/FOTS/utils/util.py#L90-L121
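A minimal sketch of the single-string path, assuming a converter whose dict maps characters to ids with an '<other>' fallback; the alphabet and table construction below are illustrative stand-ins for what FOTS builds elsewhere:

```python
# Illustrative character table with an '<other>' fallback id.
alphabet = 'abc'
table = {ch: i + 1 for i, ch in enumerate(alphabet)}
table['<other>'] = len(table) + 1

def encode_single(text, ignore_case=True):
    ids = [table.get(c.lower() if ignore_case else c, table['<other>'])
           for c in text]
    return ids, [len(ids)]

print(encode_single('Cab!'))  # ([3, 1, 2, 4], [4]) -- '!' falls back to <other>
```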
wujian16/Cornell-MOE
df299d1be882d2af9796d7a68b3f9505cac7a53e
moe/optimal_learning/python/cpp_wrappers/domain.py
python
TensorProductDomain.__init__
(self, domain_bounds)
Construct a TensorProductDomain that can be used with cpp_wrappers.* functions/classes. :param domain_bounds: the boundaries of a dim-dimensional tensor-product domain :type domain_bounds: iterable of dim ClosedInterval
Construct a TensorProductDomain that can be used with cpp_wrappers.* functions/classes.
[ "Construct", "a", "TensorProductDomain", "that", "can", "be", "used", "with", "cpp_wrappers", ".", "*", "functions", "/", "classes", "." ]
def __init__(self, domain_bounds): """Construct a TensorProductDomain that can be used with cpp_wrappers.* functions/classes. :param domain_bounds: the boundaries of a dim-dimensional tensor-product domain :type domain_bounds: iterable of dim ClosedInterval """ self._domain_bounds = copy.deepcopy(domain_bounds) self._domain_type = C_GP.DomainTypes.tensor_product
[ "def", "__init__", "(", "self", ",", "domain_bounds", ")", ":", "self", ".", "_domain_bounds", "=", "copy", ".", "deepcopy", "(", "domain_bounds", ")", "self", ".", "_domain_type", "=", "C_GP", ".", "DomainTypes", ".", "tensor_product" ]
https://github.com/wujian16/Cornell-MOE/blob/df299d1be882d2af9796d7a68b3f9505cac7a53e/moe/optimal_learning/python/cpp_wrappers/domain.py#L27-L35
D-X-Y/caffe-faster-rcnn
eb50c97ff48f3df115d0e85fe0a32b0c7e2aa4cb
tools/extra/summarize.py
python
print_table
(table, max_width)
Print a simple nicely-aligned table. table must be a list of (equal-length) lists. Columns are space-separated, and as narrow as possible, but no wider than max_width. Text may overflow columns; note that unlike string.format, this will not affect subsequent columns, if possible.
Print a simple nicely-aligned table.
[ "Print", "a", "simple", "nicely", "-", "aligned", "table", "." ]
def print_table(table, max_width): """Print a simple nicely-aligned table. table must be a list of (equal-length) lists. Columns are space-separated, and as narrow as possible, but no wider than max_width. Text may overflow columns; note that unlike string.format, this will not affect subsequent columns, if possible.""" max_widths = [max_width] * len(table[0]) column_widths = [max(printed_len(row[j]) + 1 for row in table) for j in range(len(table[0]))] column_widths = [min(w, max_w) for w, max_w in zip(column_widths, max_widths)] for row in table: row_str = '' right_col = 0 for cell, width in zip(row, column_widths): right_col += width row_str += cell + ' ' row_str += ' ' * max(right_col - printed_len(row_str), 0) print(row_str)
[ "def", "print_table", "(", "table", ",", "max_width", ")", ":", "max_widths", "=", "[", "max_width", "]", "*", "len", "(", "table", "[", "0", "]", ")", "column_widths", "=", "[", "max", "(", "printed_len", "(", "row", "[", "j", "]", ")", "+", "1", "for", "row", "in", "table", ")", "for", "j", "in", "range", "(", "len", "(", "table", "[", "0", "]", ")", ")", "]", "column_widths", "=", "[", "min", "(", "w", ",", "max_w", ")", "for", "w", ",", "max_w", "in", "zip", "(", "column_widths", ",", "max_widths", ")", "]", "for", "row", "in", "table", ":", "row_str", "=", "''", "right_col", "=", "0", "for", "cell", ",", "width", "in", "zip", "(", "row", ",", "column_widths", ")", ":", "right_col", "+=", "width", "row_str", "+=", "cell", "+", "' '", "row_str", "+=", "' '", "*", "max", "(", "right_col", "-", "printed_len", "(", "row_str", ")", ",", "0", ")", "print", "(", "row_str", ")" ]
https://github.com/D-X-Y/caffe-faster-rcnn/blob/eb50c97ff48f3df115d0e85fe0a32b0c7e2aa4cb/tools/extra/summarize.py#L41-L61
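Runnable usage sketch; printed_len is simplified to plain len here (the original script also discounts ANSI color escapes when measuring width):

```python
printed_len = len  # simplified width measure

def print_table(table, max_width):
    # Narrowest column widths that fit the cells, capped at max_width.
    column_widths = [min(max(printed_len(row[j]) + 1 for row in table), max_width)
                     for j in range(len(table[0]))]
    for row in table:
        row_str, right_col = '', 0
        for cell, width in zip(row, column_widths):
            right_col += width
            row_str += cell + ' '
            row_str += ' ' * max(right_col - printed_len(row_str), 0)
        print(row_str)

print_table([['layer', 'params'], ['conv1', '1728'], ['fc7', '16777216']], 20)
```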
leela-zero/leela-zero
e3ed6310d33d75078ba74c3adf887d18439fc2e3
training/tf/average_weights.py
python
swa
(inputs, output, weights=None)
Average weights of the weight files. inputs : List of filenames to use as inputs output : String of output filename weights : List of numbers to use for weighting the inputs
Average weights of the weight files.
[ "Average", "weights", "of", "the", "weight", "files", "." ]
def swa(inputs, output, weights=None): """ Average weights of the weight files. inputs : List of filenames to use as inputs output : String of output filename weights : List of numbers to use for weighting the inputs """ out_weights = [] if weights == None: weights = [1.0]*len(inputs) if len(weights) != len(inputs): raise ValueError("Number of weights doesn't match number of input files") # Normalize weights weights = [float(w)/sum(weights) for w in weights] for count, filename in enumerate(inputs): with open(filename, 'r') as f: weights_in = [] for line in f: weights_in.append(weights[count] * np.array(list(map(float, line.split(' '))))) if count == 0: out_weights = weights_in else: if len(out_weights) != len(weights_in): raise ValueError("Nets have different sizes") for e, w in enumerate(weights_in): if len(w) != len(out_weights[e]): raise ValueError("Nets have different sizes") out_weights[e] += w with open(output, 'w') as f: for e, w in enumerate(out_weights): if e == 0: #Version f.write('1\n') else: f.write(' '.join(map(str, w)) + '\n')
[ "def", "swa", "(", "inputs", ",", "output", ",", "weights", "=", "None", ")", ":", "out_weights", "=", "[", "]", "if", "weights", "==", "None", ":", "weights", "=", "[", "1.0", "]", "*", "len", "(", "inputs", ")", "if", "len", "(", "weights", ")", "!=", "len", "(", "inputs", ")", ":", "raise", "ValueError", "(", "\"Number of weights doesn't match number of input files\"", ")", "# Normalize weights", "weights", "=", "[", "float", "(", "w", ")", "/", "sum", "(", "weights", ")", "for", "w", "in", "weights", "]", "for", "count", ",", "filename", "in", "enumerate", "(", "inputs", ")", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "f", ":", "weights_in", "=", "[", "]", "for", "line", "in", "f", ":", "weights_in", ".", "append", "(", "weights", "[", "count", "]", "*", "np", ".", "array", "(", "list", "(", "map", "(", "float", ",", "line", ".", "split", "(", "' '", ")", ")", ")", ")", ")", "if", "count", "==", "0", ":", "out_weights", "=", "weights_in", "else", ":", "if", "len", "(", "out_weights", ")", "!=", "len", "(", "weights_in", ")", ":", "raise", "ValueError", "(", "\"Nets have different sizes\"", ")", "for", "e", ",", "w", "in", "enumerate", "(", "weights_in", ")", ":", "if", "len", "(", "w", ")", "!=", "len", "(", "out_weights", "[", "e", "]", ")", ":", "raise", "ValueError", "(", "\"Nets have different sizes\"", ")", "out_weights", "[", "e", "]", "+=", "w", "with", "open", "(", "output", ",", "'w'", ")", "as", "f", ":", "for", "e", ",", "w", "in", "enumerate", "(", "out_weights", ")", ":", "if", "e", "==", "0", ":", "#Version", "f", ".", "write", "(", "'1\\n'", ")", "else", ":", "f", ".", "write", "(", "' '", ".", "join", "(", "map", "(", "str", ",", "w", ")", ")", "+", "'\\n'", ")" ]
https://github.com/leela-zero/leela-zero/blob/e3ed6310d33d75078ba74c3adf887d18439fc2e3/training/tf/average_weights.py#L22-L62
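An end-to-end sketch driving the swa function above (which must be in scope, with numpy imported as np as the script requires): write two tiny two-line networks, a version line plus one weight row, then average them 3:1. File names and values are illustrative:

```python
# Two-line "networks": a version line plus one weight row (illustrative values).
with open('net_a.txt', 'w') as f:
    f.write('1\n0.0 2.0\n')
with open('net_b.txt', 'w') as f:
    f.write('1\n4.0 6.0\n')

# Weighted 3:1 average; swa normalizes the weights to 0.75 and 0.25.
swa(['net_a.txt', 'net_b.txt'], 'net_avg.txt', weights=[3, 1])

print(open('net_avg.txt').read())
# 1
# 1.0 3.0
```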
timi-liuliang/echo
40a5a24d430eee4118314459ab7e03afcb3b8719
thirdparty/protobuf/python/google/protobuf/internal/python_message.py
python
_ExtensionDict.__setitem__
(self, extension_handle, value)
If extension_handle specifies a non-repeated, scalar extension field, sets the value of that field.
If extension_handle specifies a non-repeated, scalar extension field, sets the value of that field.
[ "If", "extension_handle", "specifies", "a", "non", "-", "repeated", "scalar", "extension", "field", "sets", "the", "value", "of", "that", "field", "." ]
def __setitem__(self, extension_handle, value): """If extension_handle specifies a non-repeated, scalar extension field, sets the value of that field. """ _VerifyExtensionHandle(self._extended_message, extension_handle) if (extension_handle.label == _FieldDescriptor.LABEL_REPEATED or extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE): raise TypeError( 'Cannot assign to extension "%s" because it is a repeated or ' 'composite type.' % extension_handle.full_name) # It's slightly wasteful to lookup the type checker each time, # but we expect this to be a vanishingly uncommon case anyway. type_checker = type_checkers.GetTypeChecker( extension_handle) # pylint: disable=protected-access self._extended_message._fields[extension_handle] = ( type_checker.CheckValue(value)) self._extended_message._Modified()
[ "def", "__setitem__", "(", "self", ",", "extension_handle", ",", "value", ")", ":", "_VerifyExtensionHandle", "(", "self", ".", "_extended_message", ",", "extension_handle", ")", "if", "(", "extension_handle", ".", "label", "==", "_FieldDescriptor", ".", "LABEL_REPEATED", "or", "extension_handle", ".", "cpp_type", "==", "_FieldDescriptor", ".", "CPPTYPE_MESSAGE", ")", ":", "raise", "TypeError", "(", "'Cannot assign to extension \"%s\" because it is a repeated or '", "'composite type.'", "%", "extension_handle", ".", "full_name", ")", "# It's slightly wasteful to lookup the type checker each time,", "# but we expect this to be a vanishingly uncommon case anyway.", "type_checker", "=", "type_checkers", ".", "GetTypeChecker", "(", "extension_handle", ")", "# pylint: disable=protected-access", "self", ".", "_extended_message", ".", "_fields", "[", "extension_handle", "]", "=", "(", "type_checker", ".", "CheckValue", "(", "value", ")", ")", "self", ".", "_extended_message", ".", "_Modified", "(", ")" ]
https://github.com/timi-liuliang/echo/blob/40a5a24d430eee4118314459ab7e03afcb3b8719/thirdparty/protobuf/python/google/protobuf/internal/python_message.py#L1220-L1240
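Hedged usage sketch of the assignment path above. my_pb2 and its extensions are hypothetical generated artifacts from a proto2 file, not a real module:

```python
import my_pb2  # hypothetical module generated from a proto2 file that declares:
               #   extend Base { optional int32 answer = 100; }

msg = my_pb2.Base()
msg.Extensions[my_pb2.answer] = 42    # scalar, non-repeated: assignment allowed
print(msg.Extensions[my_pb2.answer])  # 42

# Repeated/composite extensions must be mutated, never assigned:
# msg.Extensions[my_pb2.items].add()  # ok
# msg.Extensions[my_pb2.items] = []   # TypeError, per __setitem__ above
```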
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/signers.py
python
generate_presigned_post
(self, Bucket, Key, Fields=None, Conditions=None, ExpiresIn=3600)
return post_presigner.generate_presigned_post( request_dict=request_dict, fields=fields, conditions=conditions, expires_in=expires_in)
Builds the url and the form fields used for a presigned s3 post :type Bucket: string :param Bucket: The name of the bucket to presign the post to. Note that bucket related conditions should not be included in the ``conditions`` parameter. :type Key: string :param Key: Key name, optionally add ${filename} to the end to attach the submitted filename. Note that key related conditions and fields are filled out for you and should not be included in the ``Fields`` or ``Conditions`` parameter. :type Fields: dict :param Fields: A dictionary of prefilled form fields to build on top of. Elements that may be included are acl, Cache-Control, Content-Type, Content-Disposition, Content-Encoding, Expires, success_action_redirect, redirect, success_action_status, and x-amz-meta-. Note that if a particular element is included in the fields dictionary it will not be automatically added to the conditions list. You must specify a condition for the element as well. :type Conditions: list :param Conditions: A list of conditions to include in the policy. Each element can be either a list or a structure. For example: [ {"acl": "public-read"}, ["content-length-range", 2, 5], ["starts-with", "$success_action_redirect", ""] ] Conditions that are included may pertain to acl, content-length-range, Cache-Control, Content-Type, Content-Disposition, Content-Encoding, Expires, success_action_redirect, redirect, success_action_status, and/or x-amz-meta-. Note that if you include a condition, you must specify a valid value in the fields dictionary as well. A value will not be added automatically to the fields dictionary based on the conditions. :type ExpiresIn: int :param ExpiresIn: The number of seconds the presigned post is valid for. :rtype: dict :returns: A dictionary with two elements: ``url`` and ``fields``. Url is the url to post to. Fields is a dictionary filled with the form fields and respective values to use when submitting the post. For example: {'url': 'https://mybucket.s3.amazonaws.com', 'fields': {'acl': 'public-read', 'key': 'mykey', 'signature': 'mysignature', 'policy': 'mybase64 encoded policy'} }
Builds the url and the form fields used for a presigned s3 post
[ "Builds", "the", "url", "and", "the", "form", "fields", "used", "for", "a", "presigned", "s3", "post" ]
def generate_presigned_post(self, Bucket, Key, Fields=None, Conditions=None, ExpiresIn=3600): """Builds the url and the form fields used for a presigned s3 post :type Bucket: string :param Bucket: The name of the bucket to presign the post to. Note that bucket related conditions should not be included in the ``conditions`` parameter. :type Key: string :param Key: Key name, optionally add ${filename} to the end to attach the submitted filename. Note that key related conditions and fields are filled out for you and should not be included in the ``Fields`` or ``Conditions`` parameter. :type Fields: dict :param Fields: A dictionary of prefilled form fields to build on top of. Elements that may be included are acl, Cache-Control, Content-Type, Content-Disposition, Content-Encoding, Expires, success_action_redirect, redirect, success_action_status, and x-amz-meta-. Note that if a particular element is included in the fields dictionary it will not be automatically added to the conditions list. You must specify a condition for the element as well. :type Conditions: list :param Conditions: A list of conditions to include in the policy. Each element can be either a list or a structure. For example: [ {"acl": "public-read"}, ["content-length-range", 2, 5], ["starts-with", "$success_action_redirect", ""] ] Conditions that are included may pertain to acl, content-length-range, Cache-Control, Content-Type, Content-Disposition, Content-Encoding, Expires, success_action_redirect, redirect, success_action_status, and/or x-amz-meta-. Note that if you include a condition, you must specify a valid value in the fields dictionary as well. A value will not be added automatically to the fields dictionary based on the conditions. :type ExpiresIn: int :param ExpiresIn: The number of seconds the presigned post is valid for. :rtype: dict :returns: A dictionary with two elements: ``url`` and ``fields``. Url is the url to post to. Fields is a dictionary filled with the form fields and respective values to use when submitting the post. For example: {'url': 'https://mybucket.s3.amazonaws.com', 'fields': {'acl': 'public-read', 'key': 'mykey', 'signature': 'mysignature', 'policy': 'mybase64 encoded policy'} } """ bucket = Bucket key = Key fields = Fields conditions = Conditions expires_in = ExpiresIn if fields is None: fields = {} if conditions is None: conditions = [] post_presigner = S3PostPresigner(self._request_signer) serializer = self._serializer # We choose the CreateBucket operation model because its url gets # serialized to what a presign post requires. operation_model = self.meta.service_model.operation_model( 'CreateBucket') # Create a request dict based on the params to serialize. request_dict = serializer.serialize_to_request( {'Bucket': bucket}, operation_model) # Prepare the request dict by including the client's endpoint url. prepare_request_dict( request_dict, endpoint_url=self.meta.endpoint_url, context={ 'is_presign_request': True, 'use_global_endpoint': _should_use_global_endpoint(self), }, ) # Append that the bucket name to the list of conditions. conditions.append({'bucket': bucket}) # If the key ends with filename, the only constraint that can be # imposed is if it starts with the specified prefix. if key.endswith('${filename}'): conditions.append(["starts-with", '$key', key[:-len('${filename}')]]) else: conditions.append({'key': key}) # Add the key to the fields. fields['key'] = key return post_presigner.generate_presigned_post( request_dict=request_dict, fields=fields, conditions=conditions, expires_in=expires_in)
[ "def", "generate_presigned_post", "(", "self", ",", "Bucket", ",", "Key", ",", "Fields", "=", "None", ",", "Conditions", "=", "None", ",", "ExpiresIn", "=", "3600", ")", ":", "bucket", "=", "Bucket", "key", "=", "Key", "fields", "=", "Fields", "conditions", "=", "Conditions", "expires_in", "=", "ExpiresIn", "if", "fields", "is", "None", ":", "fields", "=", "{", "}", "if", "conditions", "is", "None", ":", "conditions", "=", "[", "]", "post_presigner", "=", "S3PostPresigner", "(", "self", ".", "_request_signer", ")", "serializer", "=", "self", ".", "_serializer", "# We choose the CreateBucket operation model because its url gets", "# serialized to what a presign post requires.", "operation_model", "=", "self", ".", "meta", ".", "service_model", ".", "operation_model", "(", "'CreateBucket'", ")", "# Create a request dict based on the params to serialize.", "request_dict", "=", "serializer", ".", "serialize_to_request", "(", "{", "'Bucket'", ":", "bucket", "}", ",", "operation_model", ")", "# Prepare the request dict by including the client's endpoint url.", "prepare_request_dict", "(", "request_dict", ",", "endpoint_url", "=", "self", ".", "meta", ".", "endpoint_url", ",", "context", "=", "{", "'is_presign_request'", ":", "True", ",", "'use_global_endpoint'", ":", "_should_use_global_endpoint", "(", "self", ")", ",", "}", ",", ")", "# Append that the bucket name to the list of conditions.", "conditions", ".", "append", "(", "{", "'bucket'", ":", "bucket", "}", ")", "# If the key ends with filename, the only constraint that can be", "# imposed is if it starts with the specified prefix.", "if", "key", ".", "endswith", "(", "'${filename}'", ")", ":", "conditions", ".", "append", "(", "[", "\"starts-with\"", ",", "'$key'", ",", "key", "[", ":", "-", "len", "(", "'${filename}'", ")", "]", "]", ")", "else", ":", "conditions", ".", "append", "(", "{", "'key'", ":", "key", "}", ")", "# Add the key to the fields.", "fields", "[", "'key'", "]", "=", "key", "return", "post_presigner", ".", "generate_presigned_post", "(", "request_dict", "=", "request_dict", ",", "fields", "=", "fields", ",", "conditions", "=", "conditions", ",", "expires_in", "=", "expires_in", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/signers.py#L605-L717
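This is the method behind the standard boto3 call; typical usage (bucket, key, and file names are illustrative):

```python
import boto3
import requests  # only for the upload demonstration

s3 = boto3.client('s3')
post = s3.generate_presigned_post(
    Bucket='mybucket',
    Key='uploads/${filename}',   # S3 substitutes the submitted filename
    Conditions=[['content-length-range', 1, 10 * 1024 * 1024]],
    ExpiresIn=600)

# POST the returned form fields plus the file to the returned URL.
with open('report.pdf', 'rb') as f:
    resp = requests.post(post['url'], data=post['fields'],
                         files={'file': ('report.pdf', f)})
print(resp.status_code)  # 204 on success
```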
lammps/lammps
b75c3065430a75b1b5543a10e10f46d9b4c91913
tools/i-pi/ipi/utils/inputvalue.py
python
InputArray.parse
(self, xml=None, text="")
Reads the data for an array from an xml file. Args: xml: An xml_node object containing all the data for the parent tag. text: The data held between the start and end tags.
Reads the data for an array from an xml file.
[ "Reads", "the", "data", "for", "an", "array", "from", "an", "xml", "file", "." ]
def parse(self, xml=None, text=""): """Reads the data for an array from an xml file. Args: xml: An xml_node object containing all the data for the parent tag. text: The data held between the start and end tags. """ Input.parse(self, xml=xml, text=text) self.value = read_array(self.type, self._text) #if the shape is not specified, assume the array is linear. if self.shape.fetch() == (0,): self.shape.store((len(self.value),))
[ "def", "parse", "(", "self", ",", "xml", "=", "None", ",", "text", "=", "\"\"", ")", ":", "Input", ".", "parse", "(", "self", ",", "xml", "=", "xml", ",", "text", "=", "text", ")", "self", ".", "value", "=", "read_array", "(", "self", ".", "type", ",", "self", ".", "_text", ")", "#if the shape is not specified, assume the array is linear.", "if", "self", ".", "shape", ".", "fetch", "(", ")", "==", "(", "0", ",", ")", ":", "self", ".", "shape", ".", "store", "(", "(", "len", "(", "self", ".", "value", ")", ",", ")", ")" ]
https://github.com/lammps/lammps/blob/b75c3065430a75b1b5543a10e10f46d9b4c91913/tools/i-pi/ipi/utils/inputvalue.py#L954-L968
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/__init__.py
python
Misc.configure
(self, cnf=None, **kw)
return self._configure('configure', cnf, kw)
Configure resources of a widget. The values for resources are specified as keyword arguments. To get an overview about the allowed keyword arguments call the method keys.
Configure resources of a widget.
[ "Configure", "resources", "of", "a", "widget", "." ]
def configure(self, cnf=None, **kw): """Configure resources of a widget. The values for resources are specified as keyword arguments. To get an overview about the allowed keyword arguments call the method keys. """ return self._configure('configure', cnf, kw)
[ "def", "configure", "(", "self", ",", "cnf", "=", "None", ",", "*", "*", "kw", ")", ":", "return", "self", ".", "_configure", "(", "'configure'", ",", "cnf", ",", "kw", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/__init__.py#L1478-L1485
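Typical usage: pass resource options as keyword arguments, and call keys() to list what a given widget accepts (requires a display):

```python
import tkinter as tk

root = tk.Tk()
label = tk.Label(root, text='hello')
label.pack()
label.configure(text='hello, world', fg='white', bg='navy')
print(sorted(label.keys())[:5])  # a few of the option names this widget accepts
root.update()                    # root.mainloop() in a real application
```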
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/feature_column/serialization.py
python
deserialize_feature_columns
(configs, custom_objects=None)
return [ deserialize_feature_column(c, custom_objects, columns_by_name) for c in configs ]
Deserializes a list of FeatureColumns configs. Returns a list of FeatureColumns given a list of config dicts acquired by `serialize_feature_columns`. Args: configs: A list of Dicts with the serialization of feature columns acquired by `serialize_feature_columns`. custom_objects: A Dict from custom_object name to the associated keras serializable objects (FeatureColumns, classes or functions). Returns: FeatureColumn objects corresponding to the input configs. Raises: ValueError if called with input that is not a list of FeatureColumns.
Deserializes a list of FeatureColumns configs.
[ "Deserializes", "a", "list", "of", "FeatureColumns", "configs", "." ]
def deserialize_feature_columns(configs, custom_objects=None): """Deserializes a list of FeatureColumns configs. Returns a list of FeatureColumns given a list of config dicts acquired by `serialize_feature_columns`. Args: configs: A list of Dicts with the serialization of feature columns acquired by `serialize_feature_columns`. custom_objects: A Dict from custom_object name to the associated keras serializable objects (FeatureColumns, classes or functions). Returns: FeatureColumn objects corresponding to the input configs. Raises: ValueError if called with input that is not a list of FeatureColumns. """ columns_by_name = {} return [ deserialize_feature_column(c, custom_objects, columns_by_name) for c in configs ]
[ "def", "deserialize_feature_columns", "(", "configs", ",", "custom_objects", "=", "None", ")", ":", "columns_by_name", "=", "{", "}", "return", "[", "deserialize_feature_column", "(", "c", ",", "custom_objects", ",", "columns_by_name", ")", "for", "c", "in", "configs", "]" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/feature_column/serialization.py#L170-L192
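A hedged round-trip sketch for the serialization record above; the internal module paths are assumptions and may move between TensorFlow releases.

    from tensorflow.python.feature_column import feature_column_v2 as fc
    from tensorflow.python.feature_column import serialization

    columns = [fc.numeric_column('price'), fc.numeric_column('size')]
    configs = serialization.serialize_feature_columns(columns)      # list of config dicts
    restored = serialization.deserialize_feature_columns(configs)   # list of FeatureColumns
    assert [c.name for c in restored] == [c.name for c in columns]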
plumonito/dtslam
5994bb9cf7a11981b830370db206bceb654c085d
3rdparty/opencv-git/3rdparty/jinja2/lexer.py
python
describe_token
(token)
return _describe_token_type(token.type)
Returns a description of the token.
Returns a description of the token.
[ "Returns", "a", "description", "of", "the", "token", "." ]
def describe_token(token): """Returns a description of the token.""" if token.type == 'name': return token.value return _describe_token_type(token.type)
[ "def", "describe_token", "(", "token", ")", ":", "if", "token", ".", "type", "==", "'name'", ":", "return", "token", ".", "value", "return", "_describe_token_type", "(", "token", ".", "type", ")" ]
https://github.com/plumonito/dtslam/blob/5994bb9cf7a11981b830370db206bceb654c085d/3rdparty/opencv-git/3rdparty/jinja2/lexer.py#L164-L168
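A small sketch of the lexer helper above, assuming the standalone jinja2 package rather than the vendored copy; Token is constructed as (lineno, type, value).

    from jinja2.lexer import Token, describe_token

    print(describe_token(Token(1, 'name', 'foo')))     # name tokens are described by their value: 'foo'
    print(describe_token(Token(1, 'integer', '42')))   # other tokens fall back to a type description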
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/metrics/python/ops/metric_ops.py
python
streaming_mean_iou
(predictions, labels, num_classes, weights=None, metrics_collections=None, updates_collections=None, name=None)
return metrics.mean_iou( num_classes=num_classes, predictions=predictions, labels=labels, weights=weights, metrics_collections=metrics_collections, updates_collections=updates_collections, name=name)
Calculate per-step mean Intersection-Over-Union (mIOU). Mean Intersection-Over-Union is a common evaluation metric for semantic image segmentation, which first computes the IOU for each semantic class and then computes the average over classes. IOU is defined as follows: IOU = true_positive / (true_positive + false_positive + false_negative). The predictions are accumulated in a confusion matrix, weighted by `weights`, and mIOU is then calculated from it. For estimation of the metric over a stream of data, the function creates an `update_op` operation that updates these variables and returns the `mean_iou`. If `weights` is `None`, weights default to 1. Use weights of 0 to mask values. Args: predictions: A `Tensor` of prediction results for semantic labels, whose shape is [batch size] and type `int32` or `int64`. The tensor will be flattened, if its rank > 1. labels: A `Tensor` of ground truth labels with shape [batch size] and of type `int32` or `int64`. The tensor will be flattened, if its rank > 1. num_classes: The possible number of labels the prediction task can have. This value must be provided, since a confusion matrix of dimension = [num_classes, num_classes] will be allocated. weights: An optional `Tensor` whose shape is broadcastable to `predictions`. metrics_collections: An optional list of collections that `mean_iou` should be added to. updates_collections: An optional list of collections `update_op` should be added to. name: An optional variable_scope name. Returns: mean_iou: A `Tensor` representing the mean intersection-over-union. update_op: An operation that increments the confusion matrix. Raises: ValueError: If `predictions` and `labels` have mismatched shapes, or if `weights` is not `None` and its shape doesn't match `predictions`, or if either `metrics_collections` or `updates_collections` are not a list or tuple.
Calculate per-step mean Intersection-Over-Union (mIOU).
[ "Calculate", "per", "-", "step", "mean", "Intersection", "-", "Over", "-", "Union", "(", "mIOU", ")", "." ]
def streaming_mean_iou(predictions, labels, num_classes, weights=None, metrics_collections=None, updates_collections=None, name=None): """Calculate per-step mean Intersection-Over-Union (mIOU). Mean Intersection-Over-Union is a common evaluation metric for semantic image segmentation, which first computes the IOU for each semantic class and then computes the average over classes. IOU is defined as follows: IOU = true_positive / (true_positive + false_positive + false_negative). The predictions are accumulated in a confusion matrix, weighted by `weights`, and mIOU is then calculated from it. For estimation of the metric over a stream of data, the function creates an `update_op` operation that updates these variables and returns the `mean_iou`. If `weights` is `None`, weights default to 1. Use weights of 0 to mask values. Args: predictions: A `Tensor` of prediction results for semantic labels, whose shape is [batch size] and type `int32` or `int64`. The tensor will be flattened, if its rank > 1. labels: A `Tensor` of ground truth labels with shape [batch size] and of type `int32` or `int64`. The tensor will be flattened, if its rank > 1. num_classes: The possible number of labels the prediction task can have. This value must be provided, since a confusion matrix of dimension = [num_classes, num_classes] will be allocated. weights: An optional `Tensor` whose shape is broadcastable to `predictions`. metrics_collections: An optional list of collections that `mean_iou` should be added to. updates_collections: An optional list of collections `update_op` should be added to. name: An optional variable_scope name. Returns: mean_iou: A `Tensor` representing the mean intersection-over-union. update_op: An operation that increments the confusion matrix. Raises: ValueError: If `predictions` and `labels` have mismatched shapes, or if `weights` is not `None` and its shape doesn't match `predictions`, or if either `metrics_collections` or `updates_collections` are not a list or tuple. """ return metrics.mean_iou( num_classes=num_classes, predictions=predictions, labels=labels, weights=weights, metrics_collections=metrics_collections, updates_collections=updates_collections, name=name)
[ "def", "streaming_mean_iou", "(", "predictions", ",", "labels", ",", "num_classes", ",", "weights", "=", "None", ",", "metrics_collections", "=", "None", ",", "updates_collections", "=", "None", ",", "name", "=", "None", ")", ":", "return", "metrics", ".", "mean_iou", "(", "num_classes", "=", "num_classes", ",", "predictions", "=", "predictions", ",", "labels", "=", "labels", ",", "weights", "=", "weights", ",", "metrics_collections", "=", "metrics_collections", ",", "updates_collections", "=", "updates_collections", ",", "name", "=", "name", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/metrics/python/ops/metric_ops.py#L2194-L2245
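A TF 1.x-style driving sketch for the streaming metric above; assumes a graph-mode session and that tf.contrib is available in the installed build.

    import tensorflow as tf
    from tensorflow.contrib.metrics import streaming_mean_iou

    predictions = tf.constant([0, 1, 1, 0], dtype=tf.int32)
    labels = tf.constant([0, 1, 0, 0], dtype=tf.int32)
    mean_iou, update_op = streaming_mean_iou(predictions, labels, num_classes=2)

    with tf.Session() as sess:
        sess.run(tf.local_variables_initializer())   # the confusion matrix lives in local variables
        sess.run(update_op)                          # accumulate one batch
        print(sess.run(mean_iou))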
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/_pydecimal.py
python
Decimal.is_qnan
(self)
return self._exp == 'n'
Return True if self is a quiet NaN; otherwise return False.
Return True if self is a quiet NaN; otherwise return False.
[ "Return", "True", "if", "self", "is", "a", "quiet", "NaN", ";", "otherwise", "return", "False", "." ]
def is_qnan(self): """Return True if self is a quiet NaN; otherwise return False.""" return self._exp == 'n'
[ "def", "is_qnan", "(", "self", ")", ":", "return", "self", ".", "_exp", "==", "'n'" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/_pydecimal.py#L3143-L3145
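The stdlib decimal module exposes the same predicate, so the record above can be exercised directly:

    from decimal import Decimal

    print(Decimal('NaN').is_qnan())    # True  (quiet NaN)
    print(Decimal('sNaN').is_qnan())   # False (signaling NaN)
    print(Decimal('1.5').is_qnan())    # False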
CRYTEK/CRYENGINE
232227c59a220cbbd311576f0fbeba7bb53b2a8c
Editor/Python/windows/Lib/site-packages/pip/_vendor/retrying.py
python
Retrying.incrementing_sleep
(self, previous_attempt_number, delay_since_first_attempt_ms)
return result
Sleep an incremental amount of time after each attempt, starting at wait_incrementing_start and incrementing by wait_incrementing_increment
Sleep an incremental amount of time after each attempt, starting at wait_incrementing_start and incrementing by wait_incrementing_increment
[ "Sleep", "an", "incremental", "amount", "of", "time", "after", "each", "attempt", "starting", "at", "wait_incrementing_start", "and", "incrementing", "by", "wait_incrementing_increment" ]
def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): """ Sleep an incremental amount of time after each attempt, starting at wait_incrementing_start and incrementing by wait_incrementing_increment """ result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1)) if result < 0: result = 0 return result
[ "def", "incrementing_sleep", "(", "self", ",", "previous_attempt_number", ",", "delay_since_first_attempt_ms", ")", ":", "result", "=", "self", ".", "_wait_incrementing_start", "+", "(", "self", ".", "_wait_incrementing_increment", "*", "(", "previous_attempt_number", "-", "1", ")", ")", "if", "result", "<", "0", ":", "result", "=", "0", "return", "result" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pip/_vendor/retrying.py#L161-L169
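incrementing_sleep is internal; the public entry point is the retry decorator. A hedged sketch, assuming the standalone retrying distribution rather than pip's vendored copy:

    from retrying import retry

    @retry(wait_incrementing_start=500,        # first wait: 500 ms
           wait_incrementing_increment=200,    # +200 ms per subsequent attempt
           stop_max_attempt_number=5)
    def flaky():
        raise IOError("transient failure")

    try:
        flaky()
    except IOError:
        pass   # gave up after waits of 500, 700, 900 and 1100 ms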
irods/irods
ed6328646cee87182098d569919004049bf4ce21
scripts/irods/pyparsing.py
python
ParseResults.asList
( self )
return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
Returns the parse results as a nested list of matching tokens, all converted to strings.
Returns the parse results as a nested list of matching tokens, all converted to strings.
[ "Returns", "the", "parse", "results", "as", "a", "nested", "list", "of", "matching", "tokens", "all", "converted", "to", "strings", "." ]
def asList( self ): """Returns the parse results as a nested list of matching tokens, all converted to strings.""" return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
[ "def", "asList", "(", "self", ")", ":", "return", "[", "res", ".", "asList", "(", ")", "if", "isinstance", "(", "res", ",", "ParseResults", ")", "else", "res", "for", "res", "in", "self", ".", "__toklist", "]" ]
https://github.com/irods/irods/blob/ed6328646cee87182098d569919004049bf4ce21/scripts/irods/pyparsing.py#L534-L536
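A usage sketch against the standalone pyparsing package (the record above is irods' vendored copy), using the classic hello-world grammar:

    import pyparsing as pp

    greet = pp.Word(pp.alphas) + ',' + pp.Word(pp.alphas) + '!'
    result = greet.parseString('Hello, World!')
    print(result.asList())   # ['Hello', ',', 'World', '!']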
pskun/finance_news_analysis
6ac13e32deede37a4cf57bba8b2897941ae3d80d
utils/util_func.py
python
atof
(f)
return f
Convert a string to a float
Convert a string to a float
[ "Convert", "a", "string", "to", "a", "float" ]
def atof(f): ''' Convert a string to a float ''' try: f = float(f) except ValueError: f = None except TypeError: f = None return f
[ "def", "atof", "(", "f", ")", ":", "try", ":", "f", "=", "float", "(", "f", ")", "except", "ValueError", ":", "f", "=", "None", "except", "TypeError", ":", "f", "=", "None", "return", "f" ]
https://github.com/pskun/finance_news_analysis/blob/6ac13e32deede37a4cf57bba8b2897941ae3d80d/utils/util_func.py#L15-L23
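Given the function above in scope, its swallow-everything error handling behaves like this:

    print(atof('3.14'))   # 3.14
    print(atof('abc'))    # None, the ValueError is swallowed
    print(atof(None))     # None, the TypeError is swallowed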
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/distribute/one_device_strategy.py
python
OneDeviceExtended._global_batch_size
(self)
return True
Global and per-replica batching are equivalent for OneDeviceStrategy.
Global and per-replica batching are equivalent for OneDeviceStrategy.
[ "Global", "and", "per", "-", "replica", "batching", "are", "equivalent", "for", "OneDeviceStrategy", "." ]
def _global_batch_size(self): """Global and per-replica batching are equivalent for OneDeviceStrategy.""" return True
[ "def", "_global_batch_size", "(", "self", ")", ":", "return", "True" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/one_device_strategy.py#L466-L468
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_internal/network/lazy_wheel.py
python
LazyZipOverHTTP.mode
(self)
return 'rb'
Opening mode, which is always rb.
Opening mode, which is always rb.
[ "Opening", "mode", "which", "is", "always", "rb", "." ]
def mode(self): # type: () -> str """Opening mode, which is always rb.""" return 'rb'
[ "def", "mode", "(", "self", ")", ":", "# type: () -> str", "return", "'rb'" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_internal/network/lazy_wheel.py#L72-L75
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_core.py
python
Window.Refresh
(*args, **kwargs)
return _core_.Window_Refresh(*args, **kwargs)
Refresh(self, bool eraseBackground=True, Rect rect=None) Mark the specified rectangle (or the whole window) as "dirty" so it will be repainted. Causes an EVT_PAINT event to be generated and sent to the window.
Refresh(self, bool eraseBackground=True, Rect rect=None)
[ "Refresh", "(", "self", "bool", "eraseBackground", "=", "True", "Rect", "rect", "=", "None", ")" ]
def Refresh(*args, **kwargs): """ Refresh(self, bool eraseBackground=True, Rect rect=None) Mark the specified rectangle (or the whole window) as "dirty" so it will be repainted. Causes an EVT_PAINT event to be generated and sent to the window. """ return _core_.Window_Refresh(*args, **kwargs)
[ "def", "Refresh", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Window_Refresh", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L10665-L10673
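A minimal wxPython Classic sketch for the Refresh record above, assuming a display is available; the frame title and rectangle are illustrative.

    import wx

    app = wx.App(False)
    frame = wx.Frame(None, title="refresh demo")
    frame.Show()
    frame.Refresh()                               # mark the whole window dirty
    frame.Refresh(False, wx.Rect(0, 0, 100, 50))  # sub-rectangle, skip the background erase
    app.MainLoop()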
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/Tkinter.py
python
Checkbutton.select
(self)
Put the button in on-state.
Put the button in on-state.
[ "Put", "the", "button", "in", "on", "-", "state", "." ]
def select(self): """Put the button in on-state.""" self.tk.call(self._w, 'select')
[ "def", "select", "(", "self", ")", ":", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "'select'", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/Tkinter.py#L2427-L2429
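The record above is the Python 2 Tkinter source; the same API exists in Python 3 under tkinter, which is the spelling used in this sketch:

    import tkinter as tk

    root = tk.Tk()
    var = tk.IntVar()
    check = tk.Checkbutton(root, text="enabled", variable=var)
    check.pack()
    check.select()      # programmatically put the button in the on-state
    print(var.get())    # 1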
abyzovlab/CNVnator
c73786d6160f17b020feae928148533ca036fad2
pytools/io.py
python
IO.get_tree
(self, chr, signal)
return True
ToDo - read tree and return arrays
ToDo - read tree and return arrays
[ "ToDo", "-", "read", "tree", "and", "return", "arrays" ]
def get_tree(self, chr, signal): """ToDo - read tree and return arrays""" return True
[ "def", "get_tree", "(", "self", ",", "chr", ",", "signal", ")", ":", "return", "True" ]
https://github.com/abyzovlab/CNVnator/blob/c73786d6160f17b020feae928148533ca036fad2/pytools/io.py#L115-L117
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/gettext.py
python
test
(condition, true, false)
Implements the C expression: condition ? true : false Required to correctly interpret plural forms.
Implements the C expression:
[ "Implements", "the", "C", "expression", ":" ]
def test(condition, true, false): """ Implements the C expression: condition ? true : false Required to correctly interpret plural forms. """ if condition: return true else: return false
[ "def", "test", "(", "condition", ",", "true", ",", "false", ")", ":", "if", "condition", ":", "return", "true", "else", ":", "return", "false" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/gettext.py#L61-L72
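The helper mirrors a C ternary, which gettext uses when evaluating plural-form expressions; with the function above in scope:

    n = 3
    print(test(n == 1, 'file', 'files'))   # 'files' (condition ? true : false)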
apple/swift-lldb
d74be846ef3e62de946df343e8c234bde93a8912
scripts/Python/static-binding/lldb.py
python
SBType.get_members_array
(self)
return members
An accessor function that returns a list() that contains all members (base classes and fields) in a lldb.SBType object in ascending bit offset order.
An accessor function that returns a list() that contains all members (base classes and fields) in a lldb.SBType object in ascending bit offset order.
[ "An", "accessor", "function", "that", "returns", "a", "list", "()", "that", "contains", "all", "members", "(", "base", "classes", "and", "fields", ")", "in", "a", "lldb", ".", "SBType", "object", "in", "ascending", "bit", "offset", "order", "." ]
def get_members_array(self): '''An accessor function that returns a list() that contains all members (base classes and fields) in a lldb.SBType object in ascending bit offset order.''' members = [] bases = self.get_bases_array() fields = self.get_fields_array() vbases = self.get_vbases_array() for base in bases: bit_offset = base.bit_offset added = False for idx, member in enumerate(members): if member.bit_offset > bit_offset: members.insert(idx, base) added = True break if not added: members.append(base) for vbase in vbases: bit_offset = vbase.bit_offset added = False for idx, member in enumerate(members): if member.bit_offset > bit_offset: members.insert(idx, vbase) added = True break if not added: members.append(vbase) for field in fields: bit_offset = field.bit_offset added = False for idx, member in enumerate(members): if member.bit_offset > bit_offset: members.insert(idx, field) added = True break if not added: members.append(field) return members
[ "def", "get_members_array", "(", "self", ")", ":", "members", "=", "[", "]", "bases", "=", "self", ".", "get_bases_array", "(", ")", "fields", "=", "self", ".", "get_fields_array", "(", ")", "vbases", "=", "self", ".", "get_vbases_array", "(", ")", "for", "base", "in", "bases", ":", "bit_offset", "=", "base", ".", "bit_offset", "added", "=", "False", "for", "idx", ",", "member", "in", "enumerate", "(", "members", ")", ":", "if", "member", ".", "bit_offset", ">", "bit_offset", ":", "members", ".", "insert", "(", "idx", ",", "base", ")", "added", "=", "True", "break", "if", "not", "added", ":", "members", ".", "append", "(", "base", ")", "for", "vbase", "in", "vbases", ":", "bit_offset", "=", "vbase", ".", "bit_offset", "added", "=", "False", "for", "idx", ",", "member", "in", "enumerate", "(", "members", ")", ":", "if", "member", ".", "bit_offset", ">", "bit_offset", ":", "members", ".", "insert", "(", "idx", ",", "vbase", ")", "added", "=", "True", "break", "if", "not", "added", ":", "members", ".", "append", "(", "vbase", ")", "for", "field", "in", "fields", ":", "bit_offset", "=", "field", ".", "bit_offset", "added", "=", "False", "for", "idx", ",", "member", "in", "enumerate", "(", "members", ")", ":", "if", "member", ".", "bit_offset", ">", "bit_offset", ":", "members", ".", "insert", "(", "idx", ",", "field", ")", "added", "=", "True", "break", "if", "not", "added", ":", "members", ".", "append", "(", "field", ")", "return", "members" ]
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L12873-L12909
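A hedged sketch for the lldb accessor above; it is only meaningful inside an lldb Python session with a selected target, and the type name 'MyStruct' is an assumption.

    import lldb

    target = lldb.debugger.GetSelectedTarget()
    t = target.FindFirstType('MyStruct')   # hypothetical type in the debuggee
    for member in t.get_members_array():   # ascending bit-offset order
        print(member.name, member.bit_offset)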
openmm/openmm
cb293447c4fc8b03976dfe11399f107bab70f3d9
wrappers/python/openmm/app/internal/amber_file_parser.py
python
PrmtopLoader.getDihedrals
(self)
return self._dihedralList
Return list of atom quads, K, phase and periodicity for each dihedral angle
Return list of atom quads, K, phase and periodicity for each dihedral angle
[ "Return", "list", "of", "atom", "quads", "K", "phase", "and", "periodicity", "for", "each", "dihedral", "angle" ]
def getDihedrals(self): """Return list of atom quads, K, phase and periodicity for each dihedral angle""" try: return self._dihedralList except AttributeError: pass forceConstant=self._raw_data["DIHEDRAL_FORCE_CONSTANT"] phase=self._raw_data["DIHEDRAL_PHASE"] periodicity=self._raw_data["DIHEDRAL_PERIODICITY"] dihedralPointers = self._raw_data["DIHEDRALS_INC_HYDROGEN"] \ +self._raw_data["DIHEDRALS_WITHOUT_HYDROGEN"] self._dihedralList=[] forceConstConversionFactor = (units.kilocalorie_per_mole).conversion_factor_to(units.kilojoule_per_mole) for ii in range(0,len(dihedralPointers),5): if int(dihedralPointers[ii])<0 or int(dihedralPointers[ii+1])<0: raise Exception("Found negative dihedral atom pointers %s" % ((dihedralPointers[ii], dihedralPointers[ii+1], dihedralPointers[ii+2], dihedralPointers[ii+3]),)) iType=int(dihedralPointers[ii+4])-1 self._dihedralList.append((int(dihedralPointers[ii])//3, int(dihedralPointers[ii+1])//3, abs(int(dihedralPointers[ii+2]))//3, abs(int(dihedralPointers[ii+3]))//3, float(forceConstant[iType])*forceConstConversionFactor, float(phase[iType]), int(0.5+float(periodicity[iType])))) return self._dihedralList
[ "def", "getDihedrals", "(", "self", ")", ":", "try", ":", "return", "self", ".", "_dihedralList", "except", "AttributeError", ":", "pass", "forceConstant", "=", "self", ".", "_raw_data", "[", "\"DIHEDRAL_FORCE_CONSTANT\"", "]", "phase", "=", "self", ".", "_raw_data", "[", "\"DIHEDRAL_PHASE\"", "]", "periodicity", "=", "self", ".", "_raw_data", "[", "\"DIHEDRAL_PERIODICITY\"", "]", "dihedralPointers", "=", "self", ".", "_raw_data", "[", "\"DIHEDRALS_INC_HYDROGEN\"", "]", "+", "self", ".", "_raw_data", "[", "\"DIHEDRALS_WITHOUT_HYDROGEN\"", "]", "self", ".", "_dihedralList", "=", "[", "]", "forceConstConversionFactor", "=", "(", "units", ".", "kilocalorie_per_mole", ")", ".", "conversion_factor_to", "(", "units", ".", "kilojoule_per_mole", ")", "for", "ii", "in", "range", "(", "0", ",", "len", "(", "dihedralPointers", ")", ",", "5", ")", ":", "if", "int", "(", "dihedralPointers", "[", "ii", "]", ")", "<", "0", "or", "int", "(", "dihedralPointers", "[", "ii", "+", "1", "]", ")", "<", "0", ":", "raise", "Exception", "(", "\"Found negative dihedral atom pointers %s\"", "%", "(", "(", "dihedralPointers", "[", "ii", "]", ",", "dihedralPointers", "[", "ii", "+", "1", "]", ",", "dihedralPointers", "[", "ii", "+", "2", "]", ",", "dihedralPointers", "[", "ii", "+", "3", "]", ")", ",", ")", ")", "iType", "=", "int", "(", "dihedralPointers", "[", "ii", "+", "4", "]", ")", "-", "1", "self", ".", "_dihedralList", ".", "append", "(", "(", "int", "(", "dihedralPointers", "[", "ii", "]", ")", "//", "3", ",", "int", "(", "dihedralPointers", "[", "ii", "+", "1", "]", ")", "//", "3", ",", "abs", "(", "int", "(", "dihedralPointers", "[", "ii", "+", "2", "]", ")", ")", "//", "3", ",", "abs", "(", "int", "(", "dihedralPointers", "[", "ii", "+", "3", "]", ")", ")", "//", "3", ",", "float", "(", "forceConstant", "[", "iType", "]", ")", "*", "forceConstConversionFactor", ",", "float", "(", "phase", "[", "iType", "]", ")", ",", "int", "(", "0.5", "+", "float", "(", "periodicity", "[", "iType", "]", ")", ")", ")", ")", "return", "self", ".", "_dihedralList" ]
https://github.com/openmm/openmm/blob/cb293447c4fc8b03976dfe11399f107bab70f3d9/wrappers/python/openmm/app/internal/amber_file_parser.py#L456-L484
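A hedged sketch for the prmtop record above; it assumes an Amber topology file named 'system.prmtop' and that the internal loader's constructor takes the filename.

    from openmm.app.internal.amber_file_parser import PrmtopLoader

    prmtop = PrmtopLoader('system.prmtop')   # hypothetical input file
    # each entry is (i, j, k, l, force constant, phase, periodicity)
    for i, j, k, l, force_k, phase, periodicity in prmtop.getDihedrals():
        print(i, j, k, l, force_k, phase, periodicity)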
CRYTEK/CRYENGINE
232227c59a220cbbd311576f0fbeba7bb53b2a8c
Code/Tools/waf-1.7.13/waflib/Build.py
python
BuildContext.add_manual_dependency
(self, path, value)
Adds a dependency from a node object to a value:: def build(bld): bld.add_manual_dependency( bld.path.find_resource('wscript'), bld.root.find_resource('/etc/fstab')) :param path: file path :type path: string or :py:class:`waflib.Node.Node` :param value: value to depend on :type value: :py:class:`waflib.Node.Node`, string, or function returning a string
Adds a dependency from a node object to a value::
[ "Adds", "a", "dependency", "from", "a", "node", "object", "to", "a", "value", "::" ]
def add_manual_dependency(self, path, value): """ Adds a dependency from a node object to a value:: def build(bld): bld.add_manual_dependency( bld.path.find_resource('wscript'), bld.root.find_resource('/etc/fstab')) :param path: file path :type path: string or :py:class:`waflib.Node.Node` :param value: value to depend on :type value: :py:class:`waflib.Node.Node`, string, or function returning a string """ if path is None: raise ValueError('Invalid input') if isinstance(path, waflib.Node.Node): node = path elif os.path.isabs(path): node = self.root.find_resource(path) else: node = self.path.find_resource(path) if isinstance(value, list): self.deps_man[id(node)].extend(value) else: self.deps_man[id(node)].append(value)
[ "def", "add_manual_dependency", "(", "self", ",", "path", ",", "value", ")", ":", "if", "path", "is", "None", ":", "raise", "ValueError", "(", "'Invalid input'", ")", "if", "isinstance", "(", "path", ",", "waflib", ".", "Node", ".", "Node", ")", ":", "node", "=", "path", "elif", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "node", "=", "self", ".", "root", ".", "find_resource", "(", "path", ")", "else", ":", "node", "=", "self", ".", "path", ".", "find_resource", "(", "path", ")", "if", "isinstance", "(", "value", ",", "list", ")", ":", "self", ".", "deps_man", "[", "id", "(", "node", ")", "]", ".", "extend", "(", "value", ")", "else", ":", "self", ".", "deps_man", "[", "id", "(", "node", ")", "]", ".", "append", "(", "value", ")" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Code/Tools/waf-1.7.13/waflib/Build.py#L434-L461
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/propgrid.py
python
PyEditor.__init__
(self, *args, **kwargs)
__init__(self) -> PyEditor
__init__(self) -> PyEditor
[ "__init__", "(", "self", ")", "-", ">", "PyEditor" ]
def __init__(self, *args, **kwargs): """__init__(self) -> PyEditor""" _propgrid.PyEditor_swiginit(self,_propgrid.new_PyEditor(*args, **kwargs)) self._SetSelf(self); self._RegisterMethods()
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_propgrid", ".", "PyEditor_swiginit", "(", "self", ",", "_propgrid", ".", "new_PyEditor", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")", "self", ".", "_SetSelf", "(", "self", ")", "self", ".", "_RegisterMethods", "(", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/propgrid.py#L4410-L4413
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/roc/hsadrv/driver.py
python
Stream._get_last_signal
(self)
return self._signals[-1] if self._signals else None
Get the last signal.
Get the last signal.
[ "Get", "the", "last", "signal", "." ]
def _get_last_signal(self): """ Get the last signal. """ return self._signals[-1] if self._signals else None
[ "def", "_get_last_signal", "(", "self", ")", ":", "return", "self", ".", "_signals", "[", "-", "1", "]", "if", "self", ".", "_signals", "else", "None" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/roc/hsadrv/driver.py#L1365-L1369
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/plotting/_core.py
python
boxplot_frame_groupby
( grouped, subplots=True, column=None, fontsize=None, rot=0, grid=True, ax=None, figsize=None, layout=None, sharex=False, sharey=True, backend=None, **kwargs, )
return plot_backend.boxplot_frame_groupby( grouped, subplots=subplots, column=column, fontsize=fontsize, rot=rot, grid=grid, ax=ax, figsize=figsize, layout=layout, sharex=sharex, sharey=sharey, **kwargs, )
Make box plots from DataFrameGroupBy data. Parameters ---------- grouped : Grouped DataFrame subplots : bool * ``False`` - no subplots will be used * ``True`` - create a subplot for each group. column : column name or list of names, or vector Can be any valid input to groupby. fontsize : int or str rot : label rotation angle grid : Setting this to True will show the grid ax : Matplotlib axis object, default None figsize : A tuple (width, height) in inches layout : tuple (optional) The layout of the plot: (rows, columns). sharex : bool, default False Whether x-axes will be shared among subplots. .. versionadded:: 0.23.1 sharey : bool, default True Whether y-axes will be shared among subplots. .. versionadded:: 0.23.1 backend : str, default None Backend to use instead of the backend specified in the option ``plotting.backend``. For instance, 'matplotlib'. Alternatively, to specify the ``plotting.backend`` for the whole session, set ``pd.options.plotting.backend``. .. versionadded:: 1.0.0 **kwargs All other plotting keyword arguments to be passed to matplotlib's boxplot function. Returns ------- dict of key/value = group key/DataFrame.boxplot return value or DataFrame.boxplot return value in case subplots=figures=False Examples -------- >>> import itertools >>> tuples = [t for t in itertools.product(range(1000), range(4))] >>> index = pd.MultiIndex.from_tuples(tuples, names=['lvl0', 'lvl1']) >>> data = np.random.randn(len(index),4) >>> df = pd.DataFrame(data, columns=list('ABCD'), index=index) >>> >>> grouped = df.groupby(level='lvl1') >>> boxplot_frame_groupby(grouped) >>> >>> grouped = df.unstack(level='lvl1').groupby(level=0, axis=1) >>> boxplot_frame_groupby(grouped, subplots=False)
Make box plots from DataFrameGroupBy data.
[ "Make", "box", "plots", "from", "DataFrameGroupBy", "data", "." ]
def boxplot_frame_groupby( grouped, subplots=True, column=None, fontsize=None, rot=0, grid=True, ax=None, figsize=None, layout=None, sharex=False, sharey=True, backend=None, **kwargs, ): """ Make box plots from DataFrameGroupBy data. Parameters ---------- grouped : Grouped DataFrame subplots : bool * ``False`` - no subplots will be used * ``True`` - create a subplot for each group. column : column name or list of names, or vector Can be any valid input to groupby. fontsize : int or str rot : label rotation angle grid : Setting this to True will show the grid ax : Matplotlib axis object, default None figsize : A tuple (width, height) in inches layout : tuple (optional) The layout of the plot: (rows, columns). sharex : bool, default False Whether x-axes will be shared among subplots. .. versionadded:: 0.23.1 sharey : bool, default True Whether y-axes will be shared among subplots. .. versionadded:: 0.23.1 backend : str, default None Backend to use instead of the backend specified in the option ``plotting.backend``. For instance, 'matplotlib'. Alternatively, to specify the ``plotting.backend`` for the whole session, set ``pd.options.plotting.backend``. .. versionadded:: 1.0.0 **kwargs All other plotting keyword arguments to be passed to matplotlib's boxplot function. Returns ------- dict of key/value = group key/DataFrame.boxplot return value or DataFrame.boxplot return value in case subplots=figures=False Examples -------- >>> import itertools >>> tuples = [t for t in itertools.product(range(1000), range(4))] >>> index = pd.MultiIndex.from_tuples(tuples, names=['lvl0', 'lvl1']) >>> data = np.random.randn(len(index),4) >>> df = pd.DataFrame(data, columns=list('ABCD'), index=index) >>> >>> grouped = df.groupby(level='lvl1') >>> boxplot_frame_groupby(grouped) >>> >>> grouped = df.unstack(level='lvl1').groupby(level=0, axis=1) >>> boxplot_frame_groupby(grouped, subplots=False) """ plot_backend = _get_plot_backend(backend) return plot_backend.boxplot_frame_groupby( grouped, subplots=subplots, column=column, fontsize=fontsize, rot=rot, grid=grid, ax=ax, figsize=figsize, layout=layout, sharex=sharex, sharey=sharey, **kwargs, )
[ "def", "boxplot_frame_groupby", "(", "grouped", ",", "subplots", "=", "True", ",", "column", "=", "None", ",", "fontsize", "=", "None", ",", "rot", "=", "0", ",", "grid", "=", "True", ",", "ax", "=", "None", ",", "figsize", "=", "None", ",", "layout", "=", "None", ",", "sharex", "=", "False", ",", "sharey", "=", "True", ",", "backend", "=", "None", ",", "*", "*", "kwargs", ",", ")", ":", "plot_backend", "=", "_get_plot_backend", "(", "backend", ")", "return", "plot_backend", ".", "boxplot_frame_groupby", "(", "grouped", ",", "subplots", "=", "subplots", ",", "column", "=", "column", ",", "fontsize", "=", "fontsize", ",", "rot", "=", "rot", ",", "grid", "=", "grid", ",", "ax", "=", "ax", ",", "figsize", "=", "figsize", ",", "layout", "=", "layout", ",", "sharex", "=", "sharex", ",", "sharey", "=", "sharey", ",", "*", "*", "kwargs", ",", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/plotting/_core.py#L451-L538
dmlc/treelite
df56babb6a4a2d7c29d719c28ce53acfa7dbab3c
python/setup.py
python
BuildExt.build
(self, src_dir, build_dir, generator, build_tool=None)
Build the core library with CMake.
Build the core library with CMake.
[ "Build", "the", "core", "library", "with", "CMake", "." ]
def build(self, src_dir, build_dir, generator, build_tool=None): """Build the core library with CMake.""" cmake_cmd = ['cmake', src_dir, generator] # Flag for cross-compiling for Apple Silicon # We use environment variable because it's the only way to pass down custom flags # through the cibuildwheel package, which otherwise calls `python setup.py bdist_wheel` # command. if 'CIBW_TARGET_OSX_ARM64' in os.environ: cmake_cmd.append("-DCMAKE_OSX_ARCHITECTURES=arm64") self.logger.info('Run CMake command: %s', str(cmake_cmd)) subprocess.check_call(cmake_cmd, cwd=build_dir) if system() != 'Windows': nproc = os.cpu_count() build_cmd = [build_tool, 'treelite', '-j' + str(nproc)] subprocess.check_call(build_cmd, cwd=build_dir) else: subprocess.check_call(['cmake', '--build', '.', '--config', 'Release', '--target', 'treelite'], cwd=build_dir)
[ "def", "build", "(", "self", ",", "src_dir", ",", "build_dir", ",", "generator", ",", "build_tool", "=", "None", ")", ":", "cmake_cmd", "=", "[", "'cmake'", ",", "src_dir", ",", "generator", "]", "# Flag for cross-compiling for Apple Silicon", "# We use environment variable because it's the only way to pass down custom flags", "# through the cibuildwheel package, which otherwise calls `python setup.py bdist_wheel`", "# command.", "if", "'CIBW_TARGET_OSX_ARM64'", "in", "os", ".", "environ", ":", "cmake_cmd", ".", "append", "(", "\"-DCMAKE_OSX_ARCHITECTURES=arm64\"", ")", "self", ".", "logger", ".", "info", "(", "'Run CMake command: %s'", ",", "str", "(", "cmake_cmd", ")", ")", "subprocess", ".", "check_call", "(", "cmake_cmd", ",", "cwd", "=", "build_dir", ")", "if", "system", "(", ")", "!=", "'Windows'", ":", "nproc", "=", "os", ".", "cpu_count", "(", ")", "build_cmd", "=", "[", "build_tool", ",", "'treelite'", ",", "'-j'", "+", "str", "(", "nproc", ")", "]", "subprocess", ".", "check_call", "(", "build_cmd", ",", "cwd", "=", "build_dir", ")", "else", ":", "subprocess", ".", "check_call", "(", "[", "'cmake'", ",", "'--build'", ",", "'.'", ",", "'--config'", ",", "'Release'", ",", "'--target'", ",", "'treelite'", "]", ",", "cwd", "=", "build_dir", ")" ]
https://github.com/dmlc/treelite/blob/df56babb6a4a2d7c29d719c28ce53acfa7dbab3c/python/setup.py#L97-L117
baidu/AnyQ
d94d450d2aaa5f7ed73424b10aa4539835b97527
tools/simnet/train/paddle/losses/log_loss.py
python
LogLoss.__init__
(self, conf_dict)
initialize
initialize
[ "initialize" ]
def __init__(self, conf_dict): """ initialize """ pass
[ "def", "__init__", "(", "self", ",", "conf_dict", ")", ":", "pass" ]
https://github.com/baidu/AnyQ/blob/d94d450d2aaa5f7ed73424b10aa4539835b97527/tools/simnet/train/paddle/losses/log_loss.py#L24-L28
Alexhuszagh/rust-lexical
01fcdcf8efc8850edb35d8fc65fd5f31bd0981a0
scripts/size.py
python
build
(args, level, is_lexical)
Build the project.
Build the project.
[ "Build", "the", "project", "." ]
def build(args, level, is_lexical): '''Build the project.''' os.chdir(f'{home}/lexical-size') command = f'cargo +nightly build' if args.no_default_features: command = f'{command} --no-default-features' features = args.features if is_lexical: if features: features = f'{features},lexical' else: features = 'lexical' if features: command = f'{command} --features={features}' if LEVELS[level] == 'release': command = f'{command} --release' subprocess.check_call( # Use shell for faster performance. # Spawning a new process is a **lot** slower, gives misleading info. command, shell=True, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, )
[ "def", "build", "(", "args", ",", "level", ",", "is_lexical", ")", ":", "os", ".", "chdir", "(", "f'{home}/lexical-size'", ")", "command", "=", "f'cargo +nightly build'", "if", "args", ".", "no_default_features", ":", "command", "=", "f'{command} --no-default-features'", "features", "=", "args", ".", "features", "if", "is_lexical", ":", "if", "features", ":", "features", "=", "f'{features},lexical'", "else", ":", "features", "=", "'lexical'", "if", "features", ":", "command", "=", "f'{command} --features={features}'", "if", "LEVELS", "[", "level", "]", "==", "'release'", ":", "command", "=", "f'{command} --release'", "subprocess", ".", "check_call", "(", "# Use shell for faster performance.", "# Spawning a new process is a **lot** slower, gives misleading info.", "command", ",", "shell", "=", "True", ",", "stderr", "=", "subprocess", ".", "DEVNULL", ",", "stdout", "=", "subprocess", ".", "DEVNULL", ",", ")" ]
https://github.com/Alexhuszagh/rust-lexical/blob/01fcdcf8efc8850edb35d8fc65fd5f31bd0981a0/scripts/size.py#L172-L196
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/ntpath.py
python
_abspath_fallback
(path)
return normpath(path)
Return the absolute version of a path as a fallback function in case `nt._getfullpathname` is not available or raises OSError. See bpo-31047 for more.
Return the absolute version of a path as a fallback function in case `nt._getfullpathname` is not available or raises OSError. See bpo-31047 for more.
[ "Return", "the", "absolute", "version", "of", "a", "path", "as", "a", "fallback", "function", "in", "case", "nt", ".", "_getfullpathname", "is", "not", "available", "or", "raises", "OSError", ".", "See", "bpo", "-", "31047", "for", "more", "." ]
def _abspath_fallback(path): """Return the absolute version of a path as a fallback function in case `nt._getfullpathname` is not available or raises OSError. See bpo-31047 for more. """ path = os.fspath(path) if not isabs(path): if isinstance(path, bytes): cwd = os.getcwdb() else: cwd = os.getcwd() path = join(cwd, path) return normpath(path)
[ "def", "_abspath_fallback", "(", "path", ")", ":", "path", "=", "os", ".", "fspath", "(", "path", ")", "if", "not", "isabs", "(", "path", ")", ":", "if", "isinstance", "(", "path", ",", "bytes", ")", ":", "cwd", "=", "os", ".", "getcwdb", "(", ")", "else", ":", "cwd", "=", "os", ".", "getcwd", "(", ")", "path", "=", "join", "(", "cwd", ",", "path", ")", "return", "normpath", "(", "path", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/ntpath.py#L499-L513
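The function is module-private but importable, and ntpath works on any platform, so the normalization branch can be shown directly:

    import ntpath

    # already absolute: only normalized, no filesystem access needed
    print(ntpath._abspath_fallback('C:/spam/../eggs'))   # C:\eggs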