column              dtype            values
nwo                 stringlengths    5 .. 86
sha                 stringlengths    40 .. 40
path                stringlengths    4 .. 189
language            stringclasses    1 value
identifier          stringlengths    1 .. 94
parameters          stringlengths    2 .. 4.03k
argument_list       stringclasses    1 value
return_statement    stringlengths    0 .. 11.5k
docstring           stringlengths    1 .. 33.2k
docstring_summary   stringlengths    0 .. 5.15k
docstring_tokens    sequence
function            stringlengths    34 .. 151k
function_tokens     sequence
url                 stringlengths    90 .. 278
weolar/miniblink49
1c4678db0594a4abde23d3ebbcc7cd13c3170777
third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/pep8.py
python
BaseReport.start
(self)
Start the timer.
Start the timer.
[ "Start", "the", "timer", "." ]
def start(self):
    """Start the timer."""
    self._start_time = time.time()
[ "def", "start", "(", "self", ")", ":", "self", ".", "_start_time", "=", "time", ".", "time", "(", ")" ]
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/pep8.py#L1474-L1476
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/random_forest.py
python
TensorForestEstimator.predict_proba
( self, x=None, input_fn=None, batch_size=None, as_iterable=False)
return super(TensorForestEstimator, self).predict( x=x, input_fn=input_fn, batch_size=batch_size, as_iterable=as_iterable)
Returns prediction probabilities for given features (classification).

Args:
  x: features.
  input_fn: Input function. If set, x and y must be None.
  batch_size: Override default batch size.
  as_iterable: If True, return an iterable which keeps yielding predictions
    for each example until inputs are exhausted. Note: The inputs must
    terminate if you want the iterable to terminate (e.g. be sure to pass
    num_epochs=1 if you are using something like read_batch_features).

Returns:
  Numpy array of predicted probabilities (or an iterable of predicted
  probabilities if as_iterable is True).

Raises:
  ValueError: If both or neither of x and input_fn were given.
Returns prediction probabilities for given features (classification).
[ "Returns", "prediction", "probabilities", "for", "given", "features", "(", "classification", ")", "." ]
def predict_proba(
    self, x=None, input_fn=None, batch_size=None, as_iterable=False):
  """Returns prediction probabilities for given features (classification).

  Args:
    x: features.
    input_fn: Input function. If set, x and y must be None.
    batch_size: Override default batch size.
    as_iterable: If True, return an iterable which keeps yielding predictions
      for each example until inputs are exhausted. Note: The inputs must
      terminate if you want the iterable to terminate (e.g. be sure to pass
      num_epochs=1 if you are using something like read_batch_features).

  Returns:
    Numpy array of predicted probabilities (or an iterable of predicted
    probabilities if as_iterable is True).

  Raises:
    ValueError: If both or neither of x and input_fn were given.
  """
  return super(TensorForestEstimator, self).predict(
      x=x, input_fn=input_fn, batch_size=batch_size, as_iterable=as_iterable)
[ "def", "predict_proba", "(", "self", ",", "x", "=", "None", ",", "input_fn", "=", "None", ",", "batch_size", "=", "None", ",", "as_iterable", "=", "False", ")", ":", "return", "super", "(", "TensorForestEstimator", ",", "self", ")", ".", "predict", "(", "x", "=", "x", ",", "input_fn", "=", "input_fn", ",", "batch_size", "=", "batch_size", ",", "as_iterable", "=", "as_iterable", ")" ]
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/random_forest.py#L102-L123
rsummers11/CADLab
976ed959a0b5208bb4173127a7ef732ac73a9b6f
lesion_detector_3DCE/rcnn/fio/load_ct_img.py
python
load_prep_img
(imname, slice_idx, spacing, slice_intv, do_clip=False, num_slice=3)
return im, im_scale, c
load volume, windowing, interpolate multiple slices, clip black border, resize according to spacing
load volume, windowing, interpolate multiple slices, clip black border, resize according to spacing
[ "load", "volume", "windowing", "interpolate", "multiple", "slices", "clip", "black", "border", "resize", "according", "to", "spacing" ]
def load_prep_img(imname, slice_idx, spacing, slice_intv, do_clip=False, num_slice=3):
    """load volume, windowing, interpolate multiple slices, clip black border, resize according to spacing"""
    if imname.endswith('.nii.gz') or imname.endswith('.nii'):
        im, mask = load_multislice_img_nifti(imname, slice_idx, slice_intv, do_clip, num_slice)
    else:
        im, mask = load_multislice_img_16bit_png(imname, slice_idx, slice_intv, do_clip, num_slice)

    im = windowing(im, config.WINDOWING)

    if do_clip:  # clip black border
        c = get_range(mask, margin=0)
        im = im[c[0]:c[1] + 1, c[2]:c[3] + 1, :]
        # mask = mask[c[0]:c[1] + 1, c[2]:c[3] + 1]
        # print im.shape
    else:
        c = [0, im.shape[0]-1, 0, im.shape[1]-1]

    im_shape = im.shape[0:2]
    if spacing is not None and config.NORM_SPACING > 0:  # spacing adjust, will overwrite simple scaling
        im_scale = float(spacing) / config.NORM_SPACING
    else:
        im_scale = float(config.SCALE) / float(np.min(im_shape))  # simple scaling

    max_shape = np.max(im_shape)*im_scale
    if max_shape > config.MAX_SIZE:
        im_scale1 = float(config.MAX_SIZE) / max_shape
        im_scale *= im_scale1

    if im_scale != 1:
        im = cv2.resize(im, None, None, fx=im_scale, fy=im_scale, interpolation=cv2.INTER_LINEAR)
        # mask = cv2.resize(mask, None, None, fx=im_scale, fy=im_scale, interpolation=cv2.INTER_LINEAR)

    return im, im_scale, c
[ "def", "load_prep_img", "(", "imname", ",", "slice_idx", ",", "spacing", ",", "slice_intv", ",", "do_clip", "=", "False", ",", "num_slice", "=", "3", ")", ":", "if", "imname", ".", "endswith", "(", "'.nii.gz'", ")", "or", "imname", ".", "endswith", "(", "'.nii'", ")", ":", "im", ",", "mask", "=", "load_multislice_img_nifti", "(", "imname", ",", "slice_idx", ",", "slice_intv", ",", "do_clip", ",", "num_slice", ")", "else", ":", "im", ",", "mask", "=", "load_multislice_img_16bit_png", "(", "imname", ",", "slice_idx", ",", "slice_intv", ",", "do_clip", ",", "num_slice", ")", "im", "=", "windowing", "(", "im", ",", "config", ".", "WINDOWING", ")", "if", "do_clip", ":", "# clip black border", "c", "=", "get_range", "(", "mask", ",", "margin", "=", "0", ")", "im", "=", "im", "[", "c", "[", "0", "]", ":", "c", "[", "1", "]", "+", "1", ",", "c", "[", "2", "]", ":", "c", "[", "3", "]", "+", "1", ",", ":", "]", "# mask = mask[c[0]:c[1] + 1, c[2]:c[3] + 1]", "# print im.shape", "else", ":", "c", "=", "[", "0", ",", "im", ".", "shape", "[", "0", "]", "-", "1", ",", "0", ",", "im", ".", "shape", "[", "1", "]", "-", "1", "]", "im_shape", "=", "im", ".", "shape", "[", "0", ":", "2", "]", "if", "spacing", "is", "not", "None", "and", "config", ".", "NORM_SPACING", ">", "0", ":", "# spacing adjust, will overwrite simple scaling", "im_scale", "=", "float", "(", "spacing", ")", "/", "config", ".", "NORM_SPACING", "else", ":", "im_scale", "=", "float", "(", "config", ".", "SCALE", ")", "/", "float", "(", "np", ".", "min", "(", "im_shape", ")", ")", "# simple scaling", "max_shape", "=", "np", ".", "max", "(", "im_shape", ")", "*", "im_scale", "if", "max_shape", ">", "config", ".", "MAX_SIZE", ":", "im_scale1", "=", "float", "(", "config", ".", "MAX_SIZE", ")", "/", "max_shape", "im_scale", "*=", "im_scale1", "if", "im_scale", "!=", "1", ":", "im", "=", "cv2", ".", "resize", "(", "im", ",", "None", ",", "None", ",", "fx", "=", "im_scale", ",", "fy", "=", "im_scale", ",", "interpolation", "=", "cv2", ".", "INTER_LINEAR", ")", "# mask = cv2.resize(mask, None, None, fx=im_scale, fy=im_scale, interpolation=cv2.INTER_LINEAR)", "return", "im", ",", "im_scale", ",", "c" ]
https://github.com/rsummers11/CADLab/blob/976ed959a0b5208bb4173127a7ef732ac73a9b6f/lesion_detector_3DCE/rcnn/fio/load_ct_img.py#L16-L48
OAID/Tengine
66b2c22ad129d25e2fc6de3b22a608bb54dd90db
pytengine/tengine/node.py
python
Node.getOutputTensorByIdx
(self, idx)
return Tensor(tensor=tensor)
Get the output tensor handle of a node. :param idx: <int> The index of the output tensor. :return: The tensor handle or None on error.
Get the output tensor handle of a node. :param idx: <int> The index of the output tensor. :return: The tensor handle or None on error.
[ "Get", "the", "output", "tensor", "handle", "of", "a", "node", ".", ":", "param", "idx", ":", "<int", ">", "The", "index", "of", "the", "output", "tensor", ".", ":", "return", ":", "The", "tensor", "handle", "or", "None", "on", "error", "." ]
def getOutputTensorByIdx(self, idx):
    """
    Get the output tensor handle of a node.
    :param idx: <int> The index of the output tensor.
    :return: The tensor handle or None on error.
    """
    _LIB.get_node_output_tensor.restype = tensor_t
    tensor = _LIB.get_node_output_tensor(ctypes.c_void_p(self.node), idx)
    return Tensor(tensor=tensor)
[ "def", "getOutputTensorByIdx", "(", "self", ",", "idx", ")", ":", "_LIB", ".", "get_node_output_tensor", ".", "restype", "=", "tensor_t", "tensor", "=", "_LIB", ".", "get_node_output_tensor", "(", "ctypes", ".", "c_void_p", "(", "self", ".", "node", ")", ",", "idx", ")", "return", "Tensor", "(", "tensor", "=", "tensor", ")" ]
https://github.com/OAID/Tengine/blob/66b2c22ad129d25e2fc6de3b22a608bb54dd90db/pytengine/tengine/node.py#L65-L73
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/tornado/tornado-6/tornado/gen.py
python
is_coroutine_function
(func: Any)
return getattr(func, "__tornado_coroutine__", False)
Return whether *func* is a coroutine function, i.e. a function wrapped with `~.gen.coroutine`. .. versionadded:: 4.5
Return whether *func* is a coroutine function, i.e. a function wrapped with `~.gen.coroutine`.
[ "Return", "whether", "*", "func", "*", "is", "a", "coroutine", "function", "i", ".", "e", ".", "a", "function", "wrapped", "with", "~", ".", "gen", ".", "coroutine", "." ]
def is_coroutine_function(func: Any) -> bool:
    """Return whether *func* is a coroutine function, i.e. a function
    wrapped with `~.gen.coroutine`.

    .. versionadded:: 4.5
    """
    return getattr(func, "__tornado_coroutine__", False)
[ "def", "is_coroutine_function", "(", "func", ":", "Any", ")", "->", "bool", ":", "return", "getattr", "(", "func", ",", "\"__tornado_coroutine__\"", ",", "False", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/tornado/tornado-6/tornado/gen.py#L273-L279
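A quick usage sketch for the record above (assuming tornado >= 4.5 is installed; all names come straight from tornado.gen):

from tornado import gen

@gen.coroutine
def fetch():
    yield gen.sleep(0.1)

def plain():
    pass

print(gen.is_coroutine_function(fetch))  # True: wrapped by @gen.coroutine
print(gen.is_coroutine_function(plain))  # False: plain function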
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/keras/backend.py
python
backend
()
return 'tensorflow'
Publicly accessible method for determining the current backend. Only exists for API compatibility with multi-backend Keras. Returns: The string "tensorflow".
Publicly accessible method for determining the current backend.
[ "Publicly", "accessible", "method", "for", "determining", "the", "current", "backend", "." ]
def backend():
  """Publicly accessible method for determining the current backend.

  Only exists for API compatibility with multi-backend Keras.

  Returns:
    The string "tensorflow".
  """
  return 'tensorflow'
[ "def", "backend", "(", ")", ":", "return", "'tensorflow'" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/backend.py#L175-L183
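For completeness, a one-line check of the shim above (a sketch, assuming TensorFlow 2.x is installed; tf.keras.backend.backend is the public alias):

import tensorflow as tf

print(tf.keras.backend.backend())  # 'tensorflow'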
google/mysql-protobuf
467cda676afaa49e762c5c9164a43f6ad31a1fbf
libevent/event_rpcgen.py
python
Struct.PrintIdented
(self, file, ident, code)
Takes an array, add indentation to each entry and prints it.
Takes an array, add indentation to each entry and prints it.
[ "Takes", "an", "array", "add", "indentation", "to", "each", "entry", "and", "prints", "it", "." ]
def PrintIdented(self, file, ident, code):
    """Takes an array, add indentation to each entry and prints it."""
    for entry in code:
        print >>file, '%s%s' % (ident, entry)
[ "def", "PrintIdented", "(", "self", ",", "file", ",", "ident", ",", "code", ")", ":", "for", "entry", "in", "code", ":", "print", ">>", "file", ",", "'%s%s'", "%", "(", "ident", ",", "entry", ")" ]
https://github.com/google/mysql-protobuf/blob/467cda676afaa49e762c5c9164a43f6ad31a1fbf/libevent/event_rpcgen.py#L52-L55
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
contrib/gizmos/osx_cocoa/gizmos.py
python
TreeListCtrl.EditLabel
(*args, **kwargs)
return _gizmos.TreeListCtrl_EditLabel(*args, **kwargs)
EditLabel(self, TreeItemId item, int column=-1)
EditLabel(self, TreeItemId item, int column=-1)
[ "EditLabel", "(", "self", "TreeItemId", "item", "int", "column", "=", "-", "1", ")" ]
def EditLabel(*args, **kwargs):
    """EditLabel(self, TreeItemId item, int column=-1)"""
    return _gizmos.TreeListCtrl_EditLabel(*args, **kwargs)
[ "def", "EditLabel", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gizmos", ".", "TreeListCtrl_EditLabel", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/contrib/gizmos/osx_cocoa/gizmos.py#L923-L925
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/extern/aui/auibar.py
python
AuiToolBar.OnLeftUp
(self, event)
Handles the ``wx.EVT_LEFT_UP`` event for :class:`AuiToolBar`. :param `event`: a :class:`MouseEvent` event to be processed.
Handles the ``wx.EVT_LEFT_UP`` event for :class:`AuiToolBar`.
[ "Handles", "the", "wx", ".", "EVT_LEFT_UP", "event", "for", ":", "class", ":", "AuiToolBar", "." ]
def OnLeftUp(self, event):
    """
    Handles the ``wx.EVT_LEFT_UP`` event for :class:`AuiToolBar`.

    :param `event`: a :class:`MouseEvent` event to be processed.
    """
    self.SetPressedItem(None)

    hit_item = self.FindToolForPosition(*event.GetPosition())
    if hit_item and not hit_item.state & AUI_BUTTON_STATE_DISABLED:
        self.SetHoverItem(hit_item)

    if self._dragging:
        # reset drag and drop member variables
        self._dragging = False
        self._action_pos = wx.Point(-1, -1)
        self._action_item = None
    else:
        if self._action_item and hit_item == self._action_item:
            self.SetToolTipString("")

            if hit_item.kind in [ITEM_CHECK, ITEM_RADIO]:
                toggle = not (self._action_item.state & AUI_BUTTON_STATE_CHECKED)
                self.ToggleTool(self._action_item.id, toggle)

                # repaint immediately
                self.Refresh(False)
                self.Update()

                e = wx.CommandEvent(wx.wxEVT_COMMAND_MENU_SELECTED, self._action_item.id)
                e.SetEventObject(self)
                e.SetInt(toggle)
                self._action_pos = wx.Point(-1, -1)
                self._action_item = None

                self.ProcessEvent(e)
                self.DoIdleUpdate()
            else:
                if self._action_item.id == ID_RESTORE_FRAME:
                    # find aui manager
                    manager = self.GetAuiManager()
                    if not manager:
                        return

                    if self._action_item.target:
                        pane = manager.GetPane(self._action_item.target)
                    else:
                        pane = manager.GetPane(self)

                    e = framemanager.AuiManagerEvent(framemanager.wxEVT_AUI_PANE_MIN_RESTORE)
                    e.SetManager(manager)
                    e.SetPane(pane)
                    manager.ProcessEvent(e)
                    self.DoIdleUpdate()
                else:
                    e = wx.CommandEvent(wx.wxEVT_COMMAND_MENU_SELECTED, self._action_item.id)
                    e.SetEventObject(self)
                    self.ProcessEvent(e)
                    self.DoIdleUpdate()

        # reset drag and drop member variables
        self._dragging = False
        self._action_pos = wx.Point(-1, -1)
        self._action_item = None
[ "def", "OnLeftUp", "(", "self", ",", "event", ")", ":", "self", ".", "SetPressedItem", "(", "None", ")", "hit_item", "=", "self", ".", "FindToolForPosition", "(", "*", "event", ".", "GetPosition", "(", ")", ")", "if", "hit_item", "and", "not", "hit_item", ".", "state", "&", "AUI_BUTTON_STATE_DISABLED", ":", "self", ".", "SetHoverItem", "(", "hit_item", ")", "if", "self", ".", "_dragging", ":", "# reset drag and drop member variables", "self", ".", "_dragging", "=", "False", "self", ".", "_action_pos", "=", "wx", ".", "Point", "(", "-", "1", ",", "-", "1", ")", "self", ".", "_action_item", "=", "None", "else", ":", "if", "self", ".", "_action_item", "and", "hit_item", "==", "self", ".", "_action_item", ":", "self", ".", "SetToolTipString", "(", "\"\"", ")", "if", "hit_item", ".", "kind", "in", "[", "ITEM_CHECK", ",", "ITEM_RADIO", "]", ":", "toggle", "=", "not", "(", "self", ".", "_action_item", ".", "state", "&", "AUI_BUTTON_STATE_CHECKED", ")", "self", ".", "ToggleTool", "(", "self", ".", "_action_item", ".", "id", ",", "toggle", ")", "# repaint immediately", "self", ".", "Refresh", "(", "False", ")", "self", ".", "Update", "(", ")", "e", "=", "wx", ".", "CommandEvent", "(", "wx", ".", "wxEVT_COMMAND_MENU_SELECTED", ",", "self", ".", "_action_item", ".", "id", ")", "e", ".", "SetEventObject", "(", "self", ")", "e", ".", "SetInt", "(", "toggle", ")", "self", ".", "_action_pos", "=", "wx", ".", "Point", "(", "-", "1", ",", "-", "1", ")", "self", ".", "_action_item", "=", "None", "self", ".", "ProcessEvent", "(", "e", ")", "self", ".", "DoIdleUpdate", "(", ")", "else", ":", "if", "self", ".", "_action_item", ".", "id", "==", "ID_RESTORE_FRAME", ":", "# find aui manager", "manager", "=", "self", ".", "GetAuiManager", "(", ")", "if", "not", "manager", ":", "return", "if", "self", ".", "_action_item", ".", "target", ":", "pane", "=", "manager", ".", "GetPane", "(", "self", ".", "_action_item", ".", "target", ")", "else", ":", "pane", "=", "manager", ".", "GetPane", "(", "self", ")", "e", "=", "framemanager", ".", "AuiManagerEvent", "(", "framemanager", ".", "wxEVT_AUI_PANE_MIN_RESTORE", ")", "e", ".", "SetManager", "(", "manager", ")", "e", ".", "SetPane", "(", "pane", ")", "manager", ".", "ProcessEvent", "(", "e", ")", "self", ".", "DoIdleUpdate", "(", ")", "else", ":", "e", "=", "wx", ".", "CommandEvent", "(", "wx", ".", "wxEVT_COMMAND_MENU_SELECTED", ",", "self", ".", "_action_item", ".", "id", ")", "e", ".", "SetEventObject", "(", "self", ")", "self", ".", "ProcessEvent", "(", "e", ")", "self", ".", "DoIdleUpdate", "(", ")", "# reset drag and drop member variables", "self", ".", "_dragging", "=", "False", "self", ".", "_action_pos", "=", "wx", ".", "Point", "(", "-", "1", ",", "-", "1", ")", "self", ".", "_action_item", "=", "None" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/extern/aui/auibar.py#L3636-L3710
root-project/root
fcd3583bb14852bf2e8cd2415717cbaac0e75896
interpreter/llvm/src/tools/clang/tools/scan-build-py/libscanbuild/clang.py
python
get_checkers
(clang, plugins)
return checkers
Get all the available checkers from default and from the plugins.

:param clang: the compiler we are using
:param plugins: list of plugins which was requested by the user
:return: a dictionary of all available checkers and its status

{<checker name>: (<checker description>, <is active by default>)}
Get all the available checkers from default and from the plugins.
[ "Get", "all", "the", "available", "checkers", "from", "default", "and", "from", "the", "plugins", "." ]
def get_checkers(clang, plugins):
    """ Get all the available checkers from default and from the plugins.

    :param clang: the compiler we are using
    :param plugins: list of plugins which was requested by the user
    :return: a dictionary of all available checkers and its status

    {<checker name>: (<checker description>, <is active by default>)} """

    load = [elem for plugin in plugins for elem in ['-load', plugin]]
    cmd = [clang, '-cc1'] + load + ['-analyzer-checker-help']

    lines = run_command(cmd)

    is_active_checker = is_active(get_active_checkers(clang, plugins))

    checkers = {
        name: (description, is_active_checker(name))
        for name, description in parse_checkers(lines)
    }
    if not checkers:
        raise Exception('Could not query Clang for available checkers.')

    return checkers
[ "def", "get_checkers", "(", "clang", ",", "plugins", ")", ":", "load", "=", "[", "elem", "for", "plugin", "in", "plugins", "for", "elem", "in", "[", "'-load'", ",", "plugin", "]", "]", "cmd", "=", "[", "clang", ",", "'-cc1'", "]", "+", "load", "+", "[", "'-analyzer-checker-help'", "]", "lines", "=", "run_command", "(", "cmd", ")", "is_active_checker", "=", "is_active", "(", "get_active_checkers", "(", "clang", ",", "plugins", ")", ")", "checkers", "=", "{", "name", ":", "(", "description", ",", "is_active_checker", "(", "name", ")", ")", "for", "name", ",", "description", "in", "parse_checkers", "(", "lines", ")", "}", "if", "not", "checkers", ":", "raise", "Exception", "(", "'Could not query Clang for available checkers.'", ")", "return", "checkers" ]
https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/interpreter/llvm/src/tools/clang/tools/scan-build-py/libscanbuild/clang.py#L132-L155
bareos/bareos
56a10bb368b0a81e977bb51304033fe49d59efb0
restapi/bareos_restapi/__init__.py
python
read_all_filesets
( *, response: Response, current_user: User = Depends(get_current_user), verbose: Optional[bareosBool] = Query("yes", title="Verbose output"), )
return show_configuration_items( response=response, current_user=current_user, itemType="filesets", verbose=verbose, )
Read all jobdef resources. Built on console command _show filesets_. Needs at least Bareos Version >= 20.0.0
Read all jobdef resources. Built on console command _show filesets_.
[ "Read", "all", "jobdef", "resources", ".", "Built", "on", "console", "command", "_show", "filesets_", "." ]
def read_all_filesets(
    *,
    response: Response,
    current_user: User = Depends(get_current_user),
    verbose: Optional[bareosBool] = Query("yes", title="Verbose output"),
):
    """
    Read all jobdef resources. Built on console command _show filesets_.
    Needs at least Bareos Version >= 20.0.0
    """
    return show_configuration_items(
        response=response,
        current_user=current_user,
        itemType="filesets",
        verbose=verbose,
    )
[ "def", "read_all_filesets", "(", "*", ",", "response", ":", "Response", ",", "current_user", ":", "User", "=", "Depends", "(", "get_current_user", ")", ",", "verbose", ":", "Optional", "[", "bareosBool", "]", "=", "Query", "(", "\"yes\"", ",", "title", "=", "\"Verbose output\"", ")", ",", ")", ":", "return", "show_configuration_items", "(", "response", "=", "response", ",", "current_user", "=", "current_user", ",", "itemType", "=", "\"filesets\"", ",", "verbose", "=", "verbose", ",", ")" ]
https://github.com/bareos/bareos/blob/56a10bb368b0a81e977bb51304033fe49d59efb0/restapi/bareos_restapi/__init__.py#L593-L609
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/roc/compiler.py
python
_unpack_argument
(ty, val, kernelargs, retr)
Convert arguments to ctypes and append to kernelargs
Convert arguments to ctypes and append to kernelargs
[ "Convert", "arguments", "to", "ctypes", "and", "append", "to", "kernelargs" ]
def _unpack_argument(ty, val, kernelargs, retr):
    """
    Convert arguments to ctypes and append to kernelargs
    """
    if isinstance(ty, types.Array):
        c_intp = ctypes.c_ssize_t

        # if a dgpu is present, move the data to the device.
        if dgpu_present:
            devary, conv = devicearray.auto_device(val, devices.get_context())
            if conv:
                retr.append(lambda: devary.copy_to_host(val))
            data = devary.device_ctypes_pointer
        else:
            data = ctypes.c_void_p(val.ctypes.data)

        meminfo = parent = ctypes.c_void_p(0)
        nitems = c_intp(val.size)
        itemsize = c_intp(val.dtype.itemsize)
        kernelargs.append(meminfo)
        kernelargs.append(parent)
        kernelargs.append(nitems)
        kernelargs.append(itemsize)
        kernelargs.append(data)
        for ax in range(val.ndim):
            kernelargs.append(c_intp(val.shape[ax]))
        for ax in range(val.ndim):
            kernelargs.append(c_intp(val.strides[ax]))

    elif isinstance(ty, types.Integer):
        cval = getattr(ctypes, "c_%s" % ty)(val)
        kernelargs.append(cval)

    elif ty == types.float64:
        cval = ctypes.c_double(val)
        kernelargs.append(cval)

    elif ty == types.float32:
        cval = ctypes.c_float(val)
        kernelargs.append(cval)

    elif ty == types.boolean:
        cval = ctypes.c_uint8(int(val))
        kernelargs.append(cval)

    elif ty == types.complex64:
        kernelargs.append(ctypes.c_float(val.real))
        kernelargs.append(ctypes.c_float(val.imag))

    elif ty == types.complex128:
        kernelargs.append(ctypes.c_double(val.real))
        kernelargs.append(ctypes.c_double(val.imag))

    else:
        raise NotImplementedError(ty, val)
[ "def", "_unpack_argument", "(", "ty", ",", "val", ",", "kernelargs", ",", "retr", ")", ":", "if", "isinstance", "(", "ty", ",", "types", ".", "Array", ")", ":", "c_intp", "=", "ctypes", ".", "c_ssize_t", "# if a dgpu is present, move the data to the device.", "if", "dgpu_present", ":", "devary", ",", "conv", "=", "devicearray", ".", "auto_device", "(", "val", ",", "devices", ".", "get_context", "(", ")", ")", "if", "conv", ":", "retr", ".", "append", "(", "lambda", ":", "devary", ".", "copy_to_host", "(", "val", ")", ")", "data", "=", "devary", ".", "device_ctypes_pointer", "else", ":", "data", "=", "ctypes", ".", "c_void_p", "(", "val", ".", "ctypes", ".", "data", ")", "meminfo", "=", "parent", "=", "ctypes", ".", "c_void_p", "(", "0", ")", "nitems", "=", "c_intp", "(", "val", ".", "size", ")", "itemsize", "=", "c_intp", "(", "val", ".", "dtype", ".", "itemsize", ")", "kernelargs", ".", "append", "(", "meminfo", ")", "kernelargs", ".", "append", "(", "parent", ")", "kernelargs", ".", "append", "(", "nitems", ")", "kernelargs", ".", "append", "(", "itemsize", ")", "kernelargs", ".", "append", "(", "data", ")", "for", "ax", "in", "range", "(", "val", ".", "ndim", ")", ":", "kernelargs", ".", "append", "(", "c_intp", "(", "val", ".", "shape", "[", "ax", "]", ")", ")", "for", "ax", "in", "range", "(", "val", ".", "ndim", ")", ":", "kernelargs", ".", "append", "(", "c_intp", "(", "val", ".", "strides", "[", "ax", "]", ")", ")", "elif", "isinstance", "(", "ty", ",", "types", ".", "Integer", ")", ":", "cval", "=", "getattr", "(", "ctypes", ",", "\"c_%s\"", "%", "ty", ")", "(", "val", ")", "kernelargs", ".", "append", "(", "cval", ")", "elif", "ty", "==", "types", ".", "float64", ":", "cval", "=", "ctypes", ".", "c_double", "(", "val", ")", "kernelargs", ".", "append", "(", "cval", ")", "elif", "ty", "==", "types", ".", "float32", ":", "cval", "=", "ctypes", ".", "c_float", "(", "val", ")", "kernelargs", ".", "append", "(", "cval", ")", "elif", "ty", "==", "types", ".", "boolean", ":", "cval", "=", "ctypes", ".", "c_uint8", "(", "int", "(", "val", ")", ")", "kernelargs", ".", "append", "(", "cval", ")", "elif", "ty", "==", "types", ".", "complex64", ":", "kernelargs", ".", "append", "(", "ctypes", ".", "c_float", "(", "val", ".", "real", ")", ")", "kernelargs", ".", "append", "(", "ctypes", ".", "c_float", "(", "val", ".", "imag", ")", ")", "elif", "ty", "==", "types", ".", "complex128", ":", "kernelargs", ".", "append", "(", "ctypes", ".", "c_double", "(", "val", ".", "real", ")", ")", "kernelargs", ".", "append", "(", "ctypes", ".", "c_double", "(", "val", ".", "imag", ")", ")", "else", ":", "raise", "NotImplementedError", "(", "ty", ",", "val", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/roc/compiler.py#L374-L428
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_core.py
python
BookCtrlEvent.SetSelection
(*args, **kwargs)
return _core_.BookCtrlEvent_SetSelection(*args, **kwargs)
SetSelection(self, int nSel)
SetSelection(self, int nSel)
[ "SetSelection", "(", "self", "int", "nSel", ")" ]
def SetSelection(*args, **kwargs):
    """SetSelection(self, int nSel)"""
    return _core_.BookCtrlEvent_SetSelection(*args, **kwargs)
[ "def", "SetSelection", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "BookCtrlEvent_SetSelection", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L13719-L13721
cvxpy/cvxpy
5165b4fb750dfd237de8659383ef24b4b2e33aaf
cvxpy/atoms/affine/trace.py
python
trace.shape_from_args
(self)
return tuple()
Always scalar.
Always scalar.
[ "Always", "scalar", "." ]
def shape_from_args(self) -> Tuple[int, ...]:
    """Always scalar.
    """
    return tuple()
[ "def", "shape_from_args", "(", "self", ")", "->", "Tuple", "[", "int", ",", "...", "]", ":", "return", "tuple", "(", ")" ]
https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/atoms/affine/trace.py#L51-L54
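A small sanity check of the scalar-shape contract above (a sketch, assuming cvxpy is installed):

import cvxpy as cp

X = cp.Variable((3, 3))
expr = cp.trace(X)
print(expr.shape)        # () -- trace is always a scalar expression
print(expr.is_scalar())  # True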
krishauser/Klampt
972cc83ea5befac3f653c1ba20f80155768ad519
Python/klampt/robotsim.py
python
Appearance.setTexgen
(self, np_array2: Vector, worldcoordinates: bool=False)
return _robotsim.Appearance_setTexgen(self, np_array2, worldcoordinates)
Sets the texture generation. The array must be size m x 4, with m in the
range 0,...,4. If worldcoordinates=true, the texture generation is
performed in world coordinates rather than object coordinates.

Args:
    np_array2 (:obj:`2D Numpy array of floats`)
    worldcoordinates (bool, optional): default value False
Sets the texture generation. The array must be size m x 4, with m in the range 0,...,4. If worldcoordinates=true, the texture generation is performed in world coordinates rather than object coordinates.
[ "r", "Sets", "the", "texture", "generation", ".", "The", "array", "must", "be", "size", "m", "x", "4", "with", "m", "in", "the", "range", "0", "...", "4", ".", "If", "worldcoordinates", "=", "true", "the", "texture", "generation", "is", "performed", "in", "world", "coordinates", "rather", "than", "object", "coordinates", "." ]
def setTexgen(self, np_array2: Vector, worldcoordinates: bool=False) -> None:
    r"""
    Sets the texture generation. The array must be size m x 4, with m in the
    range 0,...,4. If worldcoordinates=true, the texture generation is
    performed in world coordinates rather than object coordinates.

    Args:
        np_array2 (:obj:`2D Numpy array of floats`)
        worldcoordinates (bool, optional): default value False
    """
    return _robotsim.Appearance_setTexgen(self, np_array2, worldcoordinates)
[ "def", "setTexgen", "(", "self", ",", "np_array2", ":", "Vector", ",", "worldcoordinates", ":", "bool", "=", "False", ")", "->", "None", ":", "return", "_robotsim", ".", "Appearance_setTexgen", "(", "self", ",", "np_array2", ",", "worldcoordinates", ")" ]
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/robotsim.py#L3048-L3058
macchina-io/macchina.io
ef24ba0e18379c3dd48fb84e6dbf991101cb8db0
platform/JS/V8/tools/gyp/pylib/gyp/generator/dump_dependency_json.py
python
CalculateGeneratorInputInfo
(params)
Calculate the generator specific info that gets fed to input (called by gyp).
Calculate the generator specific info that gets fed to input (called by gyp).
[ "Calculate", "the", "generator", "specific", "info", "that", "gets", "fed", "to", "input", "(", "called", "by", "gyp", ")", "." ]
def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  generator_flags = params.get('generator_flags', {})
  if generator_flags.get('adjust_static_libraries', False):
    global generator_wants_static_library_dependencies_adjusted
    generator_wants_static_library_dependencies_adjusted = True

  toplevel = params['options'].toplevel_dir
  generator_dir = os.path.relpath(params['options'].generator_output or '.')
  # output_dir: relative path from generator_dir to the build directory.
  output_dir = generator_flags.get('output_dir', 'out')
  qualified_out_dir = os.path.normpath(os.path.join(
      toplevel, generator_dir, output_dir, 'gypfiles'))
  global generator_filelist_paths
  generator_filelist_paths = {
      'toplevel': toplevel,
      'qualified_out_dir': qualified_out_dir,
  }
[ "def", "CalculateGeneratorInputInfo", "(", "params", ")", ":", "generator_flags", "=", "params", ".", "get", "(", "'generator_flags'", ",", "{", "}", ")", "if", "generator_flags", ".", "get", "(", "'adjust_static_libraries'", ",", "False", ")", ":", "global", "generator_wants_static_library_dependencies_adjusted", "generator_wants_static_library_dependencies_adjusted", "=", "True", "toplevel", "=", "params", "[", "'options'", "]", ".", "toplevel_dir", "generator_dir", "=", "os", ".", "path", ".", "relpath", "(", "params", "[", "'options'", "]", ".", "generator_output", "or", "'.'", ")", "# output_dir: relative path from generator_dir to the build directory.", "output_dir", "=", "generator_flags", ".", "get", "(", "'output_dir'", ",", "'out'", ")", "qualified_out_dir", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "toplevel", ",", "generator_dir", ",", "output_dir", ",", "'gypfiles'", ")", ")", "global", "generator_filelist_paths", "generator_filelist_paths", "=", "{", "'toplevel'", ":", "toplevel", ",", "'qualified_out_dir'", ":", "qualified_out_dir", ",", "}" ]
https://github.com/macchina-io/macchina.io/blob/ef24ba0e18379c3dd48fb84e6dbf991101cb8db0/platform/JS/V8/tools/gyp/pylib/gyp/generator/dump_dependency_json.py#L54-L72
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/fft/fftpack.py
python
irfft2
(a, s=None, axes=(-2, -1))
return irfftn(a, s, axes)
Compute the 2-dimensional inverse FFT of a real array.

Parameters
----------
a : array_like
    The input array
s : sequence of ints, optional
    Shape of the inverse FFT.
axes : sequence of ints, optional
    The axes over which to compute the inverse fft.
    Default is the last two axes.

Returns
-------
out : ndarray
    The result of the inverse real 2-D FFT.

See Also
--------
irfftn : Compute the inverse of the N-dimensional FFT of real input.

Notes
-----
This is really `irfftn` with different defaults.
For more details see `irfftn`.
Compute the 2-dimensional inverse FFT of a real array.
[ "Compute", "the", "2", "-", "dimensional", "inverse", "FFT", "of", "a", "real", "array", "." ]
def irfft2(a, s=None, axes=(-2, -1)):
    """
    Compute the 2-dimensional inverse FFT of a real array.

    Parameters
    ----------
    a : array_like
        The input array
    s : sequence of ints, optional
        Shape of the inverse FFT.
    axes : sequence of ints, optional
        The axes over which to compute the inverse fft.
        Default is the last two axes.

    Returns
    -------
    out : ndarray
        The result of the inverse real 2-D FFT.

    See Also
    --------
    irfftn : Compute the inverse of the N-dimensional FFT of real input.

    Notes
    -----
    This is really `irfftn` with different defaults.
    For more details see `irfftn`.

    """
    return irfftn(a, s, axes)
[ "def", "irfft2", "(", "a", ",", "s", "=", "None", ",", "axes", "=", "(", "-", "2", ",", "-", "1", ")", ")", ":", "return", "irfftn", "(", "a", ",", "s", ",", "axes", ")" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/fft/fftpack.py#L1097-L1127
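A round-trip sketch showing the documented defaults in action (assumes only NumPy):

import numpy as np

a = np.random.rand(4, 6)                # real 2-D input
spec = np.fft.rfft2(a)                  # forward real FFT
back = np.fft.irfft2(spec, s=a.shape)   # pass s to recover odd-sized axes
print(np.allclose(a, back))             # True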
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/ntpath.py
python
relpath
(path, start=None)
Return a relative version of a path
Return a relative version of a path
[ "Return", "a", "relative", "version", "of", "a", "path" ]
def relpath(path, start=None):
    """Return a relative version of a path"""
    path = os.fspath(path)
    if isinstance(path, bytes):
        sep = b'\\'
        curdir = b'.'
        pardir = b'..'
    else:
        sep = '\\'
        curdir = '.'
        pardir = '..'

    if start is None:
        start = curdir

    if not path:
        raise ValueError("no path specified")

    start = os.fspath(start)
    try:
        start_abs = abspath(normpath(start))
        path_abs = abspath(normpath(path))
        start_drive, start_rest = splitdrive(start_abs)
        path_drive, path_rest = splitdrive(path_abs)
        if normcase(start_drive) != normcase(path_drive):
            raise ValueError("path is on mount %r, start on mount %r" % (
                path_drive, start_drive))

        start_list = [x for x in start_rest.split(sep) if x]
        path_list = [x for x in path_rest.split(sep) if x]
        # Work out how much of the filepath is shared by start and path.
        i = 0
        for e1, e2 in zip(start_list, path_list):
            if normcase(e1) != normcase(e2):
                break
            i += 1

        rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
        if not rel_list:
            return curdir
        return join(*rel_list)
    except (TypeError, ValueError, AttributeError, BytesWarning, DeprecationWarning):
        genericpath._check_arg_types('relpath', path, start)
        raise
[ "def", "relpath", "(", "path", ",", "start", "=", "None", ")", ":", "path", "=", "os", ".", "fspath", "(", "path", ")", "if", "isinstance", "(", "path", ",", "bytes", ")", ":", "sep", "=", "b'\\\\'", "curdir", "=", "b'.'", "pardir", "=", "b'..'", "else", ":", "sep", "=", "'\\\\'", "curdir", "=", "'.'", "pardir", "=", "'..'", "if", "start", "is", "None", ":", "start", "=", "curdir", "if", "not", "path", ":", "raise", "ValueError", "(", "\"no path specified\"", ")", "start", "=", "os", ".", "fspath", "(", "start", ")", "try", ":", "start_abs", "=", "abspath", "(", "normpath", "(", "start", ")", ")", "path_abs", "=", "abspath", "(", "normpath", "(", "path", ")", ")", "start_drive", ",", "start_rest", "=", "splitdrive", "(", "start_abs", ")", "path_drive", ",", "path_rest", "=", "splitdrive", "(", "path_abs", ")", "if", "normcase", "(", "start_drive", ")", "!=", "normcase", "(", "path_drive", ")", ":", "raise", "ValueError", "(", "\"path is on mount %r, start on mount %r\"", "%", "(", "path_drive", ",", "start_drive", ")", ")", "start_list", "=", "[", "x", "for", "x", "in", "start_rest", ".", "split", "(", "sep", ")", "if", "x", "]", "path_list", "=", "[", "x", "for", "x", "in", "path_rest", ".", "split", "(", "sep", ")", "if", "x", "]", "# Work out how much of the filepath is shared by start and path.", "i", "=", "0", "for", "e1", ",", "e2", "in", "zip", "(", "start_list", ",", "path_list", ")", ":", "if", "normcase", "(", "e1", ")", "!=", "normcase", "(", "e2", ")", ":", "break", "i", "+=", "1", "rel_list", "=", "[", "pardir", "]", "*", "(", "len", "(", "start_list", ")", "-", "i", ")", "+", "path_list", "[", "i", ":", "]", "if", "not", "rel_list", ":", "return", "curdir", "return", "join", "(", "*", "rel_list", ")", "except", "(", "TypeError", ",", "ValueError", ",", "AttributeError", ",", "BytesWarning", ",", "DeprecationWarning", ")", ":", "genericpath", ".", "_check_arg_types", "(", "'relpath'", ",", "path", ",", "start", ")", "raise" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/ntpath.py#L536-L579
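The function above mirrors CPython's ntpath.relpath, so its behaviour can be checked on any platform (a sketch; the paths are illustrative):

import ntpath

print(ntpath.relpath(r'C:\proj\src\main.py', r'C:\proj'))  # src\main.py
print(ntpath.relpath(r'C:\proj', r'C:\proj\src'))          # ..
# Paths on different drives raise ValueError:
# ntpath.relpath(r'D:\data', r'C:\proj')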
DGA-MI-SSI/YaCo
9b85e6ca1809114c4df1382c11255f7e38408912
deps/flatbuffers-1.8.0/python/flatbuffers/builder.py
python
Builder.PlaceSOffsetT
(self, x)
PlaceSOffsetT prepends a SOffsetT to the Builder, without checking for space.
PlaceSOffsetT prepends a SOffsetT to the Builder, without checking for space.
[ "PlaceSOffsetT", "prepends", "a", "SOffsetT", "to", "the", "Builder", "without", "checking", "for", "space", "." ]
def PlaceSOffsetT(self, x):
    """PlaceSOffsetT prepends a SOffsetT to the Builder, without checking
    for space.
    """
    N.enforce_number(x, N.SOffsetTFlags)
    self.head = self.head - N.SOffsetTFlags.bytewidth
    encode.Write(packer.soffset, self.Bytes, self.Head(), x)
[ "def", "PlaceSOffsetT", "(", "self", ",", "x", ")", ":", "N", ".", "enforce_number", "(", "x", ",", "N", ".", "SOffsetTFlags", ")", "self", ".", "head", "=", "self", ".", "head", "-", "N", ".", "SOffsetTFlags", ".", "bytewidth", "encode", ".", "Write", "(", "packer", ".", "soffset", ",", "self", ".", "Bytes", ",", "self", ".", "Head", "(", ")", ",", "x", ")" ]
https://github.com/DGA-MI-SSI/YaCo/blob/9b85e6ca1809114c4df1382c11255f7e38408912/deps/flatbuffers-1.8.0/python/flatbuffers/builder.py#L664-L670
PaddlePaddle/Paddle
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py
python
PartialProgramLayer._infer_program
(self)
return self._clone_for_test(self._origin_main_program)
Lazy initialized property of infer_program.
Lazy initialized property of infer_program.
[ "Lazy", "initialized", "property", "of", "infer_program", "." ]
def _infer_program(self):
    """
    Lazy initialized property of infer_program.
    """
    return self._clone_for_test(self._origin_main_program)
[ "def", "_infer_program", "(", "self", ")", ":", "return", "self", ".", "_clone_for_test", "(", "self", ".", "_origin_main_program", ")" ]
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py#L165-L169
hpi-xnor/BMXNet-v2
af2b1859eafc5c721b1397cef02f946aaf2ce20d
python/mxnet/contrib/onnx/mx2onnx/_op_translations.py
python
convert_flatten
(node, **kwargs)
return create_basic_op_node('Flatten', node, kwargs)
Map MXNet's Flatten operator attributes to onnx's Flatten operator and return the created node.
Map MXNet's Flatten operator attributes to onnx's Flatten operator and return the created node.
[ "Map", "MXNet", "s", "Flatten", "operator", "attributes", "to", "onnx", "s", "Flatten", "operator", "and", "return", "the", "created", "node", "." ]
def convert_flatten(node, **kwargs):
    """Map MXNet's Flatten operator attributes to onnx's Flatten operator
    and return the created node.
    """
    return create_basic_op_node('Flatten', node, kwargs)
[ "def", "convert_flatten", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Flatten'", ",", "node", ",", "kwargs", ")" ]
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py#L965-L969
LiquidPlayer/LiquidCore
9405979363f2353ac9a71ad8ab59685dd7f919c9
deps/boost_1_66_0/libs/metaparse/tools/benchmark/generate.py
python
random_chars
(number)
return ( format_character(nth_char(char_map, random.randint(0, char_num - 1))) for _ in xrange(0, number) )
Generate random characters
Generate random characters
[ "Generate", "random", "characters" ]
def random_chars(number):
    """Generate random characters"""
    char_map = {
        k: v for k, v in chars.CHARS.iteritems()
        if not format_character(k).startswith('\\x')
    }
    char_num = sum(char_map.values())
    return (
        format_character(nth_char(char_map, random.randint(0, char_num - 1)))
        for _ in xrange(0, number)
    )
[ "def", "random_chars", "(", "number", ")", ":", "char_map", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "chars", ".", "CHARS", ".", "iteritems", "(", ")", "if", "not", "format_character", "(", "k", ")", ".", "startswith", "(", "'\\\\x'", ")", "}", "char_num", "=", "sum", "(", "char_map", ".", "values", "(", ")", ")", "return", "(", "format_character", "(", "nth_char", "(", "char_map", ",", "random", ".", "randint", "(", "0", ",", "char_num", "-", "1", ")", ")", ")", "for", "_", "in", "xrange", "(", "0", ",", "number", ")", ")" ]
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/boost_1_66_0/libs/metaparse/tools/benchmark/generate.py#L50-L61
ArduPilot/ardupilot
6e684b3496122b8158ac412b609d00004b7ac306
Tools/scripts/build_binaries.py
python
build_binaries.build_antennatracker
(self, tag)
build Tracker binaries
build Tracker binaries
[ "build", "Tracker", "binaries" ]
def build_antennatracker(self, tag):
    '''build Tracker binaries'''
    boards = self.common_boards()[:]
    self.build_vehicle(tag,
                       "AntennaTracker",
                       boards,
                       "AntennaTracker",
                       "antennatracker")
[ "def", "build_antennatracker", "(", "self", ",", "tag", ")", ":", "boards", "=", "self", ".", "common_boards", "(", ")", "[", ":", "]", "self", ".", "build_vehicle", "(", "tag", ",", "\"AntennaTracker\"", ",", "boards", ",", "\"AntennaTracker\"", ",", "\"antennatracker\"", ")" ]
https://github.com/ArduPilot/ardupilot/blob/6e684b3496122b8158ac412b609d00004b7ac306/Tools/scripts/build_binaries.py#L550-L557
tum-vision/fusenet
a1451be2971b348a01b0f525c2a3a7a0e215a591
scripts/cpp_lint.py
python
_NestingState.CheckCompletedBlocks
(self, filename, error)
Checks that all classes and namespaces have been completely parsed.

Call this when all lines in a file have been processed.
Args:
  filename: The name of the current file.
  error: The function to call with any errors found.
Checks that all classes and namespaces have been completely parsed.
[ "Checks", "that", "all", "classes", "and", "namespaces", "have", "been", "completely", "parsed", "." ]
def CheckCompletedBlocks(self, filename, error):
  """Checks that all classes and namespaces have been completely parsed.

  Call this when all lines in a file have been processed.
  Args:
    filename: The name of the current file.
    error: The function to call with any errors found.
  """
  # Note: This test can result in false positives if #ifdef constructs
  # get in the way of brace matching. See the testBuildClass test in
  # cpplint_unittest.py for an example of this.
  for obj in self.stack:
    if isinstance(obj, _ClassInfo):
      error(filename, obj.starting_linenum, 'build/class', 5,
            'Failed to find complete declaration of class %s' %
            obj.name)
    elif isinstance(obj, _NamespaceInfo):
      error(filename, obj.starting_linenum, 'build/namespaces', 5,
            'Failed to find complete declaration of namespace %s' %
            obj.name)
[ "def", "CheckCompletedBlocks", "(", "self", ",", "filename", ",", "error", ")", ":", "# Note: This test can result in false positives if #ifdef constructs", "# get in the way of brace matching. See the testBuildClass test in", "# cpplint_unittest.py for an example of this.", "for", "obj", "in", "self", ".", "stack", ":", "if", "isinstance", "(", "obj", ",", "_ClassInfo", ")", ":", "error", "(", "filename", ",", "obj", ".", "starting_linenum", ",", "'build/class'", ",", "5", ",", "'Failed to find complete declaration of class %s'", "%", "obj", ".", "name", ")", "elif", "isinstance", "(", "obj", ",", "_NamespaceInfo", ")", ":", "error", "(", "filename", ",", "obj", ".", "starting_linenum", ",", "'build/namespaces'", ",", "5", ",", "'Failed to find complete declaration of namespace %s'", "%", "obj", ".", "name", ")" ]
https://github.com/tum-vision/fusenet/blob/a1451be2971b348a01b0f525c2a3a7a0e215a591/scripts/cpp_lint.py#L2172-L2191
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
lts/tools/inspector_protocol/jinja2/parser.py
python
Parser.free_identifier
(self, lineno=None)
return rv
Return a new free identifier as :class:`~jinja2.nodes.InternalName`.
Return a new free identifier as :class:`~jinja2.nodes.InternalName`.
[ "Return", "a", "new", "free", "identifier", "as", ":", "class", ":", "~jinja2", ".", "nodes", ".", "InternalName", "." ]
def free_identifier(self, lineno=None):
    """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
    self._last_identifier += 1
    rv = object.__new__(nodes.InternalName)
    nodes.Node.__init__(rv, 'fi%d' % self._last_identifier, lineno=lineno)
    return rv
[ "def", "free_identifier", "(", "self", ",", "lineno", "=", "None", ")", ":", "self", ".", "_last_identifier", "+=", "1", "rv", "=", "object", ".", "__new__", "(", "nodes", ".", "InternalName", ")", "nodes", ".", "Node", ".", "__init__", "(", "rv", ",", "'fi%d'", "%", "self", ".", "_last_identifier", ",", "lineno", "=", "lineno", ")", "return", "rv" ]
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/tools/inspector_protocol/jinja2/parser.py#L114-L119
PX4/PX4-Autopilot
0b9f60a0370be53d683352c63fd92db3d6586e18
Tools/mavlink_px4.py
python
MAVLink.change_operator_control_ack_send
(self, gcs_system_id, control_request, ack)
return self.send(self.change_operator_control_ack_encode(gcs_system_id, control_request, ack))
Accept / deny control of this MAV

gcs_system_id   : ID of the GCS this message  (uint8_t)
control_request : 0: request control of this MAV, 1: Release control of this MAV (uint8_t)
ack             : 0: ACK, 1: NACK: Wrong passkey, 2: NACK: Unsupported passkey encryption method, 3: NACK: Already under control (uint8_t)
Accept / deny control of this MAV
[ "Accept", "/", "deny", "control", "of", "this", "MAV" ]
def change_operator_control_ack_send(self, gcs_system_id, control_request, ack):
    '''
    Accept / deny control of this MAV

    gcs_system_id   : ID of the GCS this message  (uint8_t)
    control_request : 0: request control of this MAV, 1: Release control of this MAV (uint8_t)
    ack             : 0: ACK, 1: NACK: Wrong passkey, 2: NACK: Unsupported passkey encryption method, 3: NACK: Already under control (uint8_t)
    '''
    return self.send(self.change_operator_control_ack_encode(gcs_system_id, control_request, ack))
[ "def", "change_operator_control_ack_send", "(", "self", ",", "gcs_system_id", ",", "control_request", ",", "ack", ")", ":", "return", "self", ".", "send", "(", "self", ".", "change_operator_control_ack_encode", "(", "gcs_system_id", ",", "control_request", ",", "ack", ")", ")" ]
https://github.com/PX4/PX4-Autopilot/blob/0b9f60a0370be53d683352c63fd92db3d6586e18/Tools/mavlink_px4.py#L2600-L2609
hfinkel/llvm-project-cxxjit
91084ef018240bbb8e24235ff5cd8c355a9c1a1e
llvm/utils/lit/lit/util.py
python
to_string
(b)
Return the parameter as type 'str', possibly encoding it. In Python2, the 'str' type is the same as 'bytes'. In Python3, the 'str' type is (essentially) Python2's 'unicode' type, and 'bytes' is distinct.
Return the parameter as type 'str', possibly encoding it.
[ "Return", "the", "parameter", "as", "type", "str", "possibly", "encoding", "it", "." ]
def to_string(b):
    """Return the parameter as type 'str', possibly encoding it.

    In Python2, the 'str' type is the same as 'bytes'. In Python3, the
    'str' type is (essentially) Python2's 'unicode' type, and 'bytes' is
    distinct.
    """
    if isinstance(b, str):
        # In Python2, this branch is taken for types 'str' and 'bytes'.
        # In Python3, this branch is taken only for 'str'.
        return b
    if isinstance(b, bytes):
        # In Python2, this branch is never taken ('bytes' is handled as 'str').
        # In Python3, this is true only for 'bytes'.
        try:
            return b.decode('utf-8')
        except UnicodeDecodeError:
            # If the value is not valid Unicode, return the default
            # repr-line encoding.
            return str(b)

    # By this point, here's what we *don't* have:
    #
    #  - In Python2:
    #    - 'str' or 'bytes' (1st branch above)
    #  - In Python3:
    #    - 'str' (1st branch above)
    #    - 'bytes' (2nd branch above)
    #
    # The last type we might expect is the Python2 'unicode' type. There is no
    # 'unicode' type in Python3 (all the Python3 cases were already handled). In
    # order to get a 'str' object, we need to encode the 'unicode' object.
    try:
        return b.encode('utf-8')
    except AttributeError:
        raise TypeError('not sure how to convert %s to %s' % (type(b), str))
[ "def", "to_string", "(", "b", ")", ":", "if", "isinstance", "(", "b", ",", "str", ")", ":", "# In Python2, this branch is taken for types 'str' and 'bytes'.", "# In Python3, this branch is taken only for 'str'.", "return", "b", "if", "isinstance", "(", "b", ",", "bytes", ")", ":", "# In Python2, this branch is never taken ('bytes' is handled as 'str').", "# In Python3, this is true only for 'bytes'.", "try", ":", "return", "b", ".", "decode", "(", "'utf-8'", ")", "except", "UnicodeDecodeError", ":", "# If the value is not valid Unicode, return the default", "# repr-line encoding.", "return", "str", "(", "b", ")", "# By this point, here's what we *don't* have:", "#", "# - In Python2:", "# - 'str' or 'bytes' (1st branch above)", "# - In Python3:", "# - 'str' (1st branch above)", "# - 'bytes' (2nd branch above)", "#", "# The last type we might expect is the Python2 'unicode' type. There is no", "# 'unicode' type in Python3 (all the Python3 cases were already handled). In", "# order to get a 'str' object, we need to encode the 'unicode' object.", "try", ":", "return", "b", ".", "encode", "(", "'utf-8'", ")", "except", "AttributeError", ":", "raise", "TypeError", "(", "'not sure how to convert %s to %s'", "%", "(", "type", "(", "b", ")", ",", "str", ")", ")" ]
https://github.com/hfinkel/llvm-project-cxxjit/blob/91084ef018240bbb8e24235ff5cd8c355a9c1a1e/llvm/utils/lit/lit/util.py#L66-L102
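Expected behaviour of the helper on Python 3, assuming the lit package is importable (pip install lit):

from lit.util import to_string

print(to_string('abc'))        # 'abc'  -- str passes through
print(to_string(b'abc'))       # 'abc'  -- bytes decoded as UTF-8
print(to_string(b'\xff\xfe'))  # "b'\xff\xfe'" -- invalid UTF-8 falls back to str(b)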
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/ed_vim.py
python
Join
(editor, repeat, cmd)
Join lines command. @see: vim_parser
Join lines command. @see: vim_parser
[ "Join", "lines", "command", ".", "@see", ":", "vim_parser" ]
def Join(editor, repeat, cmd):
    """Join lines command.
    @see: vim_parser

    """
    editor.PushCaret()
    editor.JoinLines(repeat)
    editor.PopCaret()
[ "def", "Join", "(", "editor", ",", "repeat", ",", "cmd", ")", ":", "editor", ".", "PushCaret", "(", ")", "editor", ".", "JoinLines", "(", "repeat", ")", "editor", ".", "PopCaret", "(", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/ed_vim.py#L1116-L1123
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/tkinter/ttk.py
python
Spinbox.set
(self, value)
Sets the value of the Spinbox to value.
Sets the value of the Spinbox to value.
[ "Sets", "the", "value", "of", "the", "Spinbox", "to", "value", "." ]
def set(self, value):
    """Sets the value of the Spinbox to value."""
    self.tk.call(self._w, "set", value)
[ "def", "set", "(", "self", ",", "value", ")", ":", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "\"set\"", ",", "value", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/tkinter/ttk.py#L1177-L1179
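A minimal sketch of the setter in context (requires a display; ttk.Spinbox is available from Python 3.7):

import tkinter as tk
from tkinter import ttk

root = tk.Tk()
spin = ttk.Spinbox(root, from_=0, to=10)
spin.set(5)          # the method shown above
print(spin.get())    # '5'
root.destroy()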
ceph/ceph
959663007321a369c83218414a29bd9dbc8bda3a
src/pybind/mgr/cephadm/inventory.py
python
SpecStore.all_specs
(self)
return self._specs
returns active and deleted specs. Returns read-only dict.
returns active and deleted specs. Returns read-only dict.
[ "returns", "active", "and", "deleted", "specs", ".", "Returns", "read", "-", "only", "dict", "." ]
def all_specs(self) -> Mapping[str, ServiceSpec]:
    """
    returns active and deleted specs. Returns read-only dict.
    """
    return self._specs
[ "def", "all_specs", "(", "self", ")", "->", "Mapping", "[", "str", ",", "ServiceSpec", "]", ":", "return", "self", ".", "_specs" ]
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/src/pybind/mgr/cephadm/inventory.py#L186-L190
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
src/external/boost/boost_1_68_0/tools/build/src/build/engine.py
python
Engine.get_target_variable
(self, targets, variable)
return bjam_interface.call('get-target-variable', targets, variable)
Gets the value of `variable` on set on the first target in `targets`.

Args:
    targets (str or list): one or more targets to get the variable from.
    variable (str): the name of the variable

Returns:
    the value of `variable` set on `targets` (list)

Example:

    >>> ENGINE = get_manager().engine()
    >>> ENGINE.set_target_variable(targets, 'MY-VAR', 'Hello World')
    >>> ENGINE.get_target_variable(targets, 'MY-VAR')
    ['Hello World']

    Equivalent Jam code:

        MY-VAR on $(targets) = "Hello World" ;
        echo [ on $(targets) return $(MY-VAR) ] ;
        "Hello World"
Gets the value of `variable` on set on the first target in `targets`.
[ "Gets", "the", "value", "of", "variable", "on", "set", "on", "the", "first", "target", "in", "targets", "." ]
def get_target_variable(self, targets, variable):
    """Gets the value of `variable` on set on the first target in `targets`.

    Args:
        targets (str or list): one or more targets to get the variable from.
        variable (str): the name of the variable

    Returns:
        the value of `variable` set on `targets` (list)

    Example:

        >>> ENGINE = get_manager().engine()
        >>> ENGINE.set_target_variable(targets, 'MY-VAR', 'Hello World')
        >>> ENGINE.get_target_variable(targets, 'MY-VAR')
        ['Hello World']

        Equivalent Jam code:

            MY-VAR on $(targets) = "Hello World" ;
            echo [ on $(targets) return $(MY-VAR) ] ;
            "Hello World"
    """
    if isinstance(targets, str):
        targets = [targets]
    assert is_iterable(targets)
    assert isinstance(variable, basestring)
    return bjam_interface.call('get-target-variable', targets, variable)
[ "def", "get_target_variable", "(", "self", ",", "targets", ",", "variable", ")", ":", "if", "isinstance", "(", "targets", ",", "str", ")", ":", "targets", "=", "[", "targets", "]", "assert", "is_iterable", "(", "targets", ")", "assert", "isinstance", "(", "variable", ",", "basestring", ")", "return", "bjam_interface", ".", "call", "(", "'get-target-variable'", ",", "targets", ",", "variable", ")" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/boost/boost_1_68_0/tools/build/src/build/engine.py#L93-L121
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/draftguitools/gui_annotationstyleeditor.py
python
AnnotationStyleEditor.on_delete
(self)
Execute as a callback when the delete button is pressed.
Execute as a callback when the delete button is pressed.
[ "Execute", "as", "a", "callback", "when", "the", "delete", "button", "is", "pressed", "." ]
def on_delete(self):
    """Execute as a callback when the delete button is pressed."""
    index = self.form.comboBoxStyles.currentIndex()
    style = self.form.comboBoxStyles.itemText(index)
    if self.get_style_users(style):
        reply = QtGui.QMessageBox.question(None,
                                           "Style in use",
                                           "This style is used by some objects in this document. Are you sure?",
                                           QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
                                           QtGui.QMessageBox.No)
        if reply == QtGui.QMessageBox.No:
            return
    self.form.comboBoxStyles.removeItem(index)
    del self.styles[style]
[ "def", "on_delete", "(", "self", ")", ":", "index", "=", "self", ".", "form", ".", "comboBoxStyles", ".", "currentIndex", "(", ")", "style", "=", "self", ".", "form", ".", "comboBoxStyles", ".", "itemText", "(", "index", ")", "if", "self", ".", "get_style_users", "(", "style", ")", ":", "reply", "=", "QtGui", ".", "QMessageBox", ".", "question", "(", "None", ",", "\"Style in use\"", ",", "\"This style is used by some objects in this document. Are you sure?\"", ",", "QtGui", ".", "QMessageBox", ".", "Yes", "|", "QtGui", ".", "QMessageBox", ".", "No", ",", "QtGui", ".", "QMessageBox", ".", "No", ")", "if", "reply", "==", "QtGui", ".", "QMessageBox", ".", "No", ":", "return", "self", ".", "form", ".", "comboBoxStyles", ".", "removeItem", "(", "index", ")", "del", "self", ".", "styles", "[", "style", "]" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftguitools/gui_annotationstyleeditor.py#L245-L259
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/distribute/v1/all_reduce.py
python
_build_nccl_hybrid
(input_tensors, red_op, upper_level_f)
return output_tensors
Construct a subgraph for NCCL hybrid all-reduce. Args: input_tensors: list of `tf.Tensor` of same-shape and type values to be reduced. red_op: binary elementwise reduction operator. upper_level_f: function for reducing one value per worker, across workers. Returns: list of `tf.Tensor` of reduced values. Raises: ValueError: inputs not well-formed.
Construct a subgraph for NCCL hybrid all-reduce.
[ "Construct", "a", "subgraph", "for", "NCCL", "hybrid", "all", "-", "reduce", "." ]
def _build_nccl_hybrid(input_tensors, red_op, upper_level_f): """Construct a subgraph for NCCL hybrid all-reduce. Args: input_tensors: list of `tf.Tensor` of same-shape and type values to be reduced. red_op: binary elementwise reduction operator. upper_level_f: function for reducing one value per worker, across workers. Returns: list of `tf.Tensor` of reduced values. Raises: ValueError: inputs not well-formed. """ input_tensors, shape = _flatten_tensors(input_tensors) devices = [t.device for t in input_tensors] per_worker_devices, per_worker_values = _split_by_task(devices, input_tensors) num_workers = len(per_worker_devices) up_values = [None for w in range(0, num_workers)] up_devices = up_values[:] down_values = up_values[:] # First stage: reduce within each worker using NCCL for w in range(0, num_workers): worker_values = build_nccl_all_reduce(per_worker_values[w], red_op) # NOTE: these reductions will not run to completion unless # every output value is used. Since we only need one, we # need to put control dependencies on the rest. with ops.control_dependencies(worker_values): with ops.device(worker_values[0].device): up_values[w] = array_ops.identity(worker_values[0]) up_devices[w] = per_worker_devices[w][0] # Second stage: Apply upper_level_f to reduce across first device at # each worker level_2_output = upper_level_f(up_values) # Third stage: propagate within each worker using NCCL Broadcast for w in range(0, num_workers): dst_tensors = [] with ops.device(per_worker_devices[w][0]): broadcast_src = nccl_ops.broadcast(array_ops.identity(level_2_output[w])) for d in per_worker_devices[w]: with ops.device(d): dst_tensors.append(array_ops.identity(broadcast_src)) down_values[w] = dst_tensors output_tensors = [v for sublist in down_values for v in sublist] if len(shape) != 1: output_tensors = _reshape_tensors(output_tensors, shape) return output_tensors
[ "def", "_build_nccl_hybrid", "(", "input_tensors", ",", "red_op", ",", "upper_level_f", ")", ":", "input_tensors", ",", "shape", "=", "_flatten_tensors", "(", "input_tensors", ")", "devices", "=", "[", "t", ".", "device", "for", "t", "in", "input_tensors", "]", "per_worker_devices", ",", "per_worker_values", "=", "_split_by_task", "(", "devices", ",", "input_tensors", ")", "num_workers", "=", "len", "(", "per_worker_devices", ")", "up_values", "=", "[", "None", "for", "w", "in", "range", "(", "0", ",", "num_workers", ")", "]", "up_devices", "=", "up_values", "[", ":", "]", "down_values", "=", "up_values", "[", ":", "]", "# First stage: reduce within each worker using NCCL", "for", "w", "in", "range", "(", "0", ",", "num_workers", ")", ":", "worker_values", "=", "build_nccl_all_reduce", "(", "per_worker_values", "[", "w", "]", ",", "red_op", ")", "# NOTE: these reductions will not run to completion unless", "# every output value is used. Since we only need one, we", "# need to put control dependencies on the rest.", "with", "ops", ".", "control_dependencies", "(", "worker_values", ")", ":", "with", "ops", ".", "device", "(", "worker_values", "[", "0", "]", ".", "device", ")", ":", "up_values", "[", "w", "]", "=", "array_ops", ".", "identity", "(", "worker_values", "[", "0", "]", ")", "up_devices", "[", "w", "]", "=", "per_worker_devices", "[", "w", "]", "[", "0", "]", "# Second stage: Apply upper_level_f to reduce across first device at", "# each worker", "level_2_output", "=", "upper_level_f", "(", "up_values", ")", "# Third stage: propagate within each worker using NCCL Broadcast", "for", "w", "in", "range", "(", "0", ",", "num_workers", ")", ":", "dst_tensors", "=", "[", "]", "with", "ops", ".", "device", "(", "per_worker_devices", "[", "w", "]", "[", "0", "]", ")", ":", "broadcast_src", "=", "nccl_ops", ".", "broadcast", "(", "array_ops", ".", "identity", "(", "level_2_output", "[", "w", "]", ")", ")", "for", "d", "in", "per_worker_devices", "[", "w", "]", ":", "with", "ops", ".", "device", "(", "d", ")", ":", "dst_tensors", ".", "append", "(", "array_ops", ".", "identity", "(", "broadcast_src", ")", ")", "down_values", "[", "w", "]", "=", "dst_tensors", "output_tensors", "=", "[", "v", "for", "sublist", "in", "down_values", "for", "v", "in", "sublist", "]", "if", "len", "(", "shape", ")", "!=", "1", ":", "output_tensors", "=", "_reshape_tensors", "(", "output_tensors", ",", "shape", ")", "return", "output_tensors" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/v1/all_reduce.py#L710-L758
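The three-stage structure above (intra-worker NCCL reduce, cross-worker reduce of one value per worker, intra-worker broadcast) is easier to see stripped of graph plumbing. A toy, framework-free sketch, with plain Python lists standing in for per-device tensors (`hybrid_all_reduce` and its shapes are hypothetical, not TensorFlow API):

```python
import operator
from functools import reduce

def hybrid_all_reduce(per_worker_values, red_op):
    # Stage 1: reduce within each worker (NCCL in the real graph).
    partials = [reduce(red_op, vals) for vals in per_worker_values]
    # Stage 2: reduce the one-per-worker partials across workers (upper_level_f).
    total = reduce(red_op, partials)
    # Stage 3: broadcast the result back to every device on every worker.
    return [[total] * len(vals) for vals in per_worker_values]

print(hybrid_all_reduce([[1, 2], [3, 4]], operator.add))  # [[10, 10], [10, 10]]
```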
SoarGroup/Soar
a1c5e249499137a27da60533c72969eef3b8ab6b
scons/scons-local-4.1.0/SCons/CacheDir.py
python
CacheDir.__init__
(self, path)
Initialize a CacheDir object. The cache configuration is stored in the object. It is read from the config file in the supplied path if one exists, if not the config file is created and the default config is written, as well as saved in the object.
Initialize a CacheDir object.
[ "Initialize", "a", "CacheDir", "object", "." ]
def __init__(self, path): """ Initialize a CacheDir object. The cache configuration is stored in the object. It is read from the config file in the supplied path if one exists, if not the config file is created and the default config is written, as well as saved in the object. """ self.requests = 0 self.hits = 0 self.path = path self.current_cache_debug = None self.debugFP = None self.config = dict() if path is None: return self._readconfig(path)
[ "def", "__init__", "(", "self", ",", "path", ")", ":", "self", ".", "requests", "=", "0", "self", ".", "hits", "=", "0", "self", ".", "path", "=", "path", "self", ".", "current_cache_debug", "=", "None", "self", ".", "debugFP", "=", "None", "self", ".", "config", "=", "dict", "(", ")", "if", "path", "is", "None", ":", "return", "self", ".", "_readconfig", "(", "path", ")" ]
https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/CacheDir.py#L138-L156
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Source/ThirdParty/CEF3/pristine/cef_source/tools/cef_parser.py
python
obj_analysis.get_ptr_type
(self)
return self.ptr_type
Return the C++ class type referenced by a CefRefPtr.
Return the C++ class type referenced by a CefRefPtr.
[ "Return", "the", "C", "++", "class", "type", "referenced", "by", "a", "CefRefPtr", "." ]
def get_ptr_type(self): """ Return the C++ class type referenced by a CefRefPtr. """ if self.is_result_vector() and self.is_result_vector_ptr(): # return the vector RefPtr type return self.result_value[0]['ptr_type'] # return the basic RefPtr type return self.ptr_type
[ "def", "get_ptr_type", "(", "self", ")", ":", "if", "self", ".", "is_result_vector", "(", ")", "and", "self", ".", "is_result_vector_ptr", "(", ")", ":", "# return the vector RefPtr type", "return", "self", ".", "result_value", "[", "0", "]", "[", "'ptr_type'", "]", "# return the basic RefPtr type", "return", "self", ".", "ptr_type" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Source/ThirdParty/CEF3/pristine/cef_source/tools/cef_parser.py#L1745-L1751
eric612/MobileNet-YOLO
69b4441cb3ec8d553fbdef788ad033e246f901bd
python/caffe/coord_map.py
python
compose
(base_map, next_map)
return ax, a1 * a2, a1 * b2 + b1
Compose a base coord map with scale a1, shift b1 with a further coord map with scale a2, shift b2. The scales multiply and the further shift, b2, is scaled by base coord scale a1.
Compose a base coord map with scale a1, shift b1 with a further coord map with scale a2, shift b2. The scales multiply and the further shift, b2, is scaled by base coord scale a1.
[ "Compose", "a", "base", "coord", "map", "with", "scale", "a1", "shift", "b1", "with", "a", "further", "coord", "map", "with", "scale", "a2", "shift", "b2", ".", "The", "scales", "multiply", "and", "the", "further", "shift", "b2", "is", "scaled", "by", "base", "coord", "scale", "a1", "." ]
def compose(base_map, next_map): """ Compose a base coord map with scale a1, shift b1 with a further coord map with scale a2, shift b2. The scales multiply and the further shift, b2, is scaled by base coord scale a1. """ ax1, a1, b1 = base_map ax2, a2, b2 = next_map if ax1 is None: ax = ax2 elif ax2 is None or ax1 == ax2: ax = ax1 else: raise AxisMismatchException return ax, a1 * a2, a1 * b2 + b1
[ "def", "compose", "(", "base_map", ",", "next_map", ")", ":", "ax1", ",", "a1", ",", "b1", "=", "base_map", "ax2", ",", "a2", ",", "b2", "=", "next_map", "if", "ax1", "is", "None", ":", "ax", "=", "ax2", "elif", "ax2", "is", "None", "or", "ax1", "==", "ax2", ":", "ax", "=", "ax1", "else", ":", "raise", "AxisMismatchException", "return", "ax", ",", "a1", "*", "a2", ",", "a1", "*", "b2", "+", "b1" ]
https://github.com/eric612/MobileNet-YOLO/blob/69b4441cb3ec8d553fbdef788ad033e246f901bd/python/caffe/coord_map.py#L89-L103
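A quick numeric check of the rule above: coordinate maps are affine maps x -> a*x + b, so composing (a1, b1) with (a2, b2) multiplies the scales and scales the second shift by a1 (the import assumes the pycaffe layout shown in the path above):

```python
from caffe.coord_map import compose

base_map = (0, 2, 1)    # axis 0, scale a1=2, shift b1=1
next_map = (0, 4, -3)   # axis 0, scale a2=4, shift b2=-3
ax, a, b = compose(base_map, next_map)
assert (ax, a, b) == (0, 8, -5)   # a1*a2 = 8, a1*b2 + b1 = 2*(-3) + 1 = -5
```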
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/command/build_py.py
python
build_py.check_package
(self, package, package_dir)
return init_py
Check namespace packages' __init__ for declare_namespace
Check namespace packages' __init__ for declare_namespace
[ "Check", "namespace", "packages", "__init__", "for", "declare_namespace" ]
def check_package(self, package, package_dir): """Check namespace packages' __init__ for declare_namespace""" try: return self.packages_checked[package] except KeyError: pass init_py = orig.build_py.check_package(self, package, package_dir) self.packages_checked[package] = init_py if not init_py or not self.distribution.namespace_packages: return init_py for pkg in self.distribution.namespace_packages: if pkg == package or pkg.startswith(package + '.'): break else: return init_py with io.open(init_py, 'rb') as f: contents = f.read() if b'declare_namespace' not in contents: raise distutils.errors.DistutilsError( "Namespace package problem: %s is a namespace package, but " "its\n__init__.py does not call declare_namespace()! Please " 'fix it.\n(See the setuptools manual under ' '"Namespace Packages" for details.)\n"' % (package,) ) return init_py
[ "def", "check_package", "(", "self", ",", "package", ",", "package_dir", ")", ":", "try", ":", "return", "self", ".", "packages_checked", "[", "package", "]", "except", "KeyError", ":", "pass", "init_py", "=", "orig", ".", "build_py", ".", "check_package", "(", "self", ",", "package", ",", "package_dir", ")", "self", ".", "packages_checked", "[", "package", "]", "=", "init_py", "if", "not", "init_py", "or", "not", "self", ".", "distribution", ".", "namespace_packages", ":", "return", "init_py", "for", "pkg", "in", "self", ".", "distribution", ".", "namespace_packages", ":", "if", "pkg", "==", "package", "or", "pkg", ".", "startswith", "(", "package", "+", "'.'", ")", ":", "break", "else", ":", "return", "init_py", "with", "io", ".", "open", "(", "init_py", ",", "'rb'", ")", "as", "f", ":", "contents", "=", "f", ".", "read", "(", ")", "if", "b'declare_namespace'", "not", "in", "contents", ":", "raise", "distutils", ".", "errors", ".", "DistutilsError", "(", "\"Namespace package problem: %s is a namespace package, but \"", "\"its\\n__init__.py does not call declare_namespace()! Please \"", "'fix it.\\n(See the setuptools manual under '", "'\"Namespace Packages\" for details.)\\n\"'", "%", "(", "package", ",", ")", ")", "return", "init_py" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/command/build_py.py#L162-L190
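For reference, the one-line `__init__.py` that satisfies this check is the legacy pkg_resources-style declaration documented for setuptools namespace packages:

```python
# Contents of a namespace package's __init__.py that passes the check:
__import__('pkg_resources').declare_namespace(__name__)
```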
gem5/gem5
141cc37c2d4b93959d4c249b8f7e6a8b2ef75338
ext/ply/example/ansic/cparse.py
python
p_constant
(t)
constant : ICONST | FCONST | CCONST
constant : ICONST | FCONST | CCONST
[ "constant", ":", "ICONST", "|", "FCONST", "|", "CCONST" ]
def p_constant(t): '''constant : ICONST | FCONST | CCONST''' pass
[ "def", "p_constant", "(", "t", ")", ":", "pass" ]
https://github.com/gem5/gem5/blob/141cc37c2d4b93959d4c249b8f7e6a8b2ef75338/ext/ply/example/ansic/cparse.py#L840-L844
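The bare `pass` body is idiomatic PLY: yacc reads the grammar alternatives from the handler's docstring at table-build time, and a rule with no action simply discards its value. A rule that does build a value looks like this (a sketch; the production names are hypothetical):

```python
# PLY takes the production from the docstring; t[0] is the rule's value.
def p_expression_plus(t):
    'expression : expression PLUS term'
    t[0] = t[1] + t[3]
```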
syoyo/tinygltf
e7f1ff5c59d3ca2489923beb239bdf93d863498f
deps/cpplint.py
python
_FunctionState.Count
(self)
Count line in current function body.
Count line in current function body.
[ "Count", "line", "in", "current", "function", "body", "." ]
def Count(self): """Count line in current function body.""" if self.in_a_function: self.lines_in_function += 1
[ "def", "Count", "(", "self", ")", ":", "if", "self", ".", "in_a_function", ":", "self", ".", "lines_in_function", "+=", "1" ]
https://github.com/syoyo/tinygltf/blob/e7f1ff5c59d3ca2489923beb239bdf93d863498f/deps/cpplint.py#L934-L937
llvm/llvm-project
ffa6262cb4e2a335d26416fad39a581b4f98c5f4
clang/utils/check_cfc/obj_diff.py
python
compare_debug_info
(objfilea, objfileb)
return first_diff(dbga, dbgb, objfilea, objfileb)
Compare debug info of two different files. Allowing unavoidable differences, such as filenames. Return the first difference if the debug info differs, or None. If there are differences in the code, there will almost certainly be differences in the debug info too.
Compare debug info of two different files. Allowing unavoidable differences, such as filenames. Return the first difference if the debug info differs, or None. If there are differences in the code, there will almost certainly be differences in the debug info too.
[ "Compare", "debug", "info", "of", "two", "different", "files", ".", "Allowing", "unavoidable", "differences", "such", "as", "filenames", ".", "Return", "the", "first", "difference", "if", "the", "debug", "info", "differs", "or", "None", ".", "If", "there", "are", "differences", "in", "the", "code", "there", "will", "almost", "certainly", "be", "differences", "in", "the", "debug", "info", "too", "." ]
def compare_debug_info(objfilea, objfileb): """Compare debug info of two different files. Allowing unavoidable differences, such as filenames. Return the first difference if the debug info differs, or None. If there are differences in the code, there will almost certainly be differences in the debug info too. """ dbga = dump_debug(objfilea) dbgb = dump_debug(objfileb) return first_diff(dbga, dbgb, objfilea, objfileb)
[ "def", "compare_debug_info", "(", "objfilea", ",", "objfileb", ")", ":", "dbga", "=", "dump_debug", "(", "objfilea", ")", "dbgb", "=", "dump_debug", "(", "objfileb", ")", "return", "first_diff", "(", "dbga", ",", "dbgb", ",", "objfilea", ",", "objfileb", ")" ]
https://github.com/llvm/llvm-project/blob/ffa6262cb4e2a335d26416fad39a581b4f98c5f4/clang/utils/check_cfc/obj_diff.py#L76-L84
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_windows.py
python
FontData.GetChosenFont
(*args, **kwargs)
return _windows_.FontData_GetChosenFont(*args, **kwargs)
GetChosenFont(self) -> Font Gets the font chosen by the user.
GetChosenFont(self) -> Font
[ "GetChosenFont", "(", "self", ")", "-", ">", "Font" ]
def GetChosenFont(*args, **kwargs): """ GetChosenFont(self) -> Font Gets the font chosen by the user. """ return _windows_.FontData_GetChosenFont(*args, **kwargs)
[ "def", "GetChosenFont", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "FontData_GetChosenFont", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L3485-L3491
lballabio/quantlib-old
136336947ed4fea9ecc1da6edad188700e821739
gensrc/gensrc/addins/enumerations.py
python
Enumerations.generateEnumeratedClasses
(self)
Generate source file for enumerated types.
Generate source file for enumerated types.
[ "Generate", "source", "file", "for", "enumerated", "types", "." ]
def generateEnumeratedClasses(self): """Generate source file for enumerated types.""" if not self.enumerationList_.hasEnumeratedClasses: return buffer = '' # code to register the enumeration for enumeratedClassGroup in self.enumerationList_.enumeratedClassGroups(): buffer += self.generateEnumeratedClass(enumeratedClassGroup) self.bufferEnumClasses_.set({ 'buffer' : buffer }) fileName = self.rootPath_ + 'register_classes.cpp' outputfile.OutputFile(self, fileName, self.enumerationList_.enumeratedClassCopyright(), self.bufferEnumClasses_)
[ "def", "generateEnumeratedClasses", "(", "self", ")", ":", "if", "not", "self", ".", "enumerationList_", ".", "hasEnumeratedClasses", ":", "return", "buffer", "=", "''", "# code to register the enumeration", "for", "enumeratedClassGroup", "in", "self", ".", "enumerationList_", ".", "enumeratedClassGroups", "(", ")", ":", "buffer", "+=", "self", ".", "generateEnumeratedClass", "(", "enumeratedClassGroup", ")", "self", ".", "bufferEnumClasses_", ".", "set", "(", "{", "'buffer'", ":", "buffer", "}", ")", "fileName", "=", "self", ".", "rootPath_", "+", "'register_classes.cpp'", "outputfile", ".", "OutputFile", "(", "self", ",", "fileName", ",", "self", ".", "enumerationList_", ".", "enumeratedClassCopyright", "(", ")", ",", "self", ".", "bufferEnumClasses_", ")" ]
https://github.com/lballabio/quantlib-old/blob/136336947ed4fea9ecc1da6edad188700e821739/gensrc/gensrc/addins/enumerations.py#L98-L109
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py2/sklearn/svm/base.py
python
BaseLibSVM._decision_function
(self, X)
return dec_func
Distance of the samples X to the separating hyperplane. Parameters ---------- X : array-like, shape (n_samples, n_features) Returns ------- X : array-like, shape (n_samples, n_class * (n_class-1) / 2) Returns the decision function of the sample for each class in the model.
Distance of the samples X to the separating hyperplane.
[ "Distance", "of", "the", "samples", "X", "to", "the", "separating", "hyperplane", "." ]
def _decision_function(self, X): """Distance of the samples X to the separating hyperplane. Parameters ---------- X : array-like, shape (n_samples, n_features) Returns ------- X : array-like, shape (n_samples, n_class * (n_class-1) / 2) Returns the decision function of the sample for each class in the model. """ # NOTE: _validate_for_predict contains check for is_fitted # hence must be placed before any other attributes are used. X = self._validate_for_predict(X) X = self._compute_kernel(X) if self._sparse: dec_func = self._sparse_decision_function(X) else: dec_func = self._dense_decision_function(X) # In binary case, we need to flip the sign of coef, intercept and # decision function. if self._impl in ['c_svc', 'nu_svc'] and len(self.classes_) == 2: return -dec_func.ravel() return dec_func
[ "def", "_decision_function", "(", "self", ",", "X", ")", ":", "# NOTE: _validate_for_predict contains check for is_fitted", "# hence must be placed before any other attributes are used.", "X", "=", "self", ".", "_validate_for_predict", "(", "X", ")", "X", "=", "self", ".", "_compute_kernel", "(", "X", ")", "if", "self", ".", "_sparse", ":", "dec_func", "=", "self", ".", "_sparse_decision_function", "(", "X", ")", "else", ":", "dec_func", "=", "self", ".", "_dense_decision_function", "(", "X", ")", "# In binary case, we need to flip the sign of coef, intercept and", "# decision function.", "if", "self", ".", "_impl", "in", "[", "'c_svc'", ",", "'nu_svc'", "]", "and", "len", "(", "self", ".", "classes_", ")", "==", "2", ":", "return", "-", "dec_func", ".", "ravel", "(", ")", "return", "dec_func" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/svm/base.py#L389-L417
echronos/echronos
c996f1d2c8af6c6536205eb319c1bf1d4d84569c
external_tools/pystache/renderer.py
python
Renderer._interpolate
(self, val, formatter_key, location)
return formatter(val)
Convert a value to string.
Convert a value to string.
[ "Convert", "a", "value", "to", "string", "." ]
def _interpolate(self, val, formatter_key, location): """Convert a value to string. """ try: formatter = self.formatters[formatter_key] except: raise FormatterNotFoundError(formatter_key, location) if isinstance(val, bytes): val = self._bytes_to_str(val) elif not isinstance(val, str): val = str(val) return formatter(val)
[ "def", "_interpolate", "(", "self", ",", "val", ",", "formatter_key", ",", "location", ")", ":", "try", ":", "formatter", "=", "self", ".", "formatters", "[", "formatter_key", "]", "except", ":", "raise", "FormatterNotFoundError", "(", "formatter_key", ",", "location", ")", "if", "isinstance", "(", "val", ",", "bytes", ")", ":", "val", "=", "self", ".", "_bytes_to_str", "(", "val", ")", "elif", "not", "isinstance", "(", "val", ",", "str", ")", ":", "val", "=", "str", "(", "val", ")", "return", "formatter", "(", "val", ")" ]
https://github.com/echronos/echronos/blob/c996f1d2c8af6c6536205eb319c1bf1d4d84569c/external_tools/pystache/renderer.py#L168-L180
mingchen/protobuf-ios
0958df34558cd54cb7b6e6ca5c8855bf3d475046
compiler/python/google/protobuf/internal/encoder.py
python
Encoder.AppendSInt64NoTag
(self, value)
Appends a 64-bit integer to our buffer, zigzag-encoded and then varint-encoded.
Appends a 64-bit integer to our buffer, zigzag-encoded and then varint-encoded.
[ "Appends", "a", "64", "-", "bit", "integer", "to", "our", "buffer", "zigzag", "-", "encoded", "and", "then", "varint", "-", "encoded", "." ]
def AppendSInt64NoTag(self, value): """Appends a 64-bit integer to our buffer, zigzag-encoded and then varint-encoded. """ zigzag_value = wire_format.ZigZagEncode(value) self._stream.AppendVarUInt64(zigzag_value)
[ "def", "AppendSInt64NoTag", "(", "self", ",", "value", ")", ":", "zigzag_value", "=", "wire_format", ".", "ZigZagEncode", "(", "value", ")", "self", ".", "_stream", ".", "AppendVarUInt64", "(", "zigzag_value", ")" ]
https://github.com/mingchen/protobuf-ios/blob/0958df34558cd54cb7b6e6ca5c8855bf3d475046/compiler/python/google/protobuf/internal/encoder.py#L87-L92
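ZigZag encoding exists so that small negative numbers still produce short varints: it interleaves signed values onto the unsigned number line as 0, -1, 1, -2, 2 -> 0, 1, 2, 3, 4. A sketch of the standard 64-bit mapping (the real implementation lives in `wire_format.ZigZagEncode`):

```python
def zigzag_encode64(n):
    # (n << 1) makes room for the sign; XOR with the arithmetic-shift
    # sign mask flips the low bits for negative inputs.
    return (n << 1) ^ (n >> 63)

assert [zigzag_encode64(v) for v in (0, -1, 1, -2, 2)] == [0, 1, 2, 3, 4]
```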
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/python/ops/math_grad.py
python
_TanhGrad
(op, grad)
Returns grad * (1 - tanh(x) * tanh(x)).
Returns grad * (1 - tanh(x) * tanh(x)).
[ "Returns", "grad", "*", "(", "1", "-", "tanh", "(", "x", ")", "*", "tanh", "(", "x", "))", "." ]
def _TanhGrad(op, grad): """Returns grad * (1 - tanh(x) * tanh(x)).""" y = op.outputs[0] # y = tanh(x) with ops.control_dependencies([grad.op]): y = math_ops.conj(y) # pylint: disable=protected-access return gen_math_ops._tanh_grad(y, grad)
[ "def", "_TanhGrad", "(", "op", ",", "grad", ")", ":", "y", "=", "op", ".", "outputs", "[", "0", "]", "# y = tanh(x)", "with", "ops", ".", "control_dependencies", "(", "[", "grad", ".", "op", "]", ")", ":", "y", "=", "math_ops", ".", "conj", "(", "y", ")", "# pylint: disable=protected-access", "return", "gen_math_ops", ".", "_tanh_grad", "(", "y", ",", "grad", ")" ]
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/ops/math_grad.py#L330-L336
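The gradient relies on the identity d/dx tanh(x) = 1 - tanh(x)^2, which is why it only needs the op's *output* y and never recomputes x. A quick finite-difference check of that identity:

```python
import math

x, h = 0.7, 1e-6
numeric = (math.tanh(x + h) - math.tanh(x - h)) / (2 * h)
analytic = 1 - math.tanh(x) ** 2      # the "1 - y*y" term the gradient uses
assert abs(numeric - analytic) < 1e-8
```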
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/idlelib/PyShell.py
python
PyShellEditorWindow.store_file_breaks
(self)
Save breakpoints when file is saved
Save breakpoints when file is saved
[ "Save", "breakpoints", "when", "file", "is", "saved" ]
def store_file_breaks(self): "Save breakpoints when file is saved" # XXX 13 Dec 2002 KBK Currently the file must be saved before it can # be run. The breaks are saved at that time. If we introduce # a temporary file save feature the save breaks functionality # needs to be re-verified, since the breaks at the time the # temp file is created may differ from the breaks at the last # permanent save of the file. Currently, a break introduced # after a save will be effective, but not persistent. # This is necessary to keep the saved breaks synched with the # saved file. # # Breakpoints are set as tagged ranges in the text. Certain # kinds of edits cause these ranges to be deleted: Inserting # or deleting a line just before a breakpoint, and certain # deletions prior to a breakpoint. These issues need to be # investigated and understood. It's not clear if they are # Tk issues or IDLE issues, or whether they can actually # be fixed. Since a modified file has to be saved before it is # run, and since self.breakpoints (from which the subprocess # debugger is loaded) is updated during the save, the visible # breaks stay synched with the subprocess even if one of these # unexpected breakpoint deletions occurs. breaks = self.breakpoints filename = self.io.filename try: with open(self.breakpointPath,"r") as old_file: lines = old_file.readlines() except IOError: lines = [] try: with open(self.breakpointPath,"w") as new_file: for line in lines: if not line.startswith(filename + '='): new_file.write(line) self.update_breakpoints() breaks = self.breakpoints if breaks: new_file.write(filename + '=' + str(breaks) + '\n') except IOError as err: if not getattr(self.root, "breakpoint_error_displayed", False): self.root.breakpoint_error_displayed = True tkMessageBox.showerror(title='IDLE Error', message='Unable to update breakpoint list:\n%s' % str(err), parent=self.text)
[ "def", "store_file_breaks", "(", "self", ")", ":", "# XXX 13 Dec 2002 KBK Currently the file must be saved before it can", "# be run. The breaks are saved at that time. If we introduce", "# a temporary file save feature the save breaks functionality", "# needs to be re-verified, since the breaks at the time the", "# temp file is created may differ from the breaks at the last", "# permanent save of the file. Currently, a break introduced", "# after a save will be effective, but not persistent.", "# This is necessary to keep the saved breaks synched with the", "# saved file.", "#", "# Breakpoints are set as tagged ranges in the text. Certain", "# kinds of edits cause these ranges to be deleted: Inserting", "# or deleting a line just before a breakpoint, and certain", "# deletions prior to a breakpoint. These issues need to be", "# investigated and understood. It's not clear if they are", "# Tk issues or IDLE issues, or whether they can actually", "# be fixed. Since a modified file has to be saved before it is", "# run, and since self.breakpoints (from which the subprocess", "# debugger is loaded) is updated during the save, the visible", "# breaks stay synched with the subprocess even if one of these", "# unexpected breakpoint deletions occurs.", "breaks", "=", "self", ".", "breakpoints", "filename", "=", "self", ".", "io", ".", "filename", "try", ":", "with", "open", "(", "self", ".", "breakpointPath", ",", "\"r\"", ")", "as", "old_file", ":", "lines", "=", "old_file", ".", "readlines", "(", ")", "except", "IOError", ":", "lines", "=", "[", "]", "try", ":", "with", "open", "(", "self", ".", "breakpointPath", ",", "\"w\"", ")", "as", "new_file", ":", "for", "line", "in", "lines", ":", "if", "not", "line", ".", "startswith", "(", "filename", "+", "'='", ")", ":", "new_file", ".", "write", "(", "line", ")", "self", ".", "update_breakpoints", "(", ")", "breaks", "=", "self", ".", "breakpoints", "if", "breaks", ":", "new_file", ".", "write", "(", "filename", "+", "'='", "+", "str", "(", "breaks", ")", "+", "'\\n'", ")", "except", "IOError", "as", "err", ":", "if", "not", "getattr", "(", "self", ".", "root", ",", "\"breakpoint_error_displayed\"", ",", "False", ")", ":", "self", ".", "root", ".", "breakpoint_error_displayed", "=", "True", "tkMessageBox", ".", "showerror", "(", "title", "=", "'IDLE Error'", ",", "message", "=", "'Unable to update breakpoint list:\\n%s'", "%", "str", "(", "err", ")", ",", "parent", "=", "self", ".", "text", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/idlelib/PyShell.py#L192-L237
naver/sling
5671cd445a2caae0b4dd0332299e4cfede05062c
webkit/Tools/Scripts/webkitpy/thirdparty/irc/irclib.py
python
ServerConnection.time
(self, server="")
Send a TIME command.
Send a TIME command.
[ "Send", "a", "TIME", "command", "." ]
def time(self, server=""): """Send a TIME command.""" self.send_raw("TIME" + (server and (" " + server)))
[ "def", "time", "(", "self", ",", "server", "=", "\"\"", ")", ":", "self", ".", "send_raw", "(", "\"TIME\"", "+", "(", "server", "and", "(", "\" \"", "+", "server", ")", ")", ")" ]
https://github.com/naver/sling/blob/5671cd445a2caae0b4dd0332299e4cfede05062c/webkit/Tools/Scripts/webkitpy/thirdparty/irc/irclib.py#L805-L807
trilinos/Trilinos
6168be6dd51e35e1cd681e9c4b24433e709df140
packages/seacas/scripts/exodus2.in.py
python
exodus.put_elem_blk_names
(self, names)
exo.put_elem_blk_names(elem_blk_names) -> store a list of all element block names ordered by block *INDEX*; (see description of get_elem_blk_ids() for explanation of the difference between block *ID* and block *INDEX*) input value(s): <list<string>> elem_blk_names
exo.put_elem_blk_names(elem_blk_names)
[ "exo", ".", "put_elem_blk_names", "(", "elem_blk_names", ")" ]
def put_elem_blk_names(self, names): """ exo.put_elem_blk_names(elem_blk_names) -> store a list of all element block names ordered by block *INDEX*; (see description of get_elem_blk_ids() for explanation of the difference between block *ID* and block *INDEX*) input value(s): <list<string>> elem_blk_names """ objType = ex_entity_type("EX_ELEM_BLOCK") inqType = ex_inquiry("EX_INQ_ELEM_BLK") self.__ex_put_names(objType, inqType, names)
[ "def", "put_elem_blk_names", "(", "self", ",", "names", ")", ":", "objType", "=", "ex_entity_type", "(", "\"EX_ELEM_BLOCK\"", ")", "inqType", "=", "ex_inquiry", "(", "\"EX_INQ_ELEM_BLK\"", ")", "self", ".", "__ex_put_names", "(", "objType", ",", "inqType", ",", "names", ")" ]
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/scripts/exodus2.in.py#L1278-L1291
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/genmsg/src/genmsg/msg_loader.py
python
MsgContext.get_registered
(self, full_msg_type)
:raises: :exc:`KeyError` If not registered
:raises: :exc:`KeyError` If not registered
[ ":", "raises", ":", ":", "exc", ":", "KeyError", "If", "not", "registered" ]
def get_registered(self, full_msg_type): """ :raises: :exc:`KeyError` If not registered """ full_msg_type = bare_msg_type(full_msg_type) if self.is_registered(full_msg_type): package, base_type = package_resource_name(full_msg_type) return self._registered_packages[package][base_type] else: raise KeyError(full_msg_type)
[ "def", "get_registered", "(", "self", ",", "full_msg_type", ")", ":", "full_msg_type", "=", "bare_msg_type", "(", "full_msg_type", ")", "if", "self", ".", "is_registered", "(", "full_msg_type", ")", ":", "package", ",", "base_type", "=", "package_resource_name", "(", "full_msg_type", ")", "return", "self", ".", "_registered_packages", "[", "package", "]", "[", "base_type", "]", "else", ":", "raise", "KeyError", "(", "full_msg_type", ")" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/genmsg/src/genmsg/msg_loader.py#L429-L438
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/idlelib/PyShell.py
python
ModifiedInterpreter.runcommand
(self, code)
return 1
Run the code without invoking the debugger
Run the code without invoking the debugger
[ "Run", "the", "code", "without", "invoking", "the", "debugger" ]
def runcommand(self, code): "Run the code without invoking the debugger" # The code better not raise an exception! if self.tkconsole.executing: self.display_executing_dialog() return 0 if self.rpcclt: self.rpcclt.remotequeue("exec", "runcode", (code,), {}) else: exec code in self.locals return 1
[ "def", "runcommand", "(", "self", ",", "code", ")", ":", "# The code better not raise an exception!", "if", "self", ".", "tkconsole", ".", "executing", ":", "self", ".", "display_executing_dialog", "(", ")", "return", "0", "if", "self", ".", "rpcclt", ":", "self", ".", "rpcclt", ".", "remotequeue", "(", "\"exec\"", ",", "\"runcode\"", ",", "(", "code", ",", ")", ",", "{", "}", ")", "else", ":", "exec", "code", "in", "self", ".", "locals", "return", "1" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/idlelib/PyShell.py#L736-L746
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/piexif/_insert.py
python
insert
(exif, image, new_file=None)
py:function:: piexif.insert(exif_bytes, filename) Insert exif into JPEG. :param bytes exif_bytes: Exif as bytes :param str filename: JPEG
py:function:: piexif.insert(exif_bytes, filename)
[ "py", ":", "function", "::", "piexif", ".", "insert", "(", "exif_bytes", "filename", ")" ]
def insert(exif, image, new_file=None): """ py:function:: piexif.insert(exif_bytes, filename) Insert exif into JPEG. :param bytes exif_bytes: Exif as bytes :param str filename: JPEG """ if exif[0:6] != b"\x45\x78\x69\x66\x00\x00": raise ValueError("Given data is not exif data") output_file = False # Prevents "UnicodeWarning: Unicode equal comparison failed" warnings on Python 2 maybe_image = sys.version_info >= (3,0,0) or isinstance(image, str) if maybe_image and image[0:2] == b"\xff\xd8": image_data = image file_type = "jpeg" elif maybe_image and image[0:4] == b"RIFF" and image[8:12] == b"WEBP": image_data = image file_type = "webp" else: with open(image, 'rb') as f: image_data = f.read() if image_data[0:2] == b"\xff\xd8": file_type = "jpeg" elif image_data[0:4] == b"RIFF" and image_data[8:12] == b"WEBP": file_type = "webp" else: raise InvalidImageDataError output_file = True if file_type == "jpeg": exif = b"\xff\xe1" + struct.pack(">H", len(exif) + 2) + exif segments = split_into_segments(image_data) new_data = merge_segments(segments, exif) elif file_type == "webp": exif = exif[6:] new_data = _webp.insert(image_data, exif) if isinstance(new_file, io.BytesIO): new_file.write(new_data) new_file.seek(0) elif new_file: with open(new_file, "wb+") as f: f.write(new_data) elif output_file: with open(image, "wb+") as f: f.write(new_data) else: raise ValueError("Give a 3rd argument to 'insert' to output file")
[ "def", "insert", "(", "exif", ",", "image", ",", "new_file", "=", "None", ")", ":", "if", "exif", "[", "0", ":", "6", "]", "!=", "b\"\\x45\\x78\\x69\\x66\\x00\\x00\"", ":", "raise", "ValueError", "(", "\"Given data is not exif data\"", ")", "output_file", "=", "False", "# Prevents \"UnicodeWarning: Unicode equal comparison failed\" warnings on Python 2", "maybe_image", "=", "sys", ".", "version_info", ">=", "(", "3", ",", "0", ",", "0", ")", "or", "isinstance", "(", "image", ",", "str", ")", "if", "maybe_image", "and", "image", "[", "0", ":", "2", "]", "==", "b\"\\xff\\xd8\"", ":", "image_data", "=", "image", "file_type", "=", "\"jpeg\"", "elif", "maybe_image", "and", "image", "[", "0", ":", "4", "]", "==", "b\"RIFF\"", "and", "image", "[", "8", ":", "12", "]", "==", "b\"WEBP\"", ":", "image_data", "=", "image", "file_type", "=", "\"webp\"", "else", ":", "with", "open", "(", "image", ",", "'rb'", ")", "as", "f", ":", "image_data", "=", "f", ".", "read", "(", ")", "if", "image_data", "[", "0", ":", "2", "]", "==", "b\"\\xff\\xd8\"", ":", "file_type", "=", "\"jpeg\"", "elif", "image_data", "[", "0", ":", "4", "]", "==", "b\"RIFF\"", "and", "image_data", "[", "8", ":", "12", "]", "==", "b\"WEBP\"", ":", "file_type", "=", "\"webp\"", "else", ":", "raise", "InvalidImageDataError", "output_file", "=", "True", "if", "file_type", "==", "\"jpeg\"", ":", "exif", "=", "b\"\\xff\\xe1\"", "+", "struct", ".", "pack", "(", "\">H\"", ",", "len", "(", "exif", ")", "+", "2", ")", "+", "exif", "segments", "=", "split_into_segments", "(", "image_data", ")", "new_data", "=", "merge_segments", "(", "segments", ",", "exif", ")", "elif", "file_type", "==", "\"webp\"", ":", "exif", "=", "exif", "[", "6", ":", "]", "new_data", "=", "_webp", ".", "insert", "(", "image_data", ",", "exif", ")", "if", "isinstance", "(", "new_file", ",", "io", ".", "BytesIO", ")", ":", "new_file", ".", "write", "(", "new_data", ")", "new_file", ".", "seek", "(", "0", ")", "elif", "new_file", ":", "with", "open", "(", "new_file", ",", "\"wb+\"", ")", "as", "f", ":", "f", ".", "write", "(", "new_data", ")", "elif", "output_file", ":", "with", "open", "(", "image", ",", "\"wb+\"", ")", "as", "f", ":", "f", ".", "write", "(", "new_data", ")", "else", ":", "raise", "ValueError", "(", "\"Give a 3rd argument to 'insert' to output file\"", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/piexif/_insert.py#L9-L60
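Typical call shapes for the function above, following its documented signature (the tag value and filenames are placeholders):

```python
import piexif

exif_bytes = piexif.dump({"0th": {piexif.ImageIFD.Artist: b"me"}})
piexif.insert(exif_bytes, "photo.jpg")                # rewrite the file in place
piexif.insert(exif_bytes, "photo.jpg", "tagged.jpg")  # write to a new file instead
```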
OSGeo/gdal
3748fc4ba4fba727492774b2b908a2130c864a83
swig/python/osgeo/ogr.py
python
Geometry.GetSpatialReference
(self, *args)
return _ogr.Geometry_GetSpatialReference(self, *args)
r""" GetSpatialReference(Geometry self) -> SpatialReference OGRSpatialReferenceH OGR_G_GetSpatialReference(OGRGeometryH hGeom) Returns spatial reference system for geometry. This function relates to the SFCOM IGeometry::get_SpatialReference() method. This function is the same as the CPP method OGRGeometry::getSpatialReference(). Parameters: ----------- hGeom: handle on the geometry to get spatial reference from. a reference to the spatial reference geometry.
r""" GetSpatialReference(Geometry self) -> SpatialReference OGRSpatialReferenceH OGR_G_GetSpatialReference(OGRGeometryH hGeom)
[ "r", "GetSpatialReference", "(", "Geometry", "self", ")", "-", ">", "SpatialReference", "OGRSpatialReferenceH", "OGR_G_GetSpatialReference", "(", "OGRGeometryH", "hGeom", ")" ]
def GetSpatialReference(self, *args): r""" GetSpatialReference(Geometry self) -> SpatialReference OGRSpatialReferenceH OGR_G_GetSpatialReference(OGRGeometryH hGeom) Returns spatial reference system for geometry. This function relates to the SFCOM IGeometry::get_SpatialReference() method. This function is the same as the CPP method OGRGeometry::getSpatialReference(). Parameters: ----------- hGeom: handle on the geometry to get spatial reference from. a reference to the spatial reference geometry. """ return _ogr.Geometry_GetSpatialReference(self, *args)
[ "def", "GetSpatialReference", "(", "self", ",", "*", "args", ")", ":", "return", "_ogr", ".", "Geometry_GetSpatialReference", "(", "self", ",", "*", "args", ")" ]
https://github.com/OSGeo/gdal/blob/3748fc4ba4fba727492774b2b908a2130c864a83/swig/python/osgeo/ogr.py#L6987-L7008
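A minimal round trip for the accessor above; a geometry only reports a spatial reference if one was assigned to it (or inherited from its layer):

```python
from osgeo import ogr, osr

srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)                          # WGS 84
geom = ogr.CreateGeometryFromWkt("POINT (30 10)")
geom.AssignSpatialReference(srs)
assert geom.GetSpatialReference().IsSame(srs)
```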
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/autograph/pyct/static_analysis/activity.py
python
Scope.merge_from
(self, other)
Adds all activity from another scope to this scope.
Adds all activity from another scope to this scope.
[ "Adds", "all", "activity", "from", "another", "scope", "to", "this", "scope", "." ]
def merge_from(self, other): """Adds all activity from another scope to this scope.""" assert not self.is_final if self.parent is not None: assert other.parent is not None self.parent.merge_from(other.parent) self.isolated_names.update(other.isolated_names) self.read.update(other.read) self.modified.update(other.modified) self.bound.update(other.bound) self.deleted.update(other.deleted) self.annotations.update(other.annotations) self.params.update(other.params)
[ "def", "merge_from", "(", "self", ",", "other", ")", ":", "assert", "not", "self", ".", "is_final", "if", "self", ".", "parent", "is", "not", "None", ":", "assert", "other", ".", "parent", "is", "not", "None", "self", ".", "parent", ".", "merge_from", "(", "other", ".", "parent", ")", "self", ".", "isolated_names", ".", "update", "(", "other", ".", "isolated_names", ")", "self", ".", "read", ".", "update", "(", "other", ".", "read", ")", "self", ".", "modified", ".", "update", "(", "other", ".", "modified", ")", "self", ".", "bound", ".", "update", "(", "other", ".", "bound", ")", "self", ".", "deleted", ".", "update", "(", "other", ".", "deleted", ")", "self", ".", "annotations", ".", "update", "(", "other", ".", "annotations", ")", "self", ".", "params", ".", "update", "(", "other", ".", "params", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/autograph/pyct/static_analysis/activity.py#L167-L179
idaholab/moose
9eeebc65e098b4c30f8205fb41591fd5b61eb6ff
python/MooseDocs/common/load_config.py
python
_yaml_load_content
(config, in_ext)
return MooseDocs.common.get_content(items, in_ext)
Load the 'Content' section.
Load the 'Content' section.
[ "Load", "the", "Content", "section", "." ]
def _yaml_load_content(config, in_ext): """Load the 'Content' section.""" options = config.get('Content', None) if options is None: msg = "The 'Content' section is required." raise exceptions.MooseDocsException(msg) items = MooseDocs.common.get_items(options) return MooseDocs.common.get_content(items, in_ext)
[ "def", "_yaml_load_content", "(", "config", ",", "in_ext", ")", ":", "options", "=", "config", ".", "get", "(", "'Content'", ",", "None", ")", "if", "options", "is", "None", ":", "msg", "=", "\"The 'Content' section is required.\"", "raise", "exceptions", ".", "MooseDocsException", "(", "msg", ")", "items", "=", "MooseDocs", ".", "common", ".", "get_items", "(", "options", ")", "return", "MooseDocs", ".", "common", ".", "get_content", "(", "items", ",", "in_ext", ")" ]
https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/MooseDocs/common/load_config.py#L240-L248
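In dict form, the loader above expects something like the following once the YAML is parsed. The list-of-content-roots shape and the `in_ext` value are assumptions about the MooseDocs schema, shown only to make the required 'Content' key concrete:

```python
# Hypothetical parsed config; only the presence of 'Content' is
# guaranteed by the loader's own check.
config = {'Content': ['doc/content']}
content = _yaml_load_content(config, in_ext=('.md',))  # raises MooseDocsException if 'Content' is absent
```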
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/nn/cell.py
python
Cell.set_broadcast_flag
(self, mode=True)
return self
Set parameter broadcast mode for this cell. Args: mode (bool): Specifies whether the mode is parameter broadcast. Default: True.
Set parameter broadcast mode for this cell.
[ "Set", "parameter", "broadcast", "mode", "for", "this", "cell", "." ]
def set_broadcast_flag(self, mode=True): """ Set parameter broadcast mode for this cell. Args: mode (bool): Specifies whether the mode is parameter broadcast. Default: True. """ self.add_flags_recursive(broadcast_flag=mode) return self
[ "def", "set_broadcast_flag", "(", "self", ",", "mode", "=", "True", ")", ":", "self", ".", "add_flags_recursive", "(", "broadcast_flag", "=", "mode", ")", "return", "self" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/nn/cell.py#L1425-L1433
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/contrib/framework/python/framework/deprecation.py
python
deprecated_args
(date, instructions, *deprecated_arg_names)
return deprecated_wrapper
Decorator for marking specific function arguments as deprecated. This decorator logs a deprecation warning whenever the decorated function is called with the deprecated argument. It has the following format: Calling <function> (from <module>) with <arg> is deprecated and will be removed after <date>. Instructions for updating: <instructions> <function> will include the class name if it is a method. It also edits the docstring of the function: ' (deprecated arguments)' is appended to the first line of the docstring and a deprecation notice is prepended to the rest of the docstring. Args: date: String. The date the function is scheduled to be removed. Must be ISO 8601 (YYYY-MM-DD). instructions: String. Instructions on how to update code using the deprecated function. *deprecated_arg_names: String. The deprecated arguments. Returns: Decorated function or method. Raises: ValueError: If date is not in ISO 8601 format, instructions are empty, or the deprecated arguments are not present in the function signature.
Decorator for marking specific function arguments as deprecated.
[ "Decorator", "for", "marking", "specific", "function", "arguments", "as", "deprecated", "." ]
def deprecated_args(date, instructions, *deprecated_arg_names): """Decorator for marking specific function arguments as deprecated. This decorator logs a deprecation warning whenever the decorated function is called with the deprecated argument. It has the following format: Calling <function> (from <module>) with <arg> is deprecated and will be removed after <date>. Instructions for updating: <instructions> <function> will include the class name if it is a method. It also edits the docstring of the function: ' (deprecated arguments)' is appended to the first line of the docstring and a deprecation notice is prepended to the rest of the docstring. Args: date: String. The date the function is scheduled to be removed. Must be ISO 8601 (YYYY-MM-DD). instructions: String. Instructions on how to update code using the deprecated function. *deprecated_arg_names: String. The deprecated arguments. Returns: Decorated function or method. Raises: ValueError: If date is not in ISO 8601 format, instructions are empty, or the deprecated arguments are not present in the function signature. """ _validate_deprecation_args(date, instructions) if not deprecated_arg_names: raise ValueError('Specify which argument is deprecated.') def deprecated_wrapper(func): """Deprecation decorator.""" _validate_callable(func, 'deprecated_args') arg_spec = inspect.getargspec(func) deprecated_positions = [ (i, arg_name) for (i, arg_name) in enumerate(arg_spec.args) if arg_name in deprecated_arg_names] is_varargs_deprecated = arg_spec.varargs in deprecated_arg_names is_kwargs_deprecated = arg_spec.keywords in deprecated_arg_names if (len(deprecated_positions) + is_varargs_deprecated + is_kwargs_deprecated != len(deprecated_arg_names)): known_args = arg_spec.args + [arg_spec.varargs, arg_spec.keywords] missing_args = [arg_name for arg_name in deprecated_arg_names if arg_name not in known_args] raise ValueError('The following deprecated arguments are not present ' 'in the function signature: %s' % missing_args) @functools.wraps(func) def new_func(*args, **kwargs): """Deprecation wrapper.""" invalid_args = [] for (i, arg_name) in deprecated_positions: if i < len(args): invalid_args.append(arg_name) if is_varargs_deprecated and len(args) > len(arg_spec.args): invalid_args.append(arg_spec.varargs) if is_kwargs_deprecated and kwargs: invalid_args.append(arg_spec.keywords) for arg_name in deprecated_arg_names: if arg_name in kwargs: invalid_args.append(arg_name) for arg_name in invalid_args: logging.warning( 'Calling %s (from %s) with %s is deprecated and will be removed ' 'after %s.\nInstructions for updating:\n%s', _get_qualified_name(func), func.__module__, arg_name, date, instructions) return func(*args, **kwargs) new_func.__doc__ = _add_deprecated_arg_notice_to_docstring( func.__doc__, date, instructions) return new_func return deprecated_wrapper
[ "def", "deprecated_args", "(", "date", ",", "instructions", ",", "*", "deprecated_arg_names", ")", ":", "_validate_deprecation_args", "(", "date", ",", "instructions", ")", "if", "not", "deprecated_arg_names", ":", "raise", "ValueError", "(", "'Specify which argument is deprecated.'", ")", "def", "deprecated_wrapper", "(", "func", ")", ":", "\"\"\"Deprecation decorator.\"\"\"", "_validate_callable", "(", "func", ",", "'deprecated_args'", ")", "arg_spec", "=", "inspect", ".", "getargspec", "(", "func", ")", "deprecated_positions", "=", "[", "(", "i", ",", "arg_name", ")", "for", "(", "i", ",", "arg_name", ")", "in", "enumerate", "(", "arg_spec", ".", "args", ")", "if", "arg_name", "in", "deprecated_arg_names", "]", "is_varargs_deprecated", "=", "arg_spec", ".", "varargs", "in", "deprecated_arg_names", "is_kwargs_deprecated", "=", "arg_spec", ".", "keywords", "in", "deprecated_arg_names", "if", "(", "len", "(", "deprecated_positions", ")", "+", "is_varargs_deprecated", "+", "is_kwargs_deprecated", "!=", "len", "(", "deprecated_arg_names", ")", ")", ":", "known_args", "=", "arg_spec", ".", "args", "+", "[", "arg_spec", ".", "varargs", ",", "arg_spec", ".", "keywords", "]", "missing_args", "=", "[", "arg_name", "for", "arg_name", "in", "deprecated_arg_names", "if", "arg_name", "not", "in", "known_args", "]", "raise", "ValueError", "(", "'The following deprecated arguments are not present '", "'in the function signature: %s'", "%", "missing_args", ")", "@", "functools", ".", "wraps", "(", "func", ")", "def", "new_func", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Deprecation wrapper.\"\"\"", "invalid_args", "=", "[", "]", "for", "(", "i", ",", "arg_name", ")", "in", "deprecated_positions", ":", "if", "i", "<", "len", "(", "args", ")", ":", "invalid_args", ".", "append", "(", "arg_name", ")", "if", "is_varargs_deprecated", "and", "len", "(", "args", ")", ">", "len", "(", "arg_spec", ".", "args", ")", ":", "invalid_args", ".", "append", "(", "arg_spec", ".", "varargs", ")", "if", "is_kwargs_deprecated", "and", "kwargs", ":", "invalid_args", ".", "append", "(", "arg_spec", ".", "keywords", ")", "for", "arg_name", "in", "deprecated_arg_names", ":", "if", "arg_name", "in", "kwargs", ":", "invalid_args", ".", "append", "(", "arg_name", ")", "for", "arg_name", "in", "invalid_args", ":", "logging", ".", "warning", "(", "'Calling %s (from %s) with %s is deprecated and will be removed '", "'after %s.\\nInstructions for updating:\\n%s'", ",", "_get_qualified_name", "(", "func", ")", ",", "func", ".", "__module__", ",", "arg_name", ",", "date", ",", "instructions", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "new_func", ".", "__doc__", "=", "_add_deprecated_arg_notice_to_docstring", "(", "func", ".", "__doc__", ",", "date", ",", "instructions", ")", "return", "new_func", "return", "deprecated_wrapper" ]
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/framework/python/framework/deprecation.py#L145-L222
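Usage shape documented above: list the deprecated argument names after the date and instructions, and only calls that actually pass one of them log the warning:

```python
@deprecated_args('2016-01-01', 'Use `new_arg` instead.', 'old_arg')
def f(x, old_arg=None, new_arg=None):
    return x

f(1)             # silent
f(1, old_arg=2)  # logs the deprecation warning, still returns 1
```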
adobe/brackets-app
26dc31087a30bef7754b94e09fe7e74f6da9d699
src/mac/tools/change_mach_o_flags.py
python
WriteUInt32
(file, uint32, endian)
Writes |uint32| as an unsigned 32-bit integer to the file-like |file| object, treating it as having endianness specified by |endian| (per the |struct| module).
Writes |uint32| as an unsigned 32-bit integer to the file-like |file| object, treating it as having endianness specified by |endian| (per the |struct| module).
[ "Writes", "|uint32|", "as", "an", "unsigned", "32", "-", "bit", "integer", "to", "the", "file", "-", "like", "|file|", "object", "treating", "it", "as", "having", "endianness", "specified", "by", "|endian|", "(", "per", "the", "|struct|", "module", ")", "." ]
def WriteUInt32(file, uint32, endian): """Writes |uint32| as an unsigned 32-bit integer to the file-like |file| object, treating it as having endianness specified by |endian| (per the |struct| module).""" bytes = struct.pack(endian + 'I', uint32) assert len(bytes) == 4 file.write(bytes)
[ "def", "WriteUInt32", "(", "file", ",", "uint32", ",", "endian", ")", ":", "bytes", "=", "struct", ".", "pack", "(", "endian", "+", "'I'", ",", "uint32", ")", "assert", "len", "(", "bytes", ")", "==", "4", "file", ".", "write", "(", "bytes", ")" ]
https://github.com/adobe/brackets-app/blob/26dc31087a30bef7754b94e09fe7e74f6da9d699/src/mac/tools/change_mach_o_flags.py#L164-L172
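The `endian` argument is a `struct` byte-order prefix ('<' little-endian, '>' big-endian), so the helper is a length-checked wrapper around `struct.pack` (this assumes the function above is in scope):

```python
import io

buf = io.BytesIO()
WriteUInt32(buf, 0x12345678, '>')             # big-endian
assert buf.getvalue() == b'\x12\x34\x56\x78'

buf = io.BytesIO()
WriteUInt32(buf, 0x12345678, '<')             # little-endian
assert buf.getvalue() == b'\x78\x56\x34\x12'
```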
ablab/spades
3a754192b88540524ce6fb69eef5ea9273a38465
assembler/ext/src/python_libs/joblib3/pool.py
python
_strided_from_memmap
(filename, dtype, mode, offset, order, shape, strides, total_buffer_len)
Reconstruct an array view on a memory mapped file
Reconstruct an array view on a memory mapped file
[ "Reconstruct", "an", "array", "view", "on", "a", "memory", "mapped", "file" ]
def _strided_from_memmap(filename, dtype, mode, offset, order, shape, strides, total_buffer_len): """Reconstruct an array view on a memory mapped file""" if mode == 'w+': # Do not zero the original data when unpickling mode = 'r+' if strides is None: # Simple, contiguous memmap return np.memmap(filename, dtype=dtype, shape=shape, mode=mode, offset=offset, order=order) else: # For non-contiguous data, memmap the total enclosing buffer and then # extract the non-contiguous view with the stride-tricks API base = np.memmap(filename, dtype=dtype, shape=total_buffer_len, mode=mode, offset=offset, order=order) return as_strided(base, shape=shape, strides=strides)
[ "def", "_strided_from_memmap", "(", "filename", ",", "dtype", ",", "mode", ",", "offset", ",", "order", ",", "shape", ",", "strides", ",", "total_buffer_len", ")", ":", "if", "mode", "==", "'w+'", ":", "# Do not zero the original data when unpickling", "mode", "=", "'r+'", "if", "strides", "is", "None", ":", "# Simple, contiguous memmap", "return", "np", ".", "memmap", "(", "filename", ",", "dtype", "=", "dtype", ",", "shape", "=", "shape", ",", "mode", "=", "mode", ",", "offset", "=", "offset", ",", "order", "=", "order", ")", "else", ":", "# For non-contiguous data, memmap the total enclosing buffer and then", "# extract the non-contiguous view with the stride-tricks API", "base", "=", "np", ".", "memmap", "(", "filename", ",", "dtype", "=", "dtype", ",", "shape", "=", "total_buffer_len", ",", "mode", "=", "mode", ",", "offset", "=", "offset", ",", "order", "=", "order", ")", "return", "as_strided", "(", "base", ",", "shape", "=", "shape", ",", "strides", "=", "strides", ")" ]
https://github.com/ablab/spades/blob/3a754192b88540524ce6fb69eef5ea9273a38465/assembler/ext/src/python_libs/joblib3/pool.py#L93-L109
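The non-contiguous branch rebuilds a view from raw (shape, strides) metadata; the same stride-tricks mechanism works on an ordinary array:

```python
import numpy as np
from numpy.lib.stride_tricks import as_strided

base = np.arange(6, dtype=np.int64)       # itemsize 8 -> strides (8,)
view = as_strided(base, shape=(3,), strides=(16,))
assert view.tolist() == [0, 2, 4]         # every other element, no copy
```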
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
scripts/abins/input/euphonicloader.py
python
EuphonicLoader.__init__
(self, input_ab_initio_filename)
:param input_ab_initio_filename: name of file with phonon data (foo.phonon)
[]
def __init__(self, input_ab_initio_filename): """ :param input_ab_initio_filename: name of file with phonon data (foo.phonon) """ if not isinstance(input_ab_initio_filename, str): raise TypeError('Filename must be a string') elif not Path(input_ab_initio_filename).is_file(): raise IOError(f'Ab initio file {input_ab_initio_filename} not found.') super().__init__(input_ab_initio_filename=input_ab_initio_filename) self._ab_initio_program = "FORCECONSTANTS"
[ "def", "__init__", "(", "self", ",", "input_ab_initio_filename", ")", ":", "if", "not", "isinstance", "(", "input_ab_initio_filename", ",", "str", ")", ":", "raise", "TypeError", "(", "'Filename must be a string'", ")", "elif", "not", "Path", "(", "input_ab_initio_filename", ")", ".", "is_file", "(", ")", ":", "raise", "IOError", "(", "f'Ab initio file {input_ab_initio_filename} not found.'", ")", "super", "(", ")", ".", "__init__", "(", "input_ab_initio_filename", "=", "input_ab_initio_filename", ")", "self", ".", "_ab_initio_program", "=", "\"FORCECONSTANTS\"" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/abins/input/euphonicloader.py#L19-L30
apache/incubator-mxnet
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
python/mxnet/numpy/multiarray.py
python
may_share_memory
(a, b, max_work=None)
return _mx_nd_np.may_share_memory(a, b, max_work)
Determine if two arrays might share memory A return of True does not necessarily mean that the two arrays share any element. It just means that they *might*. Only the memory bounds of a and b are checked by default. Parameters ---------- a, b : ndarray Input arrays Returns ------- out : bool See Also -------- shares_memory Examples -------- >>> np.may_share_memory(np.array([1,2]), np.array([5,8,9])) False >>> x = np.zeros([3, 4]) >>> np.may_share_memory(x[:,0], x[:,1]) True .. note:: This function differs from the original `numpy.may_share_memory <https://docs.scipy.org/doc/numpy/reference/generated/numpy.may_share_memory.html>`_ in the following way(s): * Does not support `max_work`, it is a dummy argument * Actually it is same as `shares_memory` in MXNet np
Determine if two arrays might share memory
[ "Determine", "if", "two", "arrays", "might", "share", "memory" ]
def may_share_memory(a, b, max_work=None): """ Determine if two arrays might share memory A return of True does not necessarily mean that the two arrays share any element. It just means that they *might*. Only the memory bounds of a and b are checked by default. Parameters ---------- a, b : ndarray Input arrays Returns ------- out : bool See Also -------- shares_memory Examples -------- >>> np.may_share_memory(np.array([1,2]), np.array([5,8,9])) False >>> x = np.zeros([3, 4]) >>> np.may_share_memory(x[:,0], x[:,1]) True .. note:: This function differs from the original `numpy.may_share_memory <https://docs.scipy.org/doc/numpy/reference/generated/numpy.may_share_memory.html>`_ in the following way(s): * Does not support `max_work`, it is a dummy argument * Actually it is same as `shares_memory` in MXNet np """ return _mx_nd_np.may_share_memory(a, b, max_work)
[ "def", "may_share_memory", "(", "a", ",", "b", ",", "max_work", "=", "None", ")", ":", "return", "_mx_nd_np", ".", "may_share_memory", "(", "a", ",", "b", ",", "max_work", ")" ]
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/numpy/multiarray.py#L11315-L11353
wyrover/book-code
7f4883d9030d553bc6bcfa3da685e34789839900
3rdparty/protobuf/python/google/protobuf/internal/containers.py
python
RepeatedCompositeFieldContainer.MergeFrom
(self, other)
Appends the contents of another repeated field of the same type to this one, copying each individual message.
Appends the contents of another repeated field of the same type to this one, copying each individual message.
[ "Appends", "the", "contents", "of", "another", "repeated", "field", "of", "the", "same", "type", "to", "this", "one", "copying", "each", "individual", "message", "." ]
def MergeFrom(self, other): """Appends the contents of another repeated field of the same type to this one, copying each individual message. """ self.extend(other._values)
[ "def", "MergeFrom", "(", "self", ",", "other", ")", ":", "self", ".", "extend", "(", "other", ".", "_values", ")" ]
https://github.com/wyrover/book-code/blob/7f4883d9030d553bc6bcfa3da685e34789839900/3rdparty/protobuf/python/google/protobuf/internal/containers.py#L393-L397
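A minimal usage sketch for the MergeFrom record above. The addressbook_pb2 module and its AddressBook/Person messages are hypothetical stand-ins for any generated protobuf type with a repeated message field:

from addressbook_pb2 import AddressBook  # hypothetical generated module

src = AddressBook()
src.people.add(name="Ada")           # populate one repeated Person entry
dst = AddressBook()
dst.people.MergeFrom(src.people)     # copies each message; src is left untouched
assert len(dst.people) == 1 and dst.people[0].name == "Ada"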
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/keras/saving/save.py
python
save_model
(model, filepath, overwrite=True, include_optimizer=True, save_format=None, signatures=None)
Saves a model as a TensorFlow SavedModel or HDF5 file. The saved model contains: - the model's configuration (topology) - the model's weights - the model's optimizer's state (if any) Thus the saved model can be reinstantiated in the exact same state, without any of the code used for model definition or training. _SavedModel serialization_ (not yet added) The SavedModel serialization path uses `tf.saved_model.save` to save the model and all trackable objects attached to the model (e.g. layers and variables). `@tf.function`-decorated methods are also saved. Additional trackable objects and functions are added to the SavedModel to allow the model to be loaded back as a Keras Model object. Arguments: model: Keras model instance to be saved. filepath: One of the following: - String, path where to save the model - `h5py.File` object where to save the model overwrite: Whether we should overwrite any existing model at the target location, or instead ask the user with a manual prompt. include_optimizer: If True, save optimizer's state together. save_format: Either 'tf' or 'h5', indicating whether to save the model to Tensorflow SavedModel or HDF5. Defaults to 'tf' in TF 2.X, and 'h5' in TF 1.X. signatures: Signatures to save with the SavedModel. Applicable to the 'tf' format only. Please see the `signatures` argument in `tf.saved_model.save` for details. Raises: ImportError: If save format is hdf5, and h5py is not available.
Saves a model as a TensorFlow SavedModel or HDF5 file.
[ "Saves", "a", "model", "as", "a", "TensorFlow", "SavedModel", "or", "HDF5", "file", "." ]
def save_model(model, filepath, overwrite=True, include_optimizer=True, save_format=None, signatures=None): """Saves a model as a TensorFlow SavedModel or HDF5 file. The saved model contains: - the model's configuration (topology) - the model's weights - the model's optimizer's state (if any) Thus the saved model can be reinstantiated in the exact same state, without any of the code used for model definition or training. _SavedModel serialization_ (not yet added) The SavedModel serialization path uses `tf.saved_model.save` to save the model and all trackable objects attached to the model (e.g. layers and variables). `@tf.function`-decorated methods are also saved. Additional trackable objects and functions are added to the SavedModel to allow the model to be loaded back as a Keras Model object. Arguments: model: Keras model instance to be saved. filepath: One of the following: - String, path where to save the model - `h5py.File` object where to save the model overwrite: Whether we should overwrite any existing model at the target location, or instead ask the user with a manual prompt. include_optimizer: If True, save optimizer's state together. save_format: Either 'tf' or 'h5', indicating whether to save the model to Tensorflow SavedModel or HDF5. Defaults to 'tf' in TF 2.X, and 'h5' in TF 1.X. signatures: Signatures to save with the SavedModel. Applicable to the 'tf' format only. Please see the `signatures` argument in `tf.saved_model.save` for details. Raises: ImportError: If save format is hdf5, and h5py is not available. """ from tensorflow.python.keras.engine import sequential # pylint: disable=g-import-not-at-top default_format = 'tf' if tf2.enabled() else 'h5' save_format = save_format or default_format if (save_format == 'h5' or (h5py is not None and isinstance(filepath, h5py.File)) or os.path.splitext(filepath)[1] in _HDF5_EXTENSIONS): # TODO(b/130258301): add utility method for detecting model type. if (not model._is_graph_network and # pylint:disable=protected-access not isinstance(model, sequential.Sequential)): raise NotImplementedError( 'Saving the model to HDF5 format requires the model to be a ' 'Functional model or a Sequential model. It does not work for ' 'subclassed models, because such models are defined via the body of ' 'a Python method, which isn\'t safely serializable. Consider saving ' 'to the Tensorflow SavedModel format (by setting save_format="tf") ' 'or using `save_weights`.') hdf5_format.save_model_to_hdf5( model, filepath, overwrite, include_optimizer) else: saved_model_save.save(model, filepath, overwrite, include_optimizer, signatures)
[ "def", "save_model", "(", "model", ",", "filepath", ",", "overwrite", "=", "True", ",", "include_optimizer", "=", "True", ",", "save_format", "=", "None", ",", "signatures", "=", "None", ")", ":", "from", "tensorflow", ".", "python", ".", "keras", ".", "engine", "import", "sequential", "# pylint: disable=g-import-not-at-top", "default_format", "=", "'tf'", "if", "tf2", ".", "enabled", "(", ")", "else", "'h5'", "save_format", "=", "save_format", "or", "default_format", "if", "(", "save_format", "==", "'h5'", "or", "(", "h5py", "is", "not", "None", "and", "isinstance", "(", "filepath", ",", "h5py", ".", "File", ")", ")", "or", "os", ".", "path", ".", "splitext", "(", "filepath", ")", "[", "1", "]", "in", "_HDF5_EXTENSIONS", ")", ":", "# TODO(b/130258301): add utility method for detecting model type.", "if", "(", "not", "model", ".", "_is_graph_network", "and", "# pylint:disable=protected-access", "not", "isinstance", "(", "model", ",", "sequential", ".", "Sequential", ")", ")", ":", "raise", "NotImplementedError", "(", "'Saving the model to HDF5 format requires the model to be a '", "'Functional model or a Sequential model. It does not work for '", "'subclassed models, because such models are defined via the body of '", "'a Python method, which isn\\'t safely serializable. Consider saving '", "'to the Tensorflow SavedModel format (by setting save_format=\"tf\") '", "'or using `save_weights`.'", ")", "hdf5_format", ".", "save_model_to_hdf5", "(", "model", ",", "filepath", ",", "overwrite", ",", "include_optimizer", ")", "else", ":", "saved_model_save", ".", "save", "(", "model", ",", "filepath", ",", "overwrite", ",", "include_optimizer", ",", "signatures", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/keras/saving/save.py#L47-L112
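A minimal sketch of the two save paths described above, assuming a TF build whose Keras save_model exposes the save_format argument (as this record's signature does); the file paths are illustrative:

import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
model.compile(optimizer="sgd", loss="mse")
# HDF5 path: requires h5py and a Functional/Sequential model
tf.keras.models.save_model(model, "/tmp/model.h5", save_format="h5")
# SavedModel path: also works for subclassed models
tf.keras.models.save_model(model, "/tmp/model_tf", save_format="tf")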
ceph/ceph
959663007321a369c83218414a29bd9dbc8bda3a
src/pybind/mgr/mirroring/fs/dir_map/policy.py
python
Policy.can_shuffle_dir
(self, dir_path)
return StateTransition.is_idle(dir_state.state) and \ (time.time() - dir_state['mapped_time']) > Policy.DIR_SHUFFLE_THROTTLE_INTERVAL
Right now, shuffle directories only based on idleness. Later, we probably want to avoid shuffling images that were recently shuffled.
Right now, shuffle directories only based on idleness. Later, we probably want to avoid shuffling images that were recently shuffled.
[ "Right", "now", "shuffle", "directories", "only", "based", "on", "idleness", ".", "Later", "we", "probably", "want", "to", "avoid", "shuffling", "images", "that", "were", "recently", "shuffled", "." ]
def can_shuffle_dir(self, dir_path): """Right now, shuffle directories only based on idleness. Later, we probably want to avoid shuffling images that were recently shuffled. """ log.debug(f'can_shuffle_dir: {dir_path}') dir_state = self.dir_states[dir_path] return StateTransition.is_idle(dir_state.state) and \ (time.time() - dir_state['mapped_time']) > Policy.DIR_SHUFFLE_THROTTLE_INTERVAL
[ "def", "can_shuffle_dir", "(", "self", ",", "dir_path", ")", ":", "log", ".", "debug", "(", "f'can_shuffle_dir: {dir_path}'", ")", "dir_state", "=", "self", ".", "dir_states", "[", "dir_path", "]", "return", "StateTransition", ".", "is_idle", "(", "dir_state", ".", "state", ")", "and", "(", "time", ".", "time", "(", ")", "-", "dir_state", "[", "'mapped_time'", "]", ")", ">", "Policy", ".", "DIR_SHUFFLE_THROTTLE_INTERVAL" ]
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/src/pybind/mgr/mirroring/fs/dir_map/policy.py#L55-L62
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/python/ops/rnn_cell_impl.py
python
_linear
(args, output_size, bias, bias_initializer=None, kernel_initializer=None)
Linear map: sum_i(args[i] * W[i]), where W[i] is a variable. Args: args: a 2D Tensor or a list of 2D, batch, n, Tensors. output_size: int, second dimension of W[i]. bias: boolean, whether to add a bias term or not. bias_initializer: starting value to initialize the bias (default is all zeros). kernel_initializer: starting value to initialize the weight. Returns: A 2D Tensor with shape `[batch, output_size]` equal to sum_i(args[i] * W[i]), where W[i]s are newly created matrices. Raises: ValueError: if some of the arguments has unspecified or wrong shape.
Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.
[ "Linear", "map", ":", "sum_i", "(", "args", "[", "i", "]", "*", "W", "[", "i", "]", ")", "where", "W", "[", "i", "]", "is", "a", "variable", "." ]
def _linear(args, output_size, bias, bias_initializer=None, kernel_initializer=None): """Linear map: sum_i(args[i] * W[i]), where W[i] is a variable. Args: args: a 2D Tensor or a list of 2D, batch, n, Tensors. output_size: int, second dimension of W[i]. bias: boolean, whether to add a bias term or not. bias_initializer: starting value to initialize the bias (default is all zeros). kernel_initializer: starting value to initialize the weight. Returns: A 2D Tensor with shape `[batch, output_size]` equal to sum_i(args[i] * W[i]), where W[i]s are newly created matrices. Raises: ValueError: if some of the arguments has unspecified or wrong shape. """ if args is None or (nest.is_sequence(args) and not args): raise ValueError("`args` must be specified") if not nest.is_sequence(args): args = [args] # Calculate the total size of arguments on dimension 1. total_arg_size = 0 shapes = [a.get_shape() for a in args] for shape in shapes: if shape.ndims != 2: raise ValueError("linear is expecting 2D arguments: %s" % shapes) if shape[1].value is None: raise ValueError("linear expects shape[1] to be provided for shape %s, " "but saw %s" % (shape, shape[1])) else: total_arg_size += shape[1].value dtype = [a.dtype for a in args][0] # Now the computation. scope = vs.get_variable_scope() with vs.variable_scope(scope) as outer_scope: weights = vs.get_variable( _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size], dtype=dtype, initializer=kernel_initializer) if len(args) == 1: res = math_ops.matmul(args[0], weights) else: res = math_ops.matmul(array_ops.concat(args, 1), weights) if not bias: return res with vs.variable_scope(outer_scope) as inner_scope: inner_scope.set_partitioner(None) if bias_initializer is None: bias_initializer = init_ops.constant_initializer(0.0, dtype=dtype) biases = vs.get_variable( _BIAS_VARIABLE_NAME, [output_size], dtype=dtype, initializer=bias_initializer) return nn_ops.bias_add(res, biases)
[ "def", "_linear", "(", "args", ",", "output_size", ",", "bias", ",", "bias_initializer", "=", "None", ",", "kernel_initializer", "=", "None", ")", ":", "if", "args", "is", "None", "or", "(", "nest", ".", "is_sequence", "(", "args", ")", "and", "not", "args", ")", ":", "raise", "ValueError", "(", "\"`args` must be specified\"", ")", "if", "not", "nest", ".", "is_sequence", "(", "args", ")", ":", "args", "=", "[", "args", "]", "# Calculate the total size of arguments on dimension 1.", "total_arg_size", "=", "0", "shapes", "=", "[", "a", ".", "get_shape", "(", ")", "for", "a", "in", "args", "]", "for", "shape", "in", "shapes", ":", "if", "shape", ".", "ndims", "!=", "2", ":", "raise", "ValueError", "(", "\"linear is expecting 2D arguments: %s\"", "%", "shapes", ")", "if", "shape", "[", "1", "]", ".", "value", "is", "None", ":", "raise", "ValueError", "(", "\"linear expects shape[1] to be provided for shape %s, \"", "\"but saw %s\"", "%", "(", "shape", ",", "shape", "[", "1", "]", ")", ")", "else", ":", "total_arg_size", "+=", "shape", "[", "1", "]", ".", "value", "dtype", "=", "[", "a", ".", "dtype", "for", "a", "in", "args", "]", "[", "0", "]", "# Now the computation.", "scope", "=", "vs", ".", "get_variable_scope", "(", ")", "with", "vs", ".", "variable_scope", "(", "scope", ")", "as", "outer_scope", ":", "weights", "=", "vs", ".", "get_variable", "(", "_WEIGHTS_VARIABLE_NAME", ",", "[", "total_arg_size", ",", "output_size", "]", ",", "dtype", "=", "dtype", ",", "initializer", "=", "kernel_initializer", ")", "if", "len", "(", "args", ")", "==", "1", ":", "res", "=", "math_ops", ".", "matmul", "(", "args", "[", "0", "]", ",", "weights", ")", "else", ":", "res", "=", "math_ops", ".", "matmul", "(", "array_ops", ".", "concat", "(", "args", ",", "1", ")", ",", "weights", ")", "if", "not", "bias", ":", "return", "res", "with", "vs", ".", "variable_scope", "(", "outer_scope", ")", "as", "inner_scope", ":", "inner_scope", ".", "set_partitioner", "(", "None", ")", "if", "bias_initializer", "is", "None", ":", "bias_initializer", "=", "init_ops", ".", "constant_initializer", "(", "0.0", ",", "dtype", "=", "dtype", ")", "biases", "=", "vs", ".", "get_variable", "(", "_BIAS_VARIABLE_NAME", ",", "[", "output_size", "]", ",", "dtype", "=", "dtype", ",", "initializer", "=", "bias_initializer", ")", "return", "nn_ops", ".", "bias_add", "(", "res", ",", "biases", ")" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/ops/rnn_cell_impl.py#L1284-L1346
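The map this record documents is a matmul over the concatenated inputs plus an optional bias; a NumPy sketch of the same arithmetic, with invented shapes:

import numpy as np

batch, n1, n2, output_size = 3, 4, 5, 2
a1, a2 = np.ones((batch, n1)), np.ones((batch, n2))
W = np.random.rand(n1 + n2, output_size)  # single weight matrix over the concat
b = np.zeros(output_size)
res = np.concatenate([a1, a2], axis=1) @ W + b
# equivalent to a1 @ W[:n1] + a2 @ W[n1:] + b, i.e. sum_i(args[i] * W[i]) + bias
assert res.shape == (batch, output_size)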
facebook/ThreatExchange
31914a51820c73c8a0daffe62ccca29a6e3d359e
api-reference-examples/python/pytx/pytx/malware.py
python
Malware.rfh
(self)
return rfh
Return a file handle of the base64-decoded and unzipped sample.
Return a file handle of the base64-decoded and unzipped sample.
[ "Return", "a", "file", "handle", "of", "the", "base64", "-", "decoded", "and", "unzipped", "sample", "." ]
def rfh(self): """ Return a file handle of the base64-decoded and unzipped sample. """ zfh = self.zfh rfh = io.BytesIO() with zipfile.ZipFile(zfh, 'r') as zf: for entry in zf.infolist(): rfh.write(zf.read(entry.filename, self.get(m.PASSWORD))) rfh.seek(0) return rfh
[ "def", "rfh", "(", "self", ")", ":", "zfh", "=", "self", ".", "zfh", "rfh", "=", "io", ".", "BytesIO", "(", ")", "with", "zipfile", ".", "ZipFile", "(", "zfh", ",", "'r'", ")", "as", "zf", ":", "for", "entry", "in", "zf", ".", "infolist", "(", ")", ":", "rfh", ".", "write", "(", "zf", ".", "read", "(", "entry", ".", "filename", ",", "self", ".", "get", "(", "m", ".", "PASSWORD", ")", ")", ")", "rfh", ".", "seek", "(", "0", ")", "return", "rfh" ]
https://github.com/facebook/ThreatExchange/blob/31914a51820c73c8a0daffe62ccca29a6e3d359e/api-reference-examples/python/pytx/pytx/malware.py#L105-L117
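A standalone sketch of the unzip-to-BytesIO pattern the method uses; the archive name and password are illustrative:

import io
import zipfile

with open("sample.zip", "rb") as f:           # hypothetical sample archive
    zfh = io.BytesIO(f.read())
rfh = io.BytesIO()
with zipfile.ZipFile(zfh, "r") as zf:
    for entry in zf.infolist():
        rfh.write(zf.read(entry.filename, pwd=b"infected"))
rfh.seek(0)                                    # rewind so callers read from offset 0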
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/indexes/numeric.py
python
UInt64Index.inferred_type
(self)
return "integer"
Always 'integer' for ``UInt64Index``
Always 'integer' for ``UInt64Index``
[ "Always", "integer", "for", "UInt64Index" ]
def inferred_type(self) -> str: """ Always 'integer' for ``UInt64Index`` """ return "integer"
[ "def", "inferred_type", "(", "self", ")", "->", "str", ":", "return", "\"integer\"" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/indexes/numeric.py#L307-L311
intel/llvm
e6d0547e9d99b5a56430c4749f6c7e328bf221ab
clang/bindings/python/clang/cindex.py
python
Cursor.is_const_method
(self)
return conf.lib.clang_CXXMethod_isConst(self)
Returns True if the cursor refers to a C++ member function or member function template that is declared 'const'.
Returns True if the cursor refers to a C++ member function or member function template that is declared 'const'.
[ "Returns", "True", "if", "the", "cursor", "refers", "to", "a", "C", "++", "member", "function", "or", "member", "function", "template", "that", "is", "declared", "const", "." ]
def is_const_method(self): """Returns True if the cursor refers to a C++ member function or member function template that is declared 'const'. """ return conf.lib.clang_CXXMethod_isConst(self)
[ "def", "is_const_method", "(", "self", ")", ":", "return", "conf", ".", "lib", ".", "clang_CXXMethod_isConst", "(", "self", ")" ]
https://github.com/intel/llvm/blob/e6d0547e9d99b5a56430c4749f6c7e328bf221ab/clang/bindings/python/clang/cindex.py#L1444-L1448
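A usage sketch for is_const_method, assuming libclang and its Python bindings are installed; widget.hpp is a hypothetical C++ header:

from clang.cindex import CursorKind, Index

tu = Index.create().parse("widget.hpp", args=["-x", "c++"])
for cur in tu.cursor.walk_preorder():
    if cur.kind == CursorKind.CXX_METHOD and cur.is_const_method():
        print(cur.spelling, "is declared const")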
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/requests/cookies.py
python
morsel_to_cookie
(morsel)
return create_cookie( comment=morsel['comment'], comment_url=bool(morsel['comment']), discard=False, domain=morsel['domain'], expires=expires, name=morsel.key, path=morsel['path'], port=None, rest={'HttpOnly': morsel['httponly']}, rfc2109=False, secure=bool(morsel['secure']), value=morsel.value, version=morsel['version'] or 0, )
Convert a Morsel object into a Cookie containing the one k/v pair.
Convert a Morsel object into a Cookie containing the one k/v pair.
[ "Convert", "a", "Morsel", "object", "into", "a", "Cookie", "containing", "the", "one", "k", "/", "v", "pair", "." ]
def morsel_to_cookie(morsel): """Convert a Morsel object into a Cookie containing the one k/v pair.""" expires = None if morsel['max-age']: try: expires = int(time.time() + int(morsel['max-age'])) except ValueError: raise TypeError('max-age: %s must be integer' % morsel['max-age']) elif morsel['expires']: time_template = '%a, %d-%b-%Y %H:%M:%S GMT' expires = calendar.timegm( time.strptime(morsel['expires'], time_template) ) return create_cookie( comment=morsel['comment'], comment_url=bool(morsel['comment']), discard=False, domain=morsel['domain'], expires=expires, name=morsel.key, path=morsel['path'], port=None, rest={'HttpOnly': morsel['httponly']}, rfc2109=False, secure=bool(morsel['secure']), value=morsel.value, version=morsel['version'] or 0, )
[ "def", "morsel_to_cookie", "(", "morsel", ")", ":", "expires", "=", "None", "if", "morsel", "[", "'max-age'", "]", ":", "try", ":", "expires", "=", "int", "(", "time", ".", "time", "(", ")", "+", "int", "(", "morsel", "[", "'max-age'", "]", ")", ")", "except", "ValueError", ":", "raise", "TypeError", "(", "'max-age: %s must be integer'", "%", "morsel", "[", "'max-age'", "]", ")", "elif", "morsel", "[", "'expires'", "]", ":", "time_template", "=", "'%a, %d-%b-%Y %H:%M:%S GMT'", "expires", "=", "calendar", ".", "timegm", "(", "time", ".", "strptime", "(", "morsel", "[", "'expires'", "]", ",", "time_template", ")", ")", "return", "create_cookie", "(", "comment", "=", "morsel", "[", "'comment'", "]", ",", "comment_url", "=", "bool", "(", "morsel", "[", "'comment'", "]", ")", ",", "discard", "=", "False", ",", "domain", "=", "morsel", "[", "'domain'", "]", ",", "expires", "=", "expires", ",", "name", "=", "morsel", ".", "key", ",", "path", "=", "morsel", "[", "'path'", "]", ",", "port", "=", "None", ",", "rest", "=", "{", "'HttpOnly'", ":", "morsel", "[", "'httponly'", "]", "}", ",", "rfc2109", "=", "False", ",", "secure", "=", "bool", "(", "morsel", "[", "'secure'", "]", ")", ",", "value", "=", "morsel", ".", "value", ",", "version", "=", "morsel", "[", "'version'", "]", "or", "0", ",", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/requests/cookies.py#L953-L1009
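A usage sketch converting a stdlib Morsel with this helper; the vendored import path matches this record, and the cookie values are illustrative:

from http.cookies import SimpleCookie
from pip._vendor.requests.cookies import morsel_to_cookie  # path from this record

sc = SimpleCookie()
sc["session"] = "abc123"
sc["session"]["max-age"] = 3600          # converted to an absolute `expires` timestamp
cookie = morsel_to_cookie(sc["session"])
print(cookie.name, cookie.value, cookie.expires)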
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2class.py
python
uCSIsCatPi
(code)
return ret
Check whether the character is part of Pi UCS Category
Check whether the character is part of Pi UCS Category
[ "Check", "whether", "the", "character", "is", "part", "of", "Pi", "UCS", "Category" ]
def uCSIsCatPi(code): """Check whether the character is part of Pi UCS Category """ ret = libxml2mod.xmlUCSIsCatPi(code) return ret
[ "def", "uCSIsCatPi", "(", "code", ")", ":", "ret", "=", "libxml2mod", ".", "xmlUCSIsCatPi", "(", "code", ")", "return", "ret" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L1578-L1581
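Pi is the Unicode "Punctuation, initial quote" category, so a left double quotation mark (U+201C) should match while a plain letter should not; a sketch assuming the libxml2 Python bindings are installed:

import libxml2

assert libxml2.uCSIsCatPi(0x201C)        # LEFT DOUBLE QUOTATION MARK is in Pi
assert not libxml2.uCSIsCatPi(ord("a"))  # letters are not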
ValveSoftware/source-sdk-2013
0d8dceea4310fde5706b3ce1c70609d72a38efdf
sp/src/thirdparty/protobuf-2.3.0/python/mox.py
python
Func.__init__
(self, func)
Initialize. Args: func: callable that takes one parameter and returns a bool
Initialize.
[ "Initialize", "." ]
def __init__(self, func): """Initialize. Args: func: callable that takes one parameter and returns a bool """ self._func = func
[ "def", "__init__", "(", "self", ",", "func", ")", ":", "self", ".", "_func", "=", "func" ]
https://github.com/ValveSoftware/source-sdk-2013/blob/0d8dceea4310fde5706b3ce1c70609d72a38efdf/sp/src/thirdparty/protobuf-2.3.0/python/mox.py#L1129-L1136
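A hedged sketch of the Func comparator in a mox expectation; the logger mock and its log method are hypothetical:

import mox

m = mox.Mox()
logger = m.CreateMockAnything()
logger.log(mox.Func(lambda level: 0 <= level <= 7))  # accept any valid level
m.ReplayAll()
logger.log(5)      # matches: the predicate returns True
m.VerifyAll()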
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_core.py
python
FileSystemHandler.GetRightLocation
(*args, **kwargs)
return _core_.FileSystemHandler_GetRightLocation(*args, **kwargs)
GetRightLocation(String location) -> String
GetRightLocation(String location) -> String
[ "GetRightLocation", "(", "String", "location", ")", "-", ">", "String" ]
def GetRightLocation(*args, **kwargs): """GetRightLocation(String location) -> String""" return _core_.FileSystemHandler_GetRightLocation(*args, **kwargs)
[ "def", "GetRightLocation", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "FileSystemHandler_GetRightLocation", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L2375-L2377
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/urllib/robotparser.py
python
RobotFileParser.modified
(self)
Sets the time the robots.txt file was last fetched to the current time.
Sets the time the robots.txt file was last fetched to the current time.
[ "Sets", "the", "time", "the", "robots", ".", "txt", "file", "was", "last", "fetched", "to", "the", "current", "time", "." ]
def modified(self): """Sets the time the robots.txt file was last fetched to the current time. """ import time self.last_checked = time.time()
[ "def", "modified", "(", "self", ")", ":", "import", "time", "self", ".", "last_checked", "=", "time", ".", "time", "(", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/urllib/robotparser.py#L46-L52
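A sketch with the stdlib class this record comes from: mtime() reports the timestamp that modified() records:

from urllib.robotparser import RobotFileParser

rp = RobotFileParser()
rp.modified()        # stamp "last fetched" as now
print(rp.mtime())    # seconds since the epoch, set by modified()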
JumpingYang001/webrtc
c03d6e965e1f54aeadd670e491eabe5fdb8db968
tools_webrtc/vim/webrtc.ycm_extra_conf.py
python
GetNinjaBuildOutputsForSourceFile
(out_dir, filename)
return [ target for target in output_lines if target and (target.endswith('.o') or target.endswith('.obj')) ]
Returns a list of build outputs for filename. The list is generated by invoking 'ninja -t query' tool to retrieve a list of inputs and outputs of |filename|. This list is then filtered to only include .o and .obj outputs. Args: out_dir: (String) Absolute path to ninja build output directory. filename: (String) Absolute path to source file. Returns: (List of Strings) List of target names. Will return [] if |filename| doesn't yield any .o or .obj outputs.
Returns a list of build outputs for filename.
[ "Returns", "a", "list", "of", "build", "outputs", "for", "filename", "." ]
def GetNinjaBuildOutputsForSourceFile(out_dir, filename): """Returns a list of build outputs for filename. The list is generated by invoking 'ninja -t query' tool to retrieve a list of inputs and outputs of |filename|. This list is then filtered to only include .o and .obj outputs. Args: out_dir: (String) Absolute path to ninja build output directory. filename: (String) Absolute path to source file. Returns: (List of Strings) List of target names. Will return [] if |filename| doesn't yield any .o or .obj outputs. """ # Ninja needs the path to the source file relative to the output build # directory. rel_filename = os.path.relpath(filename, out_dir) p = subprocess.Popen(['ninja', '-C', out_dir, '-t', 'query', rel_filename], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) stdout, _ = p.communicate() if p.returncode != 0: return [] # The output looks like: # ../../relative/path/to/source.cc: # outputs: # obj/reative/path/to/target.source.o # obj/some/other/target2.source.o # another/target.txt # outputs_text = stdout.partition('\n outputs:\n')[2] output_lines = [line.strip() for line in outputs_text.split('\n')] return [ target for target in output_lines if target and (target.endswith('.o') or target.endswith('.obj')) ]
[ "def", "GetNinjaBuildOutputsForSourceFile", "(", "out_dir", ",", "filename", ")", ":", "# Ninja needs the path to the source file relative to the output build", "# directory.", "rel_filename", "=", "os", ".", "path", ".", "relpath", "(", "filename", ",", "out_dir", ")", "p", "=", "subprocess", ".", "Popen", "(", "[", "'ninja'", ",", "'-C'", ",", "out_dir", ",", "'-t'", ",", "'query'", ",", "rel_filename", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "universal_newlines", "=", "True", ")", "stdout", ",", "_", "=", "p", ".", "communicate", "(", ")", "if", "p", ".", "returncode", "!=", "0", ":", "return", "[", "]", "# The output looks like:", "# ../../relative/path/to/source.cc:", "# outputs:", "# obj/reative/path/to/target.source.o", "# obj/some/other/target2.source.o", "# another/target.txt", "#", "outputs_text", "=", "stdout", ".", "partition", "(", "'\\n outputs:\\n'", ")", "[", "2", "]", "output_lines", "=", "[", "line", ".", "strip", "(", ")", "for", "line", "in", "outputs_text", ".", "split", "(", "'\\n'", ")", "]", "return", "[", "target", "for", "target", "in", "output_lines", "if", "target", "and", "(", "target", ".", "endswith", "(", "'.o'", ")", "or", "target", ".", "endswith", "(", "'.obj'", ")", ")", "]" ]
https://github.com/JumpingYang001/webrtc/blob/c03d6e965e1f54aeadd670e491eabe5fdb8db968/tools_webrtc/vim/webrtc.ycm_extra_conf.py#L123-L162
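A standalone sketch of the same parsing step on canned 'ninja -t query' output, so no ninja invocation is needed; the paths are illustrative:

stdout = (
    "../../a/b/source.cc:\n"
    "  outputs:\n"
    "    obj/a/b/target.source.o\n"
    "    another/target.txt\n"
)
outputs_text = stdout.partition("\n  outputs:\n")[2]
lines = [line.strip() for line in outputs_text.split("\n")]
print([t for t in lines if t and (t.endswith(".o") or t.endswith(".obj"))])
# ['obj/a/b/target.source.o']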
perilouswithadollarsign/cstrike15_src
f82112a2388b841d72cb62ca48ab1846dfcc11c8
thirdparty/protobuf-2.5.0/python/google/protobuf/service_reflection.py
python
GeneratedServiceType.__init__
(cls, name, bases, dictionary)
Creates a message service class. Args: name: Name of the class (ignored, but required by the metaclass protocol). bases: Base classes of the class being constructed. dictionary: The class dictionary of the class being constructed. dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object describing this protocol service type.
Creates a message service class.
[ "Creates", "a", "message", "service", "class", "." ]
def __init__(cls, name, bases, dictionary): """Creates a message service class. Args: name: Name of the class (ignored, but required by the metaclass protocol). bases: Base classes of the class being constructed. dictionary: The class dictionary of the class being constructed. dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object describing this protocol service type. """ # Don't do anything if this class doesn't have a descriptor. This happens # when a service class is subclassed. if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: return descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] service_builder = _ServiceBuilder(descriptor) service_builder.BuildService(cls)
[ "def", "__init__", "(", "cls", ",", "name", ",", "bases", ",", "dictionary", ")", ":", "# Don't do anything if this class doesn't have a descriptor. This happens", "# when a service class is subclassed.", "if", "GeneratedServiceType", ".", "_DESCRIPTOR_KEY", "not", "in", "dictionary", ":", "return", "descriptor", "=", "dictionary", "[", "GeneratedServiceType", ".", "_DESCRIPTOR_KEY", "]", "service_builder", "=", "_ServiceBuilder", "(", "descriptor", ")", "service_builder", ".", "BuildService", "(", "cls", ")" ]
https://github.com/perilouswithadollarsign/cstrike15_src/blob/f82112a2388b841d72cb62ca48ab1846dfcc11c8/thirdparty/protobuf-2.5.0/python/google/protobuf/service_reflection.py#L64-L81
MVIG-SJTU/RMPE
5188c230ec800c12be7369c3619615bc9b020aa4
scripts/cpp_lint.py
python
CheckBraces
(filename, clean_lines, linenum, error)
Looks for misplaced braces (e.g. at the end of line). Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Looks for misplaced braces (e.g. at the end of line).
[ "Looks", "for", "misplaced", "braces", "(", "e", ".", "g", ".", "at", "the", "end", "of", "line", ")", "." ]
def CheckBraces(filename, clean_lines, linenum, error): """Looks for misplaced braces (e.g. at the end of line). Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ line = clean_lines.elided[linenum] # get rid of comments and strings if Match(r'\s*{\s*$', line): # We allow an open brace to start a line in the case where someone is using # braces in a block to explicitly create a new scope, which is commonly used # to control the lifetime of stack-allocated variables. Braces are also # used for brace initializers inside function calls. We don't detect this # perfectly: we just don't complain if the last non-whitespace character on # the previous non-blank line is ',', ';', ':', '(', '{', or '}', or if the # previous line starts a preprocessor block. prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0] if (not Search(r'[,;:}{(]\s*$', prevline) and not Match(r'\s*#', prevline)): error(filename, linenum, 'whitespace/braces', 4, '{ should almost always be at the end of the previous line') # An else clause should be on the same line as the preceding closing brace. if Match(r'\s*else\s*', line): prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0] if Match(r'\s*}\s*$', prevline): error(filename, linenum, 'whitespace/newline', 4, 'An else should appear on the same line as the preceding }') # If braces come on one side of an else, they should be on both. # However, we have to worry about "else if" that spans multiple lines! if Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line): if Search(r'}\s*else if([^{]*)$', line): # could be multi-line if # find the ( after the if pos = line.find('else if') pos = line.find('(', pos) if pos > 0: (endline, _, endpos) = CloseExpression(clean_lines, linenum, pos) if endline[endpos:].find('{') == -1: # must be brace after if error(filename, linenum, 'readability/braces', 5, 'If an else has a brace on one side, it should have it on both') else: # common case: else not followed by a multi-line if error(filename, linenum, 'readability/braces', 5, 'If an else has a brace on one side, it should have it on both') # Likewise, an else should never have the else clause on the same line if Search(r'\belse [^\s{]', line) and not Search(r'\belse if\b', line): error(filename, linenum, 'whitespace/newline', 4, 'Else clause should never be on same line as else (use 2 lines)') # In the same way, a do/while should never be on one line if Match(r'\s*do [^\s{]', line): error(filename, linenum, 'whitespace/newline', 4, 'do/while clauses should not be on a single line') # Block bodies should not be followed by a semicolon. Due to C++11 # brace initialization, there are more places where semicolons are # required than not, so we use a whitelist approach to check these # rather than a blacklist. These are the places where "};" should # be replaced by just "}": # 1. Some flavor of block following closing parenthesis: # for (;;) {}; # while (...) {}; # switch (...) {}; # Function(...) {}; # if (...) {}; # if (...) else if (...) {}; # # 2. else block: # if (...) else {}; # # 3. const member function: # Function(...) const {}; # # 4. Block following some statement: # x = 42; # {}; # # 5. Block at the beginning of a function: # Function(...) { # {}; # } # # Note that naively checking for the preceding "{" will also match # braces inside multi-dimensional arrays, but this is fine since # that expression will not contain semicolons. # # 6. Block following another block: # while (true) {} # {}; # # 7. End of namespaces: # namespace {}; # # These semicolons seems far more common than other kinds of # redundant semicolons, possibly due to people converting classes # to namespaces. For now we do not warn for this case. # # Try matching case 1 first. match = Match(r'^(.*\)\s*)\{', line) if match: # Matched closing parenthesis (case 1). Check the token before the # matching opening parenthesis, and don't warn if it looks like a # macro. This avoids these false positives: # - macro that defines a base class # - multi-line macro that defines a base class # - macro that defines the whole class-head # # But we still issue warnings for macros that we know are safe to # warn, specifically: # - TEST, TEST_F, TEST_P, MATCHER, MATCHER_P # - TYPED_TEST # - INTERFACE_DEF # - EXCLUSIVE_LOCKS_REQUIRED, SHARED_LOCKS_REQUIRED, LOCKS_EXCLUDED: # # We implement a whitelist of safe macros instead of a blacklist of # unsafe macros, even though the latter appears less frequently in # google code and would have been easier to implement. This is because # the downside for getting the whitelist wrong means some extra # semicolons, while the downside for getting the blacklist wrong # would result in compile errors. # # In addition to macros, we also don't want to warn on compound # literals. closing_brace_pos = match.group(1).rfind(')') opening_parenthesis = ReverseCloseExpression( clean_lines, linenum, closing_brace_pos) if opening_parenthesis[2] > -1: line_prefix = opening_parenthesis[0][0:opening_parenthesis[2]] macro = Search(r'\b([A-Z_]+)\s*$', line_prefix) if ((macro and macro.group(1) not in ( 'TEST', 'TEST_F', 'MATCHER', 'MATCHER_P', 'TYPED_TEST', 'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED', 'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or Search(r'\s+=\s*$', line_prefix)): match = None else: # Try matching cases 2-3. match = Match(r'^(.*(?:else|\)\s*const)\s*)\{', line) if not match: # Try matching cases 4-6. These are always matched on separate lines. # # Note that we can't simply concatenate the previous line to the # current line and do a single match, otherwise we may output # duplicate warnings for the blank line case: # if (cond) { # // blank line # } prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0] if prevline and Search(r'[;{}]\s*$', prevline): match = Match(r'^(\s*)\{', line) # Check matching closing brace if match: (endline, endlinenum, endpos) = CloseExpression( clean_lines, linenum, len(match.group(1))) if endpos > -1 and Match(r'^\s*;', endline[endpos:]): # Current {} pair is eligible for semicolon check, and we have found # the redundant semicolon, output warning here. # # Note: because we are scanning forward for opening braces, and # outputting warnings for the matching closing brace, if there are # nested blocks with trailing semicolons, we will get the error # messages in reversed order. error(filename, endlinenum, 'readability/braces', 4, "You don't need a ; after a }")
[ "def", "CheckBraces", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "# get rid of comments and strings", "if", "Match", "(", "r'\\s*{\\s*$'", ",", "line", ")", ":", "# We allow an open brace to start a line in the case where someone is using", "# braces in a block to explicitly create a new scope, which is commonly used", "# to control the lifetime of stack-allocated variables. Braces are also", "# used for brace initializers inside function calls. We don't detect this", "# perfectly: we just don't complain if the last non-whitespace character on", "# the previous non-blank line is ',', ';', ':', '(', '{', or '}', or if the", "# previous line starts a preprocessor block.", "prevline", "=", "GetPreviousNonBlankLine", "(", "clean_lines", ",", "linenum", ")", "[", "0", "]", "if", "(", "not", "Search", "(", "r'[,;:}{(]\\s*$'", ",", "prevline", ")", "and", "not", "Match", "(", "r'\\s*#'", ",", "prevline", ")", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'whitespace/braces'", ",", "4", ",", "'{ should almost always be at the end of the previous line'", ")", "# An else clause should be on the same line as the preceding closing brace.", "if", "Match", "(", "r'\\s*else\\s*'", ",", "line", ")", ":", "prevline", "=", "GetPreviousNonBlankLine", "(", "clean_lines", ",", "linenum", ")", "[", "0", "]", "if", "Match", "(", "r'\\s*}\\s*$'", ",", "prevline", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'whitespace/newline'", ",", "4", ",", "'An else should appear on the same line as the preceding }'", ")", "# If braces come on one side of an else, they should be on both.", "# However, we have to worry about \"else if\" that spans multiple lines!", "if", "Search", "(", "r'}\\s*else[^{]*$'", ",", "line", ")", "or", "Match", "(", "r'[^}]*else\\s*{'", ",", "line", ")", ":", "if", "Search", "(", "r'}\\s*else if([^{]*)$'", ",", "line", ")", ":", "# could be multi-line if", "# find the ( after the if", "pos", "=", "line", ".", "find", "(", "'else if'", ")", "pos", "=", "line", ".", "find", "(", "'('", ",", "pos", ")", "if", "pos", ">", "0", ":", "(", "endline", ",", "_", ",", "endpos", ")", "=", "CloseExpression", "(", "clean_lines", ",", "linenum", ",", "pos", ")", "if", "endline", "[", "endpos", ":", "]", ".", "find", "(", "'{'", ")", "==", "-", "1", ":", "# must be brace after if", "error", "(", "filename", ",", "linenum", ",", "'readability/braces'", ",", "5", ",", "'If an else has a brace on one side, it should have it on both'", ")", "else", ":", "# common case: else not followed by a multi-line if", "error", "(", "filename", ",", "linenum", ",", "'readability/braces'", ",", "5", ",", "'If an else has a brace on one side, it should have it on both'", ")", "# Likewise, an else should never have the else clause on the same line", "if", "Search", "(", "r'\\belse [^\\s{]'", ",", "line", ")", "and", "not", "Search", "(", "r'\\belse if\\b'", ",", "line", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'whitespace/newline'", ",", "4", ",", "'Else clause should never be on same line as else (use 2 lines)'", ")", "# In the same way, a do/while should never be on one line", "if", "Match", "(", "r'\\s*do [^\\s{]'", ",", "line", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'whitespace/newline'", ",", "4", ",", "'do/while clauses should not be on a single line'", ")", "# Block bodies should not be followed by a semicolon. 
Due to C++11", "# brace initialization, there are more places where semicolons are", "# required than not, so we use a whitelist approach to check these", "# rather than a blacklist. These are the places where \"};\" should", "# be replaced by just \"}\":", "# 1. Some flavor of block following closing parenthesis:", "# for (;;) {};", "# while (...) {};", "# switch (...) {};", "# Function(...) {};", "# if (...) {};", "# if (...) else if (...) {};", "#", "# 2. else block:", "# if (...) else {};", "#", "# 3. const member function:", "# Function(...) const {};", "#", "# 4. Block following some statement:", "# x = 42;", "# {};", "#", "# 5. Block at the beginning of a function:", "# Function(...) {", "# {};", "# }", "#", "# Note that naively checking for the preceding \"{\" will also match", "# braces inside multi-dimensional arrays, but this is fine since", "# that expression will not contain semicolons.", "#", "# 6. Block following another block:", "# while (true) {}", "# {};", "#", "# 7. End of namespaces:", "# namespace {};", "#", "# These semicolons seems far more common than other kinds of", "# redundant semicolons, possibly due to people converting classes", "# to namespaces. For now we do not warn for this case.", "#", "# Try matching case 1 first.", "match", "=", "Match", "(", "r'^(.*\\)\\s*)\\{'", ",", "line", ")", "if", "match", ":", "# Matched closing parenthesis (case 1). Check the token before the", "# matching opening parenthesis, and don't warn if it looks like a", "# macro. This avoids these false positives:", "# - macro that defines a base class", "# - multi-line macro that defines a base class", "# - macro that defines the whole class-head", "#", "# But we still issue warnings for macros that we know are safe to", "# warn, specifically:", "# - TEST, TEST_F, TEST_P, MATCHER, MATCHER_P", "# - TYPED_TEST", "# - INTERFACE_DEF", "# - EXCLUSIVE_LOCKS_REQUIRED, SHARED_LOCKS_REQUIRED, LOCKS_EXCLUDED:", "#", "# We implement a whitelist of safe macros instead of a blacklist of", "# unsafe macros, even though the latter appears less frequently in", "# google code and would have been easier to implement. This is because", "# the downside for getting the whitelist wrong means some extra", "# semicolons, while the downside for getting the blacklist wrong", "# would result in compile errors.", "#", "# In addition to macros, we also don't want to warn on compound", "# literals.", "closing_brace_pos", "=", "match", ".", "group", "(", "1", ")", ".", "rfind", "(", "')'", ")", "opening_parenthesis", "=", "ReverseCloseExpression", "(", "clean_lines", ",", "linenum", ",", "closing_brace_pos", ")", "if", "opening_parenthesis", "[", "2", "]", ">", "-", "1", ":", "line_prefix", "=", "opening_parenthesis", "[", "0", "]", "[", "0", ":", "opening_parenthesis", "[", "2", "]", "]", "macro", "=", "Search", "(", "r'\\b([A-Z_]+)\\s*$'", ",", "line_prefix", ")", "if", "(", "(", "macro", "and", "macro", ".", "group", "(", "1", ")", "not", "in", "(", "'TEST'", ",", "'TEST_F'", ",", "'MATCHER'", ",", "'MATCHER_P'", ",", "'TYPED_TEST'", ",", "'EXCLUSIVE_LOCKS_REQUIRED'", ",", "'SHARED_LOCKS_REQUIRED'", ",", "'LOCKS_EXCLUDED'", ",", "'INTERFACE_DEF'", ")", ")", "or", "Search", "(", "r'\\s+=\\s*$'", ",", "line_prefix", ")", ")", ":", "match", "=", "None", "else", ":", "# Try matching cases 2-3.", "match", "=", "Match", "(", "r'^(.*(?:else|\\)\\s*const)\\s*)\\{'", ",", "line", ")", "if", "not", "match", ":", "# Try matching cases 4-6. 
These are always matched on separate lines.", "#", "# Note that we can't simply concatenate the previous line to the", "# current line and do a single match, otherwise we may output", "# duplicate warnings for the blank line case:", "# if (cond) {", "# // blank line", "# }", "prevline", "=", "GetPreviousNonBlankLine", "(", "clean_lines", ",", "linenum", ")", "[", "0", "]", "if", "prevline", "and", "Search", "(", "r'[;{}]\\s*$'", ",", "prevline", ")", ":", "match", "=", "Match", "(", "r'^(\\s*)\\{'", ",", "line", ")", "# Check matching closing brace", "if", "match", ":", "(", "endline", ",", "endlinenum", ",", "endpos", ")", "=", "CloseExpression", "(", "clean_lines", ",", "linenum", ",", "len", "(", "match", ".", "group", "(", "1", ")", ")", ")", "if", "endpos", ">", "-", "1", "and", "Match", "(", "r'^\\s*;'", ",", "endline", "[", "endpos", ":", "]", ")", ":", "# Current {} pair is eligible for semicolon check, and we have found", "# the redundant semicolon, output warning here.", "#", "# Note: because we are scanning forward for opening braces, and", "# outputting warnings for the matching closing brace, if there are", "# nested blocks with trailing semicolons, we will get the error", "# messages in reversed order.", "error", "(", "filename", ",", "endlinenum", ",", "'readability/braces'", ",", "4", ",", "\"You don't need a ; after a }\"", ")" ]
https://github.com/MVIG-SJTU/RMPE/blob/5188c230ec800c12be7369c3619615bc9b020aa4/scripts/cpp_lint.py#L3073-L3244
rdkit/rdkit
ede860ae316d12d8568daf5ee800921c3389c84e
rdkit/ML/Cluster/Clusters.py
python
Cluster.AddChildren
(self, children)
Adds a bunch of children to our list **Arguments** - children: a list of Clusters
Adds a bunch of children to our list
[ "Adds", "a", "bunch", "of", "children", "to", "our", "list" ]
def AddChildren(self, children): """Adds a bunch of children to our list **Arguments** - children: a list of Clusters """ self.children += children self._GenPoints() self._UpdateLength()
[ "def", "AddChildren", "(", "self", ",", "children", ")", ":", "self", ".", "children", "+=", "children", "self", ".", "_GenPoints", "(", ")", "self", ".", "_UpdateLength", "(", ")" ]
https://github.com/rdkit/rdkit/blob/ede860ae316d12d8568daf5ee800921c3389c84e/rdkit/ML/Cluster/Clusters.py#L162-L172
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/python/framework/common_shapes.py
python
depthwise_conv2d_native_shape
(op)
return [tensor_shape.TensorShape([batch_size, out_rows, out_cols, depth_out])]
Shape function for a DepthwiseConv2D op. This op has two inputs: * input, a 4D tensor with shape = [batch_size, rows, cols, depth_in] * filter, a 4D tensor with shape = [filter_rows, filter_cols, depth_in, depthwise_multiplier] The output is a 4D tensor with shape = [batch_size, out_rows, out_cols, depth_in*depthwise_multiplier], where out_rows and out_cols depend on the value of the op's "padding" and "strides" attrs. Args: op: A DepthwiseConv2dNative Operation. Returns: A list containing the Shape of the DepthwiseConv2DNative output. Raises: ValueError: If the shapes of the input or filter are incompatible.
Shape function for a DepthwiseConv2D op.
[ "Shape", "function", "for", "a", "DepthwiseConv2D", "op", "." ]
def depthwise_conv2d_native_shape(op): """Shape function for a DepthwiseConv2D op. This op has two inputs: * input, a 4D tensor with shape = [batch_size, rows, cols, depth_in] * filter, a 4D tensor with shape = [filter_rows, filter_cols, depth_in, depthwise_multiplier] The output is a 4D tensor with shape = [batch_size, out_rows, out_cols, depth_in*depthwise_multiplier], where out_rows and out_cols depend on the value of the op's "padding" and "strides" attrs. Args: op: A DepthwiseConv2dNative Operation. Returns: A list containing the Shape of the DepthwiseConv2DNative output. Raises: ValueError: If the shapes of the input or filter are incompatible. """ input_shape = op.inputs[0].get_shape().with_rank(4) filter_shape = op.inputs[1].get_shape().with_rank(4) batch_size = input_shape[0] in_rows = input_shape[1] in_cols = input_shape[2] filter_rows = filter_shape[0] filter_cols = filter_shape[1] depth_out = filter_shape[3] * filter_shape[2] # Check that the input depths are compatible. input_shape[3].assert_is_compatible_with(filter_shape[2]) stride_b, stride_r, stride_c, stride_d = op.get_attr("strides") if stride_b != 1 or stride_d != 1: raise ValueError("Current implementation does not yet support " "strides in the batch and depth dimensions.") if stride_r != stride_c: # TODO(shlens): Add support for this. raise ValueError("Current implementation only supports equal length " "strides in the row and column dimensions.") # TODO(mrry,shlens): Raise an error if the stride would cause # information in the input to be ignored. This will require a change # in the kernel implementation. stride = stride_r padding = op.get_attr("padding") out_rows, out_cols = get2d_conv_output_size(in_rows, in_cols, filter_rows, filter_cols, stride, stride, padding) return [tensor_shape.TensorShape([batch_size, out_rows, out_cols, depth_out])]
[ "def", "depthwise_conv2d_native_shape", "(", "op", ")", ":", "input_shape", "=", "op", ".", "inputs", "[", "0", "]", ".", "get_shape", "(", ")", ".", "with_rank", "(", "4", ")", "filter_shape", "=", "op", ".", "inputs", "[", "1", "]", ".", "get_shape", "(", ")", ".", "with_rank", "(", "4", ")", "batch_size", "=", "input_shape", "[", "0", "]", "in_rows", "=", "input_shape", "[", "1", "]", "in_cols", "=", "input_shape", "[", "2", "]", "filter_rows", "=", "filter_shape", "[", "0", "]", "filter_cols", "=", "filter_shape", "[", "1", "]", "depth_out", "=", "filter_shape", "[", "3", "]", "*", "filter_shape", "[", "2", "]", "# Check that the input depths are compatible.", "input_shape", "[", "3", "]", ".", "assert_is_compatible_with", "(", "filter_shape", "[", "2", "]", ")", "stride_b", ",", "stride_r", ",", "stride_c", ",", "stride_d", "=", "op", ".", "get_attr", "(", "\"strides\"", ")", "if", "stride_b", "!=", "1", "or", "stride_d", "!=", "1", ":", "raise", "ValueError", "(", "\"Current implementation does not yet support \"", "\"strides in the batch and depth dimensions.\"", ")", "if", "stride_r", "!=", "stride_c", ":", "# TODO(shlens): Add support for this.", "raise", "ValueError", "(", "\"Current implementation only supports equal length \"", "\"strides in the row and column dimensions.\"", ")", "# TODO(mrry,shlens): Raise an error if the stride would cause", "# information in the input to be ignored. This will require a change", "# in the kernel implementation.", "stride", "=", "stride_r", "padding", "=", "op", ".", "get_attr", "(", "\"padding\"", ")", "out_rows", ",", "out_cols", "=", "get2d_conv_output_size", "(", "in_rows", ",", "in_cols", ",", "filter_rows", ",", "filter_cols", ",", "stride", ",", "stride", ",", "padding", ")", "return", "[", "tensor_shape", ".", "TensorShape", "(", "[", "batch_size", ",", "out_rows", ",", "out_cols", ",", "depth_out", "]", ")", "]" ]
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/python/framework/common_shapes.py#L256-L309
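A numeric sketch of the shape rule this function computes (values invented): the output depth is depth_in * depthwise_multiplier, and SAME padding with stride 1 preserves rows and cols regardless of filter size:

batch, rows, cols, depth_in = 8, 32, 32, 3
filter_rows, filter_cols, multiplier = 5, 5, 4  # filter size affects padding, not SAME output dims
stride = 1
out_rows = (rows + stride - 1) // stride   # SAME padding: ceil(in / stride)
out_cols = (cols + stride - 1) // stride
print((batch, out_rows, out_cols, depth_in * multiplier))  # (8, 32, 32, 12)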
ArduPilot/ardupilot
6e684b3496122b8158ac412b609d00004b7ac306
Tools/scripts/build_binaries.py
python
build_binaries.touch_filepath
(self, filepath)
creates a file at filepath, or updates the timestamp on filepath
creates a file at filepath, or updates the timestamp on filepath
[ "creates", "a", "file", "at", "filepath", "or", "updates", "the", "timestamp", "on", "filepath" ]
def touch_filepath(self, filepath): '''creates a file at filepath, or updates the timestamp on filepath''' if os.path.exists(filepath): os.utime(filepath, None) else: with open(filepath, "a"): pass
[ "def", "touch_filepath", "(", "self", ",", "filepath", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "filepath", ")", ":", "os", ".", "utime", "(", "filepath", ",", "None", ")", "else", ":", "with", "open", "(", "filepath", ",", "\"a\"", ")", ":", "pass" ]
https://github.com/ArduPilot/ardupilot/blob/6e684b3496122b8158ac412b609d00004b7ac306/Tools/scripts/build_binaries.py#L384-L390
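The same "touch" behavior is available in the stdlib; a one-line equivalent of the helper, with an illustrative path:

from pathlib import Path

Path("/tmp/stamp").touch()  # creates the file, or updates its mtime if it exists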
gnuradio/gnuradio
09c3c4fa4bfb1a02caac74cb5334dfe065391e3b
gr-digital/python/digital/qa_header_payload_demux.py
python
qa_header_payload_demux.test_001_headerpadding_payload_offset
(self)
Like test 1, but with header padding + payload offset.
Like test 1, but with header padding + payload offset.
[ "Like", "test", "1", "but", "with", "header", "padding", "+", "payload", "offset", "." ]
def test_001_headerpadding_payload_offset(self): """ Like test 1, but with header padding + payload offset. """ n_zeros = 3 header = [1, 2, 3] header_padding = 1 payload_offset = -1 payload = list(range(5, 20)) data_signal = [0, ] * n_zeros + header + payload + [0, ] * 100 trigger_signal = [0] * len(data_signal) trigger_signal[n_zeros] = 1 # This goes on output 1, item 3 + 1 (for payload offset) testtag4 = make_tag('tag4', 314, n_zeros + len(header) + 3) data_src = blocks.vector_source_f( data_signal, False, tags=(testtag4,) ) trigger_src = blocks.vector_source_b(trigger_signal, False) hpd = digital.header_payload_demux( len(header), 1, # Items per symbol 0, # Guard interval "frame_len", # TSB tag key "detect", # Trigger tag key False, # No symbols please gr.sizeof_float, # Item size "", # Timing tag key 1.0, # Samp rate (), # No special tags header_padding ) # extra system port defined for you self.assertEqual(pmt.length(hpd.message_ports_in()), 2) header_sink = blocks.vector_sink_f() payload_sink = blocks.vector_sink_f() self.tb.connect(data_src, (hpd, 0)) self.tb.connect(trigger_src, (hpd, 1)) self.tb.connect((hpd, 0), header_sink) self.tb.connect((hpd, 1), payload_sink) self.tb.start() time.sleep(.2) # Need this, otherwise, the next message is ignored hpd.to_basic_block()._post(pmt.intern('header_data'), pmt.to_pmt( {'frame_len': len(payload), 'payload_offset': payload_offset})) while len(payload_sink.data()) < len(payload): time.sleep(.2) self.tb.stop() self.tb.wait() # Header is now padded: self.assertEqual(header_sink.data(), [0, ] + header + [payload[0], ]) # Payload is now offset: self.assertEqual(payload_sink.data(), data_signal[n_zeros + len( header) + payload_offset:n_zeros + len(header) + payload_offset + len(payload)]) ptags_payload = {} for tag in payload_sink.tags(): ptag = gr.tag_to_python(tag) ptags_payload[ptag.key] = ptag.offset expected_tags_payload = { 'frame_len': 0, 'payload_offset': 0, 'tag4': 3 - payload_offset, } self.assertEqual(expected_tags_payload, ptags_payload)
[ "def", "test_001_headerpadding_payload_offset", "(", "self", ")", ":", "n_zeros", "=", "3", "header", "=", "[", "1", ",", "2", ",", "3", "]", "header_padding", "=", "1", "payload_offset", "=", "-", "1", "payload", "=", "list", "(", "range", "(", "5", ",", "20", ")", ")", "data_signal", "=", "[", "0", ",", "]", "*", "n_zeros", "+", "header", "+", "payload", "+", "[", "0", ",", "]", "*", "100", "trigger_signal", "=", "[", "0", "]", "*", "len", "(", "data_signal", ")", "trigger_signal", "[", "n_zeros", "]", "=", "1", "# This goes on output 1, item 3 + 1 (for payload offset)", "testtag4", "=", "make_tag", "(", "'tag4'", ",", "314", ",", "n_zeros", "+", "len", "(", "header", ")", "+", "3", ")", "data_src", "=", "blocks", ".", "vector_source_f", "(", "data_signal", ",", "False", ",", "tags", "=", "(", "testtag4", ",", ")", ")", "trigger_src", "=", "blocks", ".", "vector_source_b", "(", "trigger_signal", ",", "False", ")", "hpd", "=", "digital", ".", "header_payload_demux", "(", "len", "(", "header", ")", ",", "1", ",", "# Items per symbol", "0", ",", "# Guard interval", "\"frame_len\"", ",", "# TSB tag key", "\"detect\"", ",", "# Trigger tag key", "False", ",", "# No symbols please", "gr", ".", "sizeof_float", ",", "# Item size", "\"\"", ",", "# Timing tag key", "1.0", ",", "# Samp rate", "(", ")", ",", "# No special tags", "header_padding", ")", "# extra system port defined for you", "self", ".", "assertEqual", "(", "pmt", ".", "length", "(", "hpd", ".", "message_ports_in", "(", ")", ")", ",", "2", ")", "header_sink", "=", "blocks", ".", "vector_sink_f", "(", ")", "payload_sink", "=", "blocks", ".", "vector_sink_f", "(", ")", "self", ".", "tb", ".", "connect", "(", "data_src", ",", "(", "hpd", ",", "0", ")", ")", "self", ".", "tb", ".", "connect", "(", "trigger_src", ",", "(", "hpd", ",", "1", ")", ")", "self", ".", "tb", ".", "connect", "(", "(", "hpd", ",", "0", ")", ",", "header_sink", ")", "self", ".", "tb", ".", "connect", "(", "(", "hpd", ",", "1", ")", ",", "payload_sink", ")", "self", ".", "tb", ".", "start", "(", ")", "time", ".", "sleep", "(", ".2", ")", "# Need this, otherwise, the next message is ignored", "hpd", ".", "to_basic_block", "(", ")", ".", "_post", "(", "pmt", ".", "intern", "(", "'header_data'", ")", ",", "pmt", ".", "to_pmt", "(", "{", "'frame_len'", ":", "len", "(", "payload", ")", ",", "'payload_offset'", ":", "payload_offset", "}", ")", ")", "while", "len", "(", "payload_sink", ".", "data", "(", ")", ")", "<", "len", "(", "payload", ")", ":", "time", ".", "sleep", "(", ".2", ")", "self", ".", "tb", ".", "stop", "(", ")", "self", ".", "tb", ".", "wait", "(", ")", "# Header is now padded:", "self", ".", "assertEqual", "(", "header_sink", ".", "data", "(", ")", ",", "[", "0", ",", "]", "+", "header", "+", "[", "payload", "[", "0", "]", ",", "]", ")", "# Payload is now offset:", "self", ".", "assertEqual", "(", "payload_sink", ".", "data", "(", ")", ",", "data_signal", "[", "n_zeros", "+", "len", "(", "header", ")", "+", "payload_offset", ":", "n_zeros", "+", "len", "(", "header", ")", "+", "payload_offset", "+", "len", "(", "payload", ")", "]", ")", "ptags_payload", "=", "{", "}", "for", "tag", "in", "payload_sink", ".", "tags", "(", ")", ":", "ptag", "=", "gr", ".", "tag_to_python", "(", "tag", ")", "ptags_payload", "[", "ptag", ".", "key", "]", "=", "ptag", ".", "offset", "expected_tags_payload", "=", "{", "'frame_len'", ":", "0", ",", "'payload_offset'", ":", "0", ",", "'tag4'", ":", "3", "-", "payload_offset", ",", "}", "self", ".", "assertEqual", "(", 
"expected_tags_payload", ",", "ptags_payload", ")" ]
https://github.com/gnuradio/gnuradio/blob/09c3c4fa4bfb1a02caac74cb5334dfe065391e3b/gr-digital/python/digital/qa_header_payload_demux.py#L316-L377
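A worked version of the index arithmetic behind the assertions in the test above, restated with the test's concrete values (no new behavior is assumed; every number comes from the test body):

n_zeros, hdr_len, payload_len, payload_offset = 3, 3, 15, -1
payload_start = n_zeros + hdr_len + payload_offset   # = 5
# header output: one padding item on each side of the 3 header items,
# i.e. data_signal[2:7] == [0, 1, 2, 3, 5] == [0] + header + [payload[0]]
# payload output: data_signal[5:5 + 15], so it starts one item early and
# picks up the last header sample (3) as its first element
# tag4 sits at absolute offset n_zeros + hdr_len + 3 == 9; relative to the
# shifted payload that is 9 - payload_start == 4 == 3 - payload_offset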
google/llvm-propeller
45c226984fe8377ebfb2ad7713c680d652ba678d
compiler-rt/lib/sanitizer_common/scripts/cpplint.py
python
ReplaceAll
(pattern, rep, s)
return _regexp_compile_cache[pattern].sub(rep, s)
Replaces instances of pattern in a string with a replacement. The compiled regex is kept in a cache shared by Match and Search. Args: pattern: regex pattern rep: replacement text s: search string Returns: string with replacements made (or original string if no replacements)
Replaces instances of pattern in a string with a replacement.
[ "Replaces", "instances", "of", "pattern", "in", "a", "string", "with", "a", "replacement", "." ]
def ReplaceAll(pattern, rep, s): """Replaces instances of pattern in a string with a replacement. The compiled regex is kept in a cache shared by Match and Search. Args: pattern: regex pattern rep: replacement text s: search string Returns: string with replacements made (or original string if no replacements) """ if pattern not in _regexp_compile_cache: _regexp_compile_cache[pattern] = sre_compile.compile(pattern) return _regexp_compile_cache[pattern].sub(rep, s)
[ "def", "ReplaceAll", "(", "pattern", ",", "rep", ",", "s", ")", ":", "if", "pattern", "not", "in", "_regexp_compile_cache", ":", "_regexp_compile_cache", "[", "pattern", "]", "=", "sre_compile", ".", "compile", "(", "pattern", ")", "return", "_regexp_compile_cache", "[", "pattern", "]", ".", "sub", "(", "rep", ",", "s", ")" ]
https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/compiler-rt/lib/sanitizer_common/scripts/cpplint.py#L667-L682
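A brief usage sketch of ReplaceAll above; the input string is made up, and the cache assertion relies on the module-level _regexp_compile_cache that Match and Search share:

out = ReplaceAll(r'//\s*', '// ', 'int x;//comment')
# out == 'int x;// comment'
# the compiled pattern is now cached, so linting thousands of lines pays
# the regex-compile cost only once per distinct pattern:
assert r'//\s*' in _regexp_compile_cache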
xiaohaoChen/rrc_detection
4f2b110cd122da7f55e8533275a9b4809a88785a
scripts/cpp_lint.py
python
_Filters
()
return _cpplint_state.filters
Returns the module's list of output filters, as a list.
Returns the module's list of output filters, as a list.
[ "Returns", "the", "module", "s", "list", "of", "output", "filters", "as", "a", "list", "." ]
def _Filters(): """Returns the module's list of output filters, as a list.""" return _cpplint_state.filters
[ "def", "_Filters", "(", ")", ":", "return", "_cpplint_state", ".", "filters" ]
https://github.com/xiaohaoChen/rrc_detection/blob/4f2b110cd122da7f55e8533275a9b4809a88785a/scripts/cpp_lint.py#L792-L794
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/_osx_support.py
python
customize_config_vars
(_config_vars)
return _config_vars
Customize Python build configuration variables. Called internally from sysconfig with a mutable mapping containing name/value pairs parsed from the configured makefile used to build this interpreter. Returns the mapping updated as needed to reflect the environment in which the interpreter is running; in the case of a Python from a binary installer, the installed environment may be very different from the build environment, i.e. different OS levels, different build tools, different available CPU architectures. This customization is performed whenever distutils.sysconfig.get_config_vars() is first called. It may be used in environments where no compilers are present, i.e. when installing pure Python dists. Customization of compiler paths and detection of unavailable archs is deferred until the first extension module build is requested (in distutils.sysconfig.customize_compiler). Currently called from distutils.sysconfig
Customize Python build configuration variables.
[ "Customize", "Python", "build", "configuration", "variables", "." ]
def customize_config_vars(_config_vars): """Customize Python build configuration variables. Called internally from sysconfig with a mutable mapping containing name/value pairs parsed from the configured makefile used to build this interpreter. Returns the mapping updated as needed to reflect the environment in which the interpreter is running; in the case of a Python from a binary installer, the installed environment may be very different from the build environment, i.e. different OS levels, different build tools, different available CPU architectures. This customization is performed whenever distutils.sysconfig.get_config_vars() is first called. It may be used in environments where no compilers are present, i.e. when installing pure Python dists. Customization of compiler paths and detection of unavailable archs is deferred until the first extension module build is requested (in distutils.sysconfig.customize_compiler). Currently called from distutils.sysconfig """ if not _supports_universal_builds(): # On Mac OS X before 10.4, check if -arch and -isysroot # are in CFLAGS or LDFLAGS and remove them if they are. # This is needed when building extensions on a 10.3 system # using a universal build of python. _remove_universal_flags(_config_vars) # Allow user to override all archs with ARCHFLAGS env var _override_all_archs(_config_vars) # Remove references to sdks that are not found _check_for_unavailable_sdk(_config_vars) return _config_vars
[ "def", "customize_config_vars", "(", "_config_vars", ")", ":", "if", "not", "_supports_universal_builds", "(", ")", ":", "# On Mac OS X before 10.4, check if -arch and -isysroot", "# are in CFLAGS or LDFLAGS and remove them if they are.", "# This is needed when building extensions on a 10.3 system", "# using a universal build of python.", "_remove_universal_flags", "(", "_config_vars", ")", "# Allow user to override all archs with ARCHFLAGS env var", "_override_all_archs", "(", "_config_vars", ")", "# Remove references to sdks that are not found", "_check_for_unavailable_sdk", "(", "_config_vars", ")", "return", "_config_vars" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/_osx_support.py#L362-L400
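The helpers customize_config_vars calls (_override_all_archs and friends) are private, so here is a minimal standalone sketch of just the ARCHFLAGS override step; override_archs is a hypothetical name and this only approximates the real distutils logic:

import os
import re

def override_archs(config_vars):
    # if the user set ARCHFLAGS, drop any existing '-arch <name>' flags
    # from the compiler variables and append the user's choice instead
    if 'ARCHFLAGS' in os.environ:
        for key in ('CFLAGS', 'LDFLAGS'):
            if key in config_vars:
                stripped = re.sub(r'-arch\s+\S+\s*', '', config_vars[key]).strip()
                config_vars[key] = (stripped + ' ' + os.environ['ARCHFLAGS']).strip()
    return config_vars

os.environ['ARCHFLAGS'] = '-arch arm64'
print(override_archs({'CFLAGS': '-arch i386 -arch ppc -O2'}))
# {'CFLAGS': '-O2 -arch arm64'}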
psi4/psi4
be533f7f426b6ccc263904e55122899b16663395
psi4/driver/p4util/procutil.py
python
prepare_options_for_modules
(changedOnly=False, commandsInsteadDict=False)
Function to return a string of commands to replicate the current state of user-modified options. Used to capture C++ options information for distributed (sow/reap) input files. .. caution:: Some features are not yet implemented. Buy a developer a coffee. - Need some option to get either all or changed - Need some option to either get dict or set string or psimod command list - command return doesn't revoke has_changed setting for unchanged with changedOnly=False
Function to return a string of commands to replicate the current state of user-modified options. Used to capture C++ options information for distributed (sow/reap) input files.
[ "Function", "to", "return", "a", "string", "of", "commands", "to", "replicate", "the", "current", "state", "of", "user", "-", "modified", "options", ".", "Used", "to", "capture", "C", "++", "options", "information", "for", "distributed", "(", "sow", "/", "reap", ")", "input", "files", "." ]
def prepare_options_for_modules(changedOnly=False, commandsInsteadDict=False): """Function to return a string of commands to replicate the current state of user-modified options. Used to capture C++ options information for distributed (sow/reap) input files. .. caution:: Some features are not yet implemented. Buy a developer a coffee. - Need some option to get either all or changed - Need some option to either get dict or set string or psimod command list - command return doesn't revoke has_changed setting for unchanged with changedOnly=False """ options = collections.defaultdict(dict) commands = '' for opt in core.get_global_option_list(): if core.has_global_option_changed(opt) or not changedOnly: if opt in ['DFT_CUSTOM_FUNCTIONAL', 'EXTERN']: # Feb 2017 hack continue val = core.get_global_option(opt) options['GLOBALS'][opt] = {'value': val, 'has_changed': core.has_global_option_changed(opt)} if isinstance(val, str): commands += """core.set_global_option('%s', '%s')\n""" % (opt, val) else: commands += """core.set_global_option('%s', %s)\n""" % (opt, val) #if changedOnly: # print('Appending module %s option %s value %s has_changed %s.' % \ # ('GLOBALS', opt, core.get_global_option(opt), core.has_global_option_changed(opt))) for module in _modules: if core.option_exists_in_module(module, opt): hoc = core.has_option_changed(module, opt) if hoc or not changedOnly: val = core.get_option(module, opt) options[module][opt] = {'value': val, 'has_changed': hoc} if isinstance(val, str): commands += """core.set_local_option('%s', '%s', '%s')\n""" % (module, opt, val) else: commands += """core.set_local_option('%s', '%s', %s)\n""" % (module, opt, val) #if changedOnly: # print('Appending module %s option %s value %s has_changed %s.' % \ # (module, opt, core.get_option(module, opt), hoc)) if commandsInsteadDict: return commands else: return options
[ "def", "prepare_options_for_modules", "(", "changedOnly", "=", "False", ",", "commandsInsteadDict", "=", "False", ")", ":", "options", "=", "collections", ".", "defaultdict", "(", "dict", ")", "commands", "=", "''", "for", "opt", "in", "core", ".", "get_global_option_list", "(", ")", ":", "if", "core", ".", "has_global_option_changed", "(", "opt", ")", "or", "not", "changedOnly", ":", "if", "opt", "in", "[", "'DFT_CUSTOM_FUNCTIONAL'", ",", "'EXTERN'", "]", ":", "# Feb 2017 hack", "continue", "val", "=", "core", ".", "get_global_option", "(", "opt", ")", "options", "[", "'GLOBALS'", "]", "[", "opt", "]", "=", "{", "'value'", ":", "val", ",", "'has_changed'", ":", "core", ".", "has_global_option_changed", "(", "opt", ")", "}", "if", "isinstance", "(", "val", ",", "str", ")", ":", "commands", "+=", "\"\"\"core.set_global_option('%s', '%s')\\n\"\"\"", "%", "(", "opt", ",", "val", ")", "else", ":", "commands", "+=", "\"\"\"core.set_global_option('%s', %s)\\n\"\"\"", "%", "(", "opt", ",", "val", ")", "#if changedOnly:", "# print('Appending module %s option %s value %s has_changed %s.' % \\", "# ('GLOBALS', opt, core.get_global_option(opt), core.has_global_option_changed(opt)))", "for", "module", "in", "_modules", ":", "if", "core", ".", "option_exists_in_module", "(", "module", ",", "opt", ")", ":", "hoc", "=", "core", ".", "has_option_changed", "(", "module", ",", "opt", ")", "if", "hoc", "or", "not", "changedOnly", ":", "val", "=", "core", ".", "get_option", "(", "module", ",", "opt", ")", "options", "[", "module", "]", "[", "opt", "]", "=", "{", "'value'", ":", "val", ",", "'has_changed'", ":", "hoc", "}", "if", "isinstance", "(", "val", ",", "str", ")", ":", "commands", "+=", "\"\"\"core.set_local_option('%s', '%s', '%s')\\n\"\"\"", "%", "(", "module", ",", "opt", ",", "val", ")", "else", ":", "commands", "+=", "\"\"\"core.set_local_option('%s', '%s', %s)\\n\"\"\"", "%", "(", "module", ",", "opt", ",", "val", ")", "#if changedOnly:", "# print('Appending module %s option %s value %s has_changed %s.' % \\", "# (module, opt, core.get_option(module, opt), hoc))", "if", "commandsInsteadDict", ":", "return", "commands", "else", ":", "return", "options" ]
https://github.com/psi4/psi4/blob/be533f7f426b6ccc263904e55122899b16663395/psi4/driver/p4util/procutil.py#L383-L429
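A hedged usage sketch: it needs a live psi4 session, since the function reads option state from psi4.core, and the import path is assumed from the file location above rather than confirmed:

import psi4
from psi4.driver.p4util.procutil import prepare_options_for_modules

psi4.set_options({'basis': 'cc-pvdz', 'scf_type': 'df'})
# nested dict of only the user-changed options, keyed by module:
changed = prepare_options_for_modules(changedOnly=True)
# or a replayable script of core.set_global_option(...) /
# core.set_local_option(...) calls, for sow/reap input files:
script = prepare_options_for_modules(changedOnly=True, commandsInsteadDict=True)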
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/build/waf-1.7.13/waflib/extras/misc.py
python
copy_func
(tsk)
Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)
Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)
[ "Make", "a", "file", "copy", ".", "This", "might", "be", "used", "to", "make", "other", "kinds", "of", "file", "processing", "(", "even", "calling", "a", "compiler", "is", "possible", ")" ]
def copy_func(tsk): "Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)" env = tsk.env infile = tsk.inputs[0].abspath() outfile = tsk.outputs[0].abspath() try: shutil.copy2(infile, outfile) except (OSError, IOError): return 1 else: if tsk.chmod: os.chmod(outfile, tsk.chmod) return 0
[ "def", "copy_func", "(", "tsk", ")", ":", "env", "=", "tsk", ".", "env", "infile", "=", "tsk", ".", "inputs", "[", "0", "]", ".", "abspath", "(", ")", "outfile", "=", "tsk", ".", "outputs", "[", "0", "]", ".", "abspath", "(", ")", "try", ":", "shutil", ".", "copy2", "(", "infile", ",", "outfile", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "return", "1", "else", ":", "if", "tsk", ".", "chmod", ":", "os", ".", "chmod", "(", "outfile", ",", "tsk", ".", "chmod", ")", "return", "0" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/build/waf-1.7.13/waflib/extras/misc.py#L28-L39
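copy_func is written as a waf task body (it reads tsk.inputs/tsk.outputs and signals failure with a non-zero return code); a hypothetical standalone equivalent, outside of waf, might look like this:

import os
import shutil

def copy_file(infile, outfile, chmod=None):
    try:
        shutil.copy2(infile, outfile)   # copies contents plus metadata
    except (OSError, IOError):
        return 1                        # waf convention: non-zero means the task failed
    if chmod:
        os.chmod(outfile, chmod)        # e.g. chmod=0o755 for scripts
    return 0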
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_controls.py
python
CollapsiblePaneEvent.__init__
(self, *args, **kwargs)
__init__(self, Object generator, int id, bool collapsed) -> CollapsiblePaneEvent
__init__(self, Object generator, int id, bool collapsed) -> CollapsiblePaneEvent
[ "__init__", "(", "self", "Object", "generator", "int", "id", "bool", "collapsed", ")", "-", ">", "CollapsiblePaneEvent" ]
def __init__(self, *args, **kwargs): """__init__(self, Object generator, int id, bool collapsed) -> CollapsiblePaneEvent""" _controls_.CollapsiblePaneEvent_swiginit(self,_controls_.new_CollapsiblePaneEvent(*args, **kwargs))
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_controls_", ".", "CollapsiblePaneEvent_swiginit", "(", "self", ",", "_controls_", ".", "new_CollapsiblePaneEvent", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_controls.py#L7403-L7405
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/richtext.py
python
RichTextFileHandler.CanSave
(*args, **kwargs)
return _richtext.RichTextFileHandler_CanSave(*args, **kwargs)
CanSave(self) -> bool
CanSave(self) -> bool
[ "CanSave", "(", "self", ")", "-", ">", "bool" ]
def CanSave(*args, **kwargs): """CanSave(self) -> bool""" return _richtext.RichTextFileHandler_CanSave(*args, **kwargs)
[ "def", "CanSave", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_richtext", ".", "RichTextFileHandler_CanSave", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/richtext.py#L2772-L2774
pirobot/rbx2
2a6544799fcf062e7b6bd5cf2981b2a84c0c7d2a
rbx2_utils/src/rbx2_utils/srv/_LaunchProcess.py
python
LaunchProcessRequest.deserialize_numpy
(self, str, numpy)
unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module
unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module
[ "unpack", "serialized", "message", "in", "str", "into", "this", "message", "instance", "using", "numpy", "for", "array", "types", ":", "param", "str", ":", "byte", "array", "of", "serialized", "message", "str", ":", "param", "numpy", ":", "numpy", "python", "module" ]
def deserialize_numpy(self, str, numpy): """ unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module """ try: end = 0 start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: self.command = str[start:end].decode('utf-8') else: self.command = str[start:end] return self except struct.error as e: raise genpy.DeserializationError(e)
[ "def", "deserialize_numpy", "(", "self", ",", "str", ",", "numpy", ")", ":", "try", ":", "end", "=", "0", "start", "=", "end", "end", "+=", "4", "(", "length", ",", ")", "=", "_struct_I", ".", "unpack", "(", "str", "[", "start", ":", "end", "]", ")", "start", "=", "end", "end", "+=", "length", "if", "python3", ":", "self", ".", "command", "=", "str", "[", "start", ":", "end", "]", ".", "decode", "(", "'utf-8'", ")", "else", ":", "self", ".", "command", "=", "str", "[", "start", ":", "end", "]", "return", "self", "except", "struct", ".", "error", "as", "e", ":", "raise", "genpy", ".", "DeserializationError", "(", "e", ")" ]
https://github.com/pirobot/rbx2/blob/2a6544799fcf062e7b6bd5cf2981b2a84c0c7d2a/rbx2_utils/src/rbx2_utils/srv/_LaunchProcess.py#L98-L117
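The wire format the deserializer expects is a 4-byte little-endian length prefix followed by that many UTF-8 bytes; this round-trip sketch uses a made-up command string and struct.Struct('<I'), which matches genpy's _struct_I:

import struct

_struct_I = struct.Struct('<I')

def serialize_command(command):
    data = command.encode('utf-8')
    return _struct_I.pack(len(data)) + data   # length prefix, then payload

buf = serialize_command('roslaunch my_pkg demo.launch')
(length,) = _struct_I.unpack(buf[:4])
assert buf[4:4 + length].decode('utf-8') == 'roslaunch my_pkg demo.launch'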
domino-team/openwrt-cc
8b181297c34d14d3ca521cc9f31430d561dbc688
package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
python
AddArch
(output, arch)
return '%s.%s%s' % (output, arch, extension)
Adds an arch string to an output path.
Adds an arch string to an output path.
[ "Adds", "an", "arch", "string", "to", "an", "output", "path", "." ]
def AddArch(output, arch): """Adds an arch string to an output path.""" output, extension = os.path.splitext(output) return '%s.%s%s' % (output, arch, extension)
[ "def", "AddArch", "(", "output", ",", "arch", ")", ":", "output", ",", "extension", "=", "os", ".", "path", ".", "splitext", "(", "output", ")", "return", "'%s.%s%s'", "%", "(", "output", ",", "arch", ",", "extension", ")" ]
https://github.com/domino-team/openwrt-cc/blob/8b181297c34d14d3ca521cc9f31430d561dbc688/package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py#L95-L98
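What AddArch produces for a few representative outputs (illustrative values, assuming the function above is in scope):

assert AddArch('obj/foo.o', 'arm64') == 'obj/foo.arm64.o'
assert AddArch('libbar.dylib', 'x86_64') == 'libbar.x86_64.dylib'
# a path with no extension simply gains '.<arch>':
assert AddArch('mybinary', 'i386') == 'mybinary.i386'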
microsoft/CNTK
e9396480025b9ca457d26b6f33dd07c474c6aa04
bindings/python/cntk/contrib/crosstalk/__init__.py
python
Crosstalk.reset
(self)
Reset all variables and passes, setter/getter functions for variable types are kept
Reset all variables and passes, setter/getter functions for variable types are kept
[ "Reset", "all", "variables", "and", "passes", "setter", "/", "getter", "functions", "for", "variable", "types", "are", "kept" ]
def reset(self): ''' Reset all variables and passes, setter/getter functions for variable types are kept ''' self.vars = {} self.passes = 0
[ "def", "reset", "(", "self", ")", ":", "self", ".", "vars", "=", "{", "}", "self", ".", "passes", "=", "0" ]
https://github.com/microsoft/CNTK/blob/e9396480025b9ca457d26b6f33dd07c474c6aa04/bindings/python/cntk/contrib/crosstalk/__init__.py#L301-L306
jiangxiluning/FOTS.PyTorch
b1851c170b4f1ad18406766352cb5171648ce603
FOTS/utils/detect.py
python
adjust_ratio
(boxes, ratio_w, ratio_h)
return np.around(boxes)
refine boxes Input: boxes : detected polys <numpy.ndarray, (n,9)> ratio_w: ratio of width ratio_h: ratio of height Output: refined boxes
refine boxes Input: boxes : detected polys <numpy.ndarray, (n,9)> ratio_w: ratio of width ratio_h: ratio of height Output: refined boxes
[ "refine", "boxes", "Input", ":", "boxes", ":", "detected", "polys", "<numpy", ".", "ndarray", "(", "n", "9", ")", ">", "ratio_w", ":", "ratio", "of", "width", "ratio_h", ":", "ratio", "of", "height", "Output", ":", "refined", "boxes" ]
def adjust_ratio(boxes, ratio_w, ratio_h): '''refine boxes Input: boxes : detected polys <numpy.ndarray, (n,9)> ratio_w: ratio of width ratio_h: ratio of height Output: refined boxes ''' if boxes is None or boxes.size == 0: return None boxes[:,[0,2,4,6]] /= ratio_w boxes[:,[1,3,5,7]] /= ratio_h return np.around(boxes)
[ "def", "adjust_ratio", "(", "boxes", ",", "ratio_w", ",", "ratio_h", ")", ":", "if", "boxes", "is", "None", "or", "boxes", ".", "size", "==", "0", ":", "return", "None", "boxes", "[", ":", ",", "[", "0", ",", "2", ",", "4", ",", "6", "]", "]", "/=", "ratio_w", "boxes", "[", ":", ",", "[", "1", ",", "3", ",", "5", ",", "7", "]", "]", "/=", "ratio_h", "return", "np", ".", "around", "(", "boxes", ")" ]
https://github.com/jiangxiluning/FOTS.PyTorch/blob/b1851c170b4f1ad18406766352cb5171648ce603/FOTS/utils/detect.py#L137-L150
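A quick numeric check of adjust_ratio, assuming the function above is in scope; note that np.around is applied to the whole array, so the trailing confidence column is rounded along with the coordinates:

import numpy as np

boxes = np.array([[10., 20., 50., 20., 50., 60., 10., 60., 0.9]])
restored = adjust_ratio(boxes.copy(), ratio_w=0.5, ratio_h=0.25)
# x coords divided by 0.5 (doubled), y coords divided by 0.25 (x4),
# and the 0.9 score becomes 1.0 under np.around:
# [[ 20.  80. 100.  80. 100. 240.  20. 240.   1.]]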
google/syzygy
8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5
third_party/numpy/files/numpy/ma/core.py
python
MaskedArray.__float__
(self)
return float(self.item())
Convert to float.
Convert to float.
[ "Convert", "to", "float", "." ]
def __float__(self): "Convert to float." if self.size > 1: raise TypeError("Only length-1 arrays can be converted "\ "to Python scalars") elif self._mask: warnings.warn("Warning: converting a masked element to nan.") return np.nan return float(self.item())
[ "def", "__float__", "(", "self", ")", ":", "if", "self", ".", "size", ">", "1", ":", "raise", "TypeError", "(", "\"Only length-1 arrays can be converted \"", "\"to Python scalars\"", ")", "elif", "self", ".", "_mask", ":", "warnings", ".", "warn", "(", "\"Warning: converting a masked element to nan.\"", ")", "return", "np", ".", "nan", "return", "float", "(", "self", ".", "item", "(", ")", ")" ]
https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/third_party/numpy/files/numpy/ma/core.py#L3779-L3787
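The three branches, as observable through numpy.ma (a behavior sketch; newer numpy releases may word the warning differently):

import numpy.ma as ma

float(ma.masked_array([1.5], mask=[False]))   # 1.5: size-1 and unmasked
float(ma.masked_array([1.5], mask=[True]))    # nan, after the masked-element warning
# float(ma.masked_array([1.0, 2.0])) raises TypeError: only length-1
# arrays can be converted to Python scalars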
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/parfor.py
python
push_call_vars
(blocks, saved_globals, saved_getattrs, nested=False)
return
push call variables to right before their call site. assuming one global/getattr is created for each call site and control flow doesn't change it.
push call variables to right before their call site. assuming one global/getattr is created for each call site and control flow doesn't change it.
[ "push", "call", "variables", "to", "right", "before", "their", "call", "site", ".", "assuming", "one", "global", "/", "getattr", "is", "created", "for", "each", "call", "site", "and", "control", "flow", "doesn", "t", "change", "it", "." ]
def push_call_vars(blocks, saved_globals, saved_getattrs, nested=False): """push call variables to right before their call site. assuming one global/getattr is created for each call site and control flow doesn't change it. """ for block in blocks.values(): new_body = [] # global/attr variables that are defined in this block already, # no need to reassign them block_defs = set() for stmt in block.body: def process_assign(stmt): if isinstance(stmt, ir.Assign): rhs = stmt.value lhs = stmt.target if (isinstance(rhs, ir.Global)): saved_globals[lhs.name] = stmt block_defs.add(lhs.name) elif isinstance(rhs, ir.Expr) and rhs.op == 'getattr': if (rhs.value.name in saved_globals or rhs.value.name in saved_getattrs): saved_getattrs[lhs.name] = stmt block_defs.add(lhs.name) if not nested and isinstance(stmt, Parfor): for s in stmt.init_block.body: process_assign(s) pblocks = stmt.loop_body.copy() push_call_vars(pblocks, saved_globals, saved_getattrs, nested=True) new_body.append(stmt) continue else: process_assign(stmt) for v in stmt.list_vars(): new_body += _get_saved_call_nodes(v.name, saved_globals, saved_getattrs, block_defs) new_body.append(stmt) block.body = new_body return
[ "def", "push_call_vars", "(", "blocks", ",", "saved_globals", ",", "saved_getattrs", ",", "nested", "=", "False", ")", ":", "for", "block", "in", "blocks", ".", "values", "(", ")", ":", "new_body", "=", "[", "]", "# global/attr variables that are defined in this block already,", "# no need to reassign them", "block_defs", "=", "set", "(", ")", "for", "stmt", "in", "block", ".", "body", ":", "def", "process_assign", "(", "stmt", ")", ":", "if", "isinstance", "(", "stmt", ",", "ir", ".", "Assign", ")", ":", "rhs", "=", "stmt", ".", "value", "lhs", "=", "stmt", ".", "target", "if", "(", "isinstance", "(", "rhs", ",", "ir", ".", "Global", ")", ")", ":", "saved_globals", "[", "lhs", ".", "name", "]", "=", "stmt", "block_defs", ".", "add", "(", "lhs", ".", "name", ")", "elif", "isinstance", "(", "rhs", ",", "ir", ".", "Expr", ")", "and", "rhs", ".", "op", "==", "'getattr'", ":", "if", "(", "rhs", ".", "value", ".", "name", "in", "saved_globals", "or", "rhs", ".", "value", ".", "name", "in", "saved_getattrs", ")", ":", "saved_getattrs", "[", "lhs", ".", "name", "]", "=", "stmt", "block_defs", ".", "add", "(", "lhs", ".", "name", ")", "if", "not", "nested", "and", "isinstance", "(", "stmt", ",", "Parfor", ")", ":", "for", "s", "in", "stmt", ".", "init_block", ".", "body", ":", "process_assign", "(", "s", ")", "pblocks", "=", "stmt", ".", "loop_body", ".", "copy", "(", ")", "push_call_vars", "(", "pblocks", ",", "saved_globals", ",", "saved_getattrs", ",", "nested", "=", "True", ")", "new_body", ".", "append", "(", "stmt", ")", "continue", "else", ":", "process_assign", "(", "stmt", ")", "for", "v", "in", "stmt", ".", "list_vars", "(", ")", ":", "new_body", "+=", "_get_saved_call_nodes", "(", "v", ".", "name", ",", "saved_globals", ",", "saved_getattrs", ",", "block_defs", ")", "new_body", ".", "append", "(", "stmt", ")", "block", ".", "body", "=", "new_body", "return" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/parfor.py#L3929-L3968
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/numpy/py3/numpy/core/defchararray.py
python
join
(sep, seq)
return _to_string_or_unicode_array( _vec_string(sep, object_, 'join', (seq,)))
Return a string which is the concatenation of the strings in the sequence `seq`. Calls `str.join` element-wise. Parameters ---------- sep : array_like of str or unicode seq : array_like of str or unicode Returns ------- out : ndarray Output array of str or unicode, depending on input types See Also -------- str.join
Return a string which is the concatenation of the strings in the sequence `seq`.
[ "Return", "a", "string", "which", "is", "the", "concatenation", "of", "the", "strings", "in", "the", "sequence", "seq", "." ]
def join(sep, seq): """ Return a string which is the concatenation of the strings in the sequence `seq`. Calls `str.join` element-wise. Parameters ---------- sep : array_like of str or unicode seq : array_like of str or unicode Returns ------- out : ndarray Output array of str or unicode, depending on input types See Also -------- str.join """ return _to_string_or_unicode_array( _vec_string(sep, object_, 'join', (seq,)))
[ "def", "join", "(", "sep", ",", "seq", ")", ":", "return", "_to_string_or_unicode_array", "(", "_vec_string", "(", "sep", ",", "object_", ",", "'join'", ",", "(", "seq", ",", ")", ")", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py3/numpy/core/defchararray.py#L939-L961
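This is the implementation behind np.char.join; sep and seq broadcast against each other element-wise (examples adapted from numpy's documentation):

import numpy as np

np.char.join('-', 'osd')                  # array('o-s-d', dtype='<U5')
np.char.join(['-', '.'], ['ghc', 'osd'])  # array(['g-h-c', 'o.s.d'], dtype='<U5')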
intel/caffe
3f494b442ee3f9d17a07b09ecbd5fa2bbda00836
examples/faster-rcnn/lib/fast_rcnn/config.py
python
_merge_a_into_b
(a, b)
Merge config dictionary a into config dictionary b, clobbering the options in b whenever they are also specified in a.
Merge config dictionary a into config dictionary b, clobbering the options in b whenever they are also specified in a.
[ "Merge", "config", "dictionary", "a", "into", "config", "dictionary", "b", "clobbering", "the", "options", "in", "b", "whenever", "they", "are", "also", "specified", "in", "a", "." ]
def _merge_a_into_b(a, b): """Merge config dictionary a into config dictionary b, clobbering the options in b whenever they are also specified in a. """ if type(a) is not edict: return for k, v in a.iteritems(): # a must specify keys that are in b if not b.has_key(k): raise KeyError('{} is not a valid config key'.format(k)) # the types must match, too old_type = type(b[k]) if old_type is not type(v): if isinstance(b[k], np.ndarray): v = np.array(v, dtype=b[k].dtype) else: raise ValueError(('Type mismatch ({} vs. {}) ' 'for config key: {}').format(type(b[k]), type(v), k)) # recursively merge dicts if type(v) is edict: try: _merge_a_into_b(a[k], b[k]) except: print('Error under config key: {}'.format(k)) raise else: b[k] = v
[ "def", "_merge_a_into_b", "(", "a", ",", "b", ")", ":", "if", "type", "(", "a", ")", "is", "not", "edict", ":", "return", "for", "k", ",", "v", "in", "a", ".", "iteritems", "(", ")", ":", "# a must specify keys that are in b", "if", "not", "b", ".", "has_key", "(", "k", ")", ":", "raise", "KeyError", "(", "'{} is not a valid config key'", ".", "format", "(", "k", ")", ")", "# the types must match, too", "old_type", "=", "type", "(", "b", "[", "k", "]", ")", "if", "old_type", "is", "not", "type", "(", "v", ")", ":", "if", "isinstance", "(", "b", "[", "k", "]", ",", "np", ".", "ndarray", ")", ":", "v", "=", "np", ".", "array", "(", "v", ",", "dtype", "=", "b", "[", "k", "]", ".", "dtype", ")", "else", ":", "raise", "ValueError", "(", "(", "'Type mismatch ({} vs. {}) '", "'for config key: {}'", ")", ".", "format", "(", "type", "(", "b", "[", "k", "]", ")", ",", "type", "(", "v", ")", ",", "k", ")", ")", "# recursively merge dicts", "if", "type", "(", "v", ")", "is", "edict", ":", "try", ":", "_merge_a_into_b", "(", "a", "[", "k", "]", ",", "b", "[", "k", "]", ")", "except", ":", "print", "(", "'Error under config key: {}'", ".", "format", "(", "k", ")", ")", "raise", "else", ":", "b", "[", "k", "]", "=", "v" ]
https://github.com/intel/caffe/blob/3f494b442ee3f9d17a07b09ecbd5fa2bbda00836/examples/faster-rcnn/lib/fast_rcnn/config.py#L225-L255
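A hypothetical merge showing the three behaviors the function enforces; the code above is Python 2 (iteritems, has_key), so this sketch states the intended semantics rather than calling it from Python 3:

from easydict import EasyDict as edict
import numpy as np

b = edict({'TRAIN': {'SCALES': np.array([600]), 'MAX_SIZE': 1000}})
a = edict({'TRAIN': {'SCALES': [400, 500]}})
# after _merge_a_into_b(a, b):
#   b.TRAIN.SCALES   -> np.array([400, 500])  (list coerced to b's ndarray dtype)
#   b.TRAIN.MAX_SIZE -> 1000                  (untouched: not mentioned in a)
# a key missing from b (e.g. a.TRAIN.FOO = 1) raises KeyError, and a type
# mismatch (e.g. a.TRAIN.MAX_SIZE = '1000') raises ValueError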
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_windows.py
python
VarHScrollHelper.RefreshColumns
(*args, **kwargs)
return _windows_.VarHScrollHelper_RefreshColumns(*args, **kwargs)
RefreshColumns(self, size_t from, size_t to)
RefreshColumns(self, size_t from, size_t to)
[ "RefreshColumns", "(", "self", "size_t", "from", "size_t", "to", ")" ]
def RefreshColumns(*args, **kwargs): """RefreshColumns(self, size_t from, size_t to)""" return _windows_.VarHScrollHelper_RefreshColumns(*args, **kwargs)
[ "def", "RefreshColumns", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "VarHScrollHelper_RefreshColumns", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L2340-L2342
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/python/ops/nn_ops.py
python
xw_plus_b_v1
(x, weights, biases, name=None)
Computes matmul(x, weights) + biases. This is a deprecated version that will soon be removed. Args: x: a 2D tensor. Dimensions typically: batch, in_units weights: a 2D tensor. Dimensions typically: in_units, out_units biases: a 1D tensor. Dimensions: out_units name: A name for the operation (optional). If not specified "xw_plus_b_v1" is used. Returns: A 2-D Tensor computing matmul(x, weights) + biases. Dimensions typically: batch, out_units.
Computes matmul(x, weights) + biases.
[ "Computes", "matmul", "(", "x", "weights", ")", "+", "biases", "." ]
def xw_plus_b_v1(x, weights, biases, name=None):  # pylint: disable=invalid-name """Computes matmul(x, weights) + biases. This is a deprecated version that will soon be removed. Args: x: a 2D tensor. Dimensions typically: batch, in_units weights: a 2D tensor. Dimensions typically: in_units, out_units biases: a 1D tensor. Dimensions: out_units name: A name for the operation (optional). If not specified "xw_plus_b_v1" is used. Returns: A 2-D Tensor computing matmul(x, weights) + biases. Dimensions typically: batch, out_units. """ with ops.op_scope([x, weights, biases], name, "xw_plus_b_v1") as name: x = ops.convert_to_tensor(x, name="x") weights = ops.convert_to_tensor(weights, name="weights") biases = ops.convert_to_tensor(biases, name="biases") mm = math_ops.matmul(x, weights) return bias_add_v1(mm, biases, name=name)
[ "def", "xw_plus_b_v1", "(", "x", ",", "weights", ",", "biases", ",", "name", "=", "None", ")", ":", "# pylint: disable=invalid-name", "with", "ops", ".", "op_scope", "(", "[", "x", ",", "weights", ",", "biases", "]", ",", "name", ",", "\"xw_plus_b_v1\"", ")", "as", "name", ":", "x", "=", "ops", ".", "convert_to_tensor", "(", "x", ",", "name", "=", "\"x\"", ")", "weights", "=", "ops", ".", "convert_to_tensor", "(", "weights", ",", "name", "=", "\"weights\"", ")", "biases", "=", "ops", ".", "convert_to_tensor", "(", "biases", ",", "name", "=", "\"biases\"", ")", "mm", "=", "math_ops", ".", "matmul", "(", "x", ",", "weights", ")", "return", "bias_add_v1", "(", "mm", ",", "biases", ",", "name", "=", "name", ")" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/ops/nn_ops.py#L1053-L1074
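The arithmetic being wrapped, written out in plain numpy to show the shapes (x is [batch, in_units], weights [in_units, out_units], biases [out_units], broadcast over the batch):

import numpy as np

x = np.array([[1., 2.]])       # batch=1, in_units=2
w = np.array([[3., 4.],
              [5., 6.]])       # in_units=2, out_units=2
b = np.array([0.5, -0.5])      # out_units=2
y = x @ w + b                  # [[13.5, 15.5]]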
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/image_ops_impl.py
python
_Assert3DImage
(image)
return control_flow_ops.with_dependencies( _Check3DImage(image, require_static=False), image)
Assert that we are working with a properly shaped image. Performs the check statically if possible (i.e. if the shape is statically known). Otherwise adds a control dependency to an assert op that checks the dynamic shape. Args: image: 3-D Tensor of shape [height, width, channels] Raises: ValueError: if `image.shape` is not a 3-vector. Returns: If the shape of `image` could be verified statically, `image` is returned unchanged, otherwise there will be a control dependency added that asserts the correct dynamic shape.
Assert that we are working with a properly shaped image.
[ "Assert", "that", "we", "are", "working", "with", "a", "properly", "shaped", "image", "." ]
def _Assert3DImage(image): """Assert that we are working with a properly shaped image. Performs the check statically if possible (i.e. if the shape is statically known). Otherwise adds a control dependency to an assert op that checks the dynamic shape. Args: image: 3-D Tensor of shape [height, width, channels] Raises: ValueError: if `image.shape` is not a 3-vector. Returns: If the shape of `image` could be verified statically, `image` is returned unchanged, otherwise there will be a control dependency added that asserts the correct dynamic shape. """ return control_flow_ops.with_dependencies( _Check3DImage(image, require_static=False), image)
[ "def", "_Assert3DImage", "(", "image", ")", ":", "return", "control_flow_ops", ".", "with_dependencies", "(", "_Check3DImage", "(", "image", ",", "require_static", "=", "False", ")", ",", "image", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/image_ops_impl.py#L153-L172
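A hedged TF1 graph-mode sketch of the two code paths; _Assert3DImage is private, so this assumes it is in scope as above:

import tensorflow as tf  # assumes TF 1.x graph mode

img = _Assert3DImage(tf.zeros([32, 32, 3]))        # fully static: returned unchanged
dyn = tf.placeholder(tf.float32, [None, None, 3])
img2 = _Assert3DImage(dyn)                         # sizes dynamic: assert op attached
# a 2-D input such as tf.zeros([32, 32]) fails already at
# graph-construction time with a ValueError from the static rank check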
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/py/py/_path/svnurl.py
python
SvnCommandPath.mkdir
(self, *args, **kwargs)
return createpath
create & return the directory joined with args. pass a 'msg' keyword argument to set the commit message.
create & return the directory joined with args. pass a 'msg' keyword argument to set the commit message.
[ "create", "&", "return", "the", "directory", "joined", "with", "args", ".", "pass", "a", "msg", "keyword", "argument", "to", "set", "the", "commit", "message", "." ]
def mkdir(self, *args, **kwargs): """ create & return the directory joined with args. pass a 'msg' keyword argument to set the commit message. """ commit_msg = kwargs.get('msg', "mkdir by py lib invocation") createpath = self.join(*args) createpath._svnwrite('mkdir', '-m', commit_msg) self._norev_delentry(createpath.dirpath()) return createpath
[ "def", "mkdir", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "commit_msg", "=", "kwargs", ".", "get", "(", "'msg'", ",", "\"mkdir by py lib invocation\"", ")", "createpath", "=", "self", ".", "join", "(", "*", "args", ")", "createpath", ".", "_svnwrite", "(", "'mkdir'", ",", "'-m'", ",", "commit_msg", ")", "self", ".", "_norev_delentry", "(", "createpath", ".", "dirpath", "(", ")", ")", "return", "createpath" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/py/py/_path/svnurl.py#L123-L131
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/ed_main.py
python
MainWindow.GetShelf
(self)
return self._shelf
Get this window's Shelf @return: reference to L{iface.Shelf} instance @note: returns the plugin instance not the actual notebook, if a reference to the notebook is needed for parenting call GetWindow on the object returned by this function.
Get this window's Shelf @return: reference to L{iface.Shelf} instance @note: returns the plugin instance not the actual notebook, if a reference to the notebook is needed for parenting call GetWindow on the object returned by this function.
[ "Get", "this", "window", "s", "Shelf", "@return", ":", "reference", "to", "L", "{", "iface", ".", "Shelf", "}", "instance", "@note", ":", "returns", "the", "plugin", "instance", "not", "the", "actual", "notebook", "if", "a", "reference", "to", "the", "notebook", "is", "needed", "for", "parenting", "call", "GetWindow", "on", "the", "object", "returned", "by", "this", "function", "." ]
def GetShelf(self): """Get this window's Shelf @return: reference to L{iface.Shelf} instance @note: returns the plugin instance not the actual notebook, if a reference to the notebook is needed for parenting call GetWindow on the object returned by this function. """ return self._shelf
[ "def", "GetShelf", "(", "self", ")", ":", "return", "self", ".", "_shelf" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/ed_main.py#L515-L523