Column              Type               Min length   Max length
nwo                 string             5            106
sha                 string             40           40
path                string             4            174
language            string (1 class)   -            -
identifier          string             1            140
parameters          string             0            87.7k
argument_list       string (1 class)   -            -
return_statement    string             0            426k
docstring           string             0            64.3k
docstring_summary   string             0            26.3k
docstring_tokens    list               -            -
function            string             18           4.83M
function_tokens     list               -            -
url                 string             83           304
nerdvegas/rez
d392c65bf63b4bca8106f938cec49144ba54e770
src/rez/vendor/memcache/memcache.py
python
_Host.send_cmds
(self, cmds)
cmds already has trailing \r\n's applied.
cmds already has trailing \r\n's applied.
[ "cmds", "already", "has", "trailing", "\\", "r", "\\", "n", "s", "applied", "." ]
def send_cmds(self, cmds):
    """cmds already has trailing \r\n's applied."""
    if isinstance(cmds, six.text_type):
        cmds = cmds.encode('utf8')
    self.socket.sendall(cmds)
[ "def", "send_cmds", "(", "self", ",", "cmds", ")", ":", "if", "isinstance", "(", "cmds", ",", "six", ".", "text_type", ")", ":", "cmds", "=", "cmds", ".", "encode", "(", "'utf8'", ")", "self", ".", "socket", ".", "sendall", "(", "cmds", ")" ]
https://github.com/nerdvegas/rez/blob/d392c65bf63b4bca8106f938cec49144ba54e770/src/rez/vendor/memcache/memcache.py#L1427-L1431
google/Legilimency
02cd38fd82a90bca789b8bf61c88f4bc5dd1eb07
MemClient.py
python
MemClient.read64
(self, addr)
return tokens[0] if addr % (2 * QWORD_SIZE) == 0 else tokens[1]
Reads the 64-bit value at the given address.
Reads the 64-bit value at the given address.
[ "Reads", "the", "64", "-", "bit", "value", "at", "the", "given", "address", "." ]
def read64(self, addr):
    """
    Reads the 64-bit value at the given address.
    """
    if addr % QWORD_SIZE != 0:
        raise Exception("Address must be QWORD-aligned : 0x%016X" % addr)
    dqword_aligned_addr = addr - (addr % (2 * QWORD_SIZE))
    tokens = self.read128(dqword_aligned_addr)
    return tokens[0] if addr % (2 * QWORD_SIZE) == 0 else tokens[1]
[ "def", "read64", "(", "self", ",", "addr", ")", ":", "if", "addr", "%", "QWORD_SIZE", "!=", "0", ":", "raise", "Exception", "(", "\"Address must be QWORD-aligned : 0x%016X\"", "%", "addr", ")", "dqword_aligned_addr", "=", "addr", "-", "(", "addr", "%", "(", "2", "*", "QWORD_SIZE", ")", ")", "tokens", "=", "self", ".", "read128", "(", "dqword_aligned_addr", ")", "return", "tokens", "[", "0", "]", "if", "addr", "%", "(", "2", "*", "QWORD_SIZE", ")", "==", "0", "else", "tokens", "[", "1", "]" ]
https://github.com/google/Legilimency/blob/02cd38fd82a90bca789b8bf61c88f4bc5dd1eb07/MemClient.py#L106-L114
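The alignment arithmetic in the record above is easy to check in isolation. A minimal sketch, assuming QWORD_SIZE is 8 (as the 64-bit reads suggest) and with the read128 result replaced by a hand-made pair:

QWORD_SIZE = 8  # assumption: 8-byte qwords

def select_qword(addr, tokens):
    # read128 returns the two qwords of the enclosing 16-byte block;
    # a 16-byte-aligned addr wants the first, addr + 8 wants the second.
    if addr % QWORD_SIZE != 0:
        raise ValueError("Address must be QWORD-aligned: 0x%016X" % addr)
    return tokens[0] if addr % (2 * QWORD_SIZE) == 0 else tokens[1]

assert select_qword(0x1000, ("lo", "hi")) == "lo"
assert select_qword(0x1008, ("lo", "hi")) == "hi"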
smart-mobile-software/gitstack
d9fee8f414f202143eb6e620529e8e5539a2af56
python/Lib/lib-tk/Tkinter.py
python
Menu.activate
(self, index)
Activate entry at INDEX.
Activate entry at INDEX.
[ "Activate", "entry", "at", "INDEX", "." ]
def activate(self, index):
    """Activate entry at INDEX."""
    self.tk.call(self._w, 'activate', index)
[ "def", "activate", "(", "self", ",", "index", ")", ":", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "'activate'", ",", "index", ")" ]
https://github.com/smart-mobile-software/gitstack/blob/d9fee8f414f202143eb6e620529e8e5539a2af56/python/Lib/lib-tk/Tkinter.py#L2606-L2608
XX-net/XX-Net
a9898cfcf0084195fb7e69b6bc834e59aecdf14f
code/default/lib/noarch/sortedcontainers/sortedlist.py
python
SortedList.__contains__
(self, val)
return _lists[pos][idx] == val
Return True if and only if *val* is an element in the list.
Return True if and only if *val* is an element in the list.
[ "Return", "True", "if", "and", "only", "if", "*", "val", "*", "is", "an", "element", "in", "the", "list", "." ]
def __contains__(self, val):
    """Return True if and only if *val* is an element in the list."""
    _maxes = self._maxes

    if not _maxes:
        return False

    pos = bisect_left(_maxes, val)

    if pos == len(_maxes):
        return False

    _lists = self._lists
    idx = bisect_left(_lists[pos], val)

    return _lists[pos][idx] == val
[ "def", "__contains__", "(", "self", ",", "val", ")", ":", "_maxes", "=", "self", ".", "_maxes", "if", "not", "_maxes", ":", "return", "False", "pos", "=", "bisect_left", "(", "_maxes", ",", "val", ")", "if", "pos", "==", "len", "(", "_maxes", ")", ":", "return", "False", "_lists", "=", "self", ".", "_lists", "idx", "=", "bisect_left", "(", "_lists", "[", "pos", "]", ",", "val", ")", "return", "_lists", "[", "pos", "]", "[", "idx", "]", "==", "val" ]
https://github.com/XX-net/XX-Net/blob/a9898cfcf0084195fb7e69b6bc834e59aecdf14f/code/default/lib/noarch/sortedcontainers/sortedlist.py#L153-L167
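The two-level bisect in the record above can be traced on a toy layout. A minimal sketch, with the _maxes/_lists structure built by hand rather than by SortedList itself:

from bisect import bisect_left

_lists = [[1, 3, 5], [7, 9, 11]]               # sorted sublists
_maxes = [sublist[-1] for sublist in _lists]   # [5, 11]

def contains(val):
    if not _maxes:
        return False
    pos = bisect_left(_maxes, val)   # first sublist whose max >= val
    if pos == len(_maxes):
        return False                 # val exceeds every element
    idx = bisect_left(_lists[pos], val)
    return _lists[pos][idx] == val

assert contains(9) and not contains(8) and not contains(12)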
isce-framework/isce2
0e5114a8bede3caf1d533d98e44dfe4b983e3f48
components/isceobj/Scene/Frame.py
python
Frame.getSensingStop
(self)
return self._sensingStop
The UTC date and time of the last azimuth line
The UTC date and time of the last azimuth line
[ "The", "UTC", "date", "and", "time", "of", "the", "last", "azimuth", "line" ]
def getSensingStop(self):
    """The UTC date and time of the last azimuth line"""
    return self._sensingStop
[ "def", "getSensingStop", "(", "self", ")", ":", "return", "self", ".", "_sensingStop" ]
https://github.com/isce-framework/isce2/blob/0e5114a8bede3caf1d533d98e44dfe4b983e3f48/components/isceobj/Scene/Frame.py#L392-L394
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/pip/_vendor/html5lib/_inputstream.py
python
HTMLUnicodeInputStream._position
(self, offset)
return (positionLine, positionColumn)
[]
def _position(self, offset):
    chunk = self.chunk
    nLines = chunk.count('\n', 0, offset)
    positionLine = self.prevNumLines + nLines
    lastLinePos = chunk.rfind('\n', 0, offset)
    if lastLinePos == -1:
        positionColumn = self.prevNumCols + offset
    else:
        positionColumn = offset - (lastLinePos + 1)
    return (positionLine, positionColumn)
[ "def", "_position", "(", "self", ",", "offset", ")", ":", "chunk", "=", "self", ".", "chunk", "nLines", "=", "chunk", ".", "count", "(", "'\\n'", ",", "0", ",", "offset", ")", "positionLine", "=", "self", ".", "prevNumLines", "+", "nLines", "lastLinePos", "=", "chunk", ".", "rfind", "(", "'\\n'", ",", "0", ",", "offset", ")", "if", "lastLinePos", "==", "-", "1", ":", "positionColumn", "=", "self", ".", "prevNumCols", "+", "offset", "else", ":", "positionColumn", "=", "offset", "-", "(", "lastLinePos", "+", "1", ")", "return", "(", "positionLine", ",", "positionColumn", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/pip/_vendor/html5lib/_inputstream.py#L224-L233
tobegit3hub/deep_image_model
8a53edecd9e00678b278bb10f6fb4bdb1e4ee25e
java_predict_client/src/main/proto/tensorflow/contrib/learn/python/learn/preprocessing/text.py
python
VocabularyProcessor.save
(self, filename)
Saves vocabulary processor into given file.

Args:
    filename: Path to output file.
Saves vocabulary processor into given file.
[ "Saves", "vocabulary", "processor", "into", "given", "file", "." ]
def save(self, filename):
    """Saves vocabulary processor into given file.

    Args:
        filename: Path to output file.
    """
    with gfile.Open(filename, 'wb') as f:
        f.write(pickle.dumps(self))
[ "def", "save", "(", "self", ",", "filename", ")", ":", "with", "gfile", ".", "Open", "(", "filename", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "pickle", ".", "dumps", "(", "self", ")", ")" ]
https://github.com/tobegit3hub/deep_image_model/blob/8a53edecd9e00678b278bb10f6fb4bdb1e4ee25e/java_predict_client/src/main/proto/tensorflow/contrib/learn/python/learn/preprocessing/text.py#L206-L213
facebookresearch/BLINK
5fe254dd64d37332347edc73738edcb56096183f
blink/candidate_ranking/bert_reranking.py
python
BertReranker._get_context_token_representation
( context_key, sample, tokenizer, max_sub_seq_length, start_token, end_token, mention_text_key="text", tagged=True, )
return context_tokens
Tags the mention, trims the context and concatenates everything to form the context representation
Tags the mention, trims the context and concatenates everything to form the context representation
[ "Tags", "the", "mention", "trims", "the", "context", "and", "concatenates", "everything", "to", "form", "the", "context", "representation" ]
def _get_context_token_representation(
    context_key,
    sample,
    tokenizer,
    max_sub_seq_length,
    start_token,
    end_token,
    mention_text_key="text",
    tagged=True,
):
    """Tags the mention, trims the context and concatenates everything to form the context representation"""
    mention_tokens = (
        [start_token] + tokenizer.tokenize(sample[mention_text_key]) + [end_token]
    )
    max_sub_seq_length = (max_sub_seq_length - len(mention_tokens)) // 2
    context_left, context_right = sample[context_key]
    context_left = tokenizer.tokenize(context_left)
    context_right = tokenizer.tokenize(context_right)
    if len(context_left) > max_sub_seq_length:
        context_left = context_left[-max_sub_seq_length:]
    if len(context_right) > max_sub_seq_length:
        context_right = context_right[:max_sub_seq_length]
    context_tokens = context_left + mention_tokens + context_right
    return context_tokens
[ "def", "_get_context_token_representation", "(", "context_key", ",", "sample", ",", "tokenizer", ",", "max_sub_seq_length", ",", "start_token", ",", "end_token", ",", "mention_text_key", "=", "\"text\"", ",", "tagged", "=", "True", ",", ")", ":", "mention_tokens", "=", "(", "[", "start_token", "]", "+", "tokenizer", ".", "tokenize", "(", "sample", "[", "mention_text_key", "]", ")", "+", "[", "end_token", "]", ")", "max_sub_seq_length", "=", "(", "max_sub_seq_length", "-", "len", "(", "mention_tokens", ")", ")", "//", "2", "context_left", ",", "context_right", "=", "sample", "[", "context_key", "]", "context_left", "=", "tokenizer", ".", "tokenize", "(", "context_left", ")", "context_right", "=", "tokenizer", ".", "tokenize", "(", "context_right", ")", "if", "len", "(", "context_left", ")", ">", "max_sub_seq_length", ":", "context_left", "=", "context_left", "[", "-", "max_sub_seq_length", ":", "]", "if", "len", "(", "context_right", ")", ">", "max_sub_seq_length", ":", "context_right", "=", "context_right", "[", ":", "max_sub_seq_length", "]", "context_tokens", "=", "context_left", "+", "mention_tokens", "+", "context_right", "return", "context_tokens" ]
https://github.com/facebookresearch/BLINK/blob/5fe254dd64d37332347edc73738edcb56096183f/blink/candidate_ranking/bert_reranking.py#L363-L390
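The budget arithmetic in the record above is easier to see with a throwaway tokenizer. A minimal sketch, using whitespace str.split in place of the BERT tokenizer; the "[M]"/"[/M]" markers and the sample layout are hypothetical stand-ins:

def context_tokens(sample, max_sub_seq_length, start_token="[M]", end_token="[/M]"):
    tokenize = str.split  # stand-in for tokenizer.tokenize
    mention = [start_token] + tokenize(sample["text"]) + [end_token]
    budget = (max_sub_seq_length - len(mention)) // 2  # per-side token budget
    left, right = sample["context"]
    left, right = tokenize(left), tokenize(right)
    if len(left) > budget:
        left = left[-budget:]    # keep tokens nearest the mention
    if len(right) > budget:
        right = right[:budget]
    return left + mention + right

sample = {"text": "New York", "context": ("I flew from Boston to", "last week for a show")}
print(context_tokens(sample, max_sub_seq_length=10))
# ['from', 'Boston', 'to', '[M]', 'New', 'York', '[/M]', 'last', 'week', 'for']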
numba/numba
bf480b9e0da858a65508c2b17759a72ee6a44c51
numba/core/pythonapi.py
python
PythonAPI.recreate_record
(self, pdata, size, dtype, env_manager)
return self.builder.call(fn, [pdata, size, dtypeaddr])
[]
def recreate_record(self, pdata, size, dtype, env_manager):
    fnty = Type.function(self.pyobj, [Type.pointer(Type.int(8)),
                                      Type.int(), self.pyobj])
    fn = self._get_function(fnty, name="numba_recreate_record")
    dtypeaddr = env_manager.read_const(env_manager.add_const(dtype))
    return self.builder.call(fn, [pdata, size, dtypeaddr])
[ "def", "recreate_record", "(", "self", ",", "pdata", ",", "size", ",", "dtype", ",", "env_manager", ")", ":", "fnty", "=", "Type", ".", "function", "(", "self", ".", "pyobj", ",", "[", "Type", ".", "pointer", "(", "Type", ".", "int", "(", "8", ")", ")", ",", "Type", ".", "int", "(", ")", ",", "self", ".", "pyobj", "]", ")", "fn", "=", "self", ".", "_get_function", "(", "fnty", ",", "name", "=", "\"numba_recreate_record\"", ")", "dtypeaddr", "=", "env_manager", ".", "read_const", "(", "env_manager", ".", "add_const", "(", "dtype", ")", ")", "return", "self", ".", "builder", ".", "call", "(", "fn", ",", "[", "pdata", ",", "size", ",", "dtypeaddr", "]", ")" ]
https://github.com/numba/numba/blob/bf480b9e0da858a65508c2b17759a72ee6a44c51/numba/core/pythonapi.py#L1544-L1549
collinsctk/PyQYT
7af3673955f94ff1b2df2f94220cd2dab2e252af
ExtentionPackages/pysmi/parser/smi.py
python
SmiV2Parser.p_importedKeyword
(self, p)
importedKeyword : importedSMIKeyword | BITS | INTEGER32 | IPADDRESS | MANDATORY_GROUPS | MODULE_COMPLIANCE | MODULE_IDENTITY | OBJECT_GROUP | OBJECT_IDENTITY | OBJECT_TYPE | OPAQUE | TEXTUAL_CONVENTION | TIMETICKS | UNSIGNED32
importedKeyword : importedSMIKeyword | BITS | INTEGER32 | IPADDRESS | MANDATORY_GROUPS | MODULE_COMPLIANCE | MODULE_IDENTITY | OBJECT_GROUP | OBJECT_IDENTITY | OBJECT_TYPE | OPAQUE | TEXTUAL_CONVENTION | TIMETICKS | UNSIGNED32
[ "importedKeyword", ":", "importedSMIKeyword", "|", "BITS", "|", "INTEGER32", "|", "IPADDRESS", "|", "MANDATORY_GROUPS", "|", "MODULE_COMPLIANCE", "|", "MODULE_IDENTITY", "|", "OBJECT_GROUP", "|", "OBJECT_IDENTITY", "|", "OBJECT_TYPE", "|", "OPAQUE", "|", "TEXTUAL_CONVENTION", "|", "TIMETICKS", "|", "UNSIGNED32" ]
def p_importedKeyword(self, p):
    """importedKeyword : importedSMIKeyword
                       | BITS
                       | INTEGER32
                       | IPADDRESS
                       | MANDATORY_GROUPS
                       | MODULE_COMPLIANCE
                       | MODULE_IDENTITY
                       | OBJECT_GROUP
                       | OBJECT_IDENTITY
                       | OBJECT_TYPE
                       | OPAQUE
                       | TEXTUAL_CONVENTION
                       | TIMETICKS
                       | UNSIGNED32"""
    p[0] = p[1]
[ "def", "p_importedKeyword", "(", "self", ",", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
https://github.com/collinsctk/PyQYT/blob/7af3673955f94ff1b2df2f94220cd2dab2e252af/ExtentionPackages/pysmi/parser/smi.py#L157-L172
linxid/Machine_Learning_Study_Path
558e82d13237114bbb8152483977806fc0c222af
Machine Learning In Action/Chapter4-NaiveBayes/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py
python
get_config_var
(name)
return get_config_vars().get(name)
Return the value of a single variable using the dictionary returned by 'get_config_vars()'. Equivalent to get_config_vars().get(name)
Return the value of a single variable using the dictionary returned by 'get_config_vars()'.
[ "Return", "the", "value", "of", "a", "single", "variable", "using", "the", "dictionary", "returned", "by", "get_config_vars", "()", "." ]
def get_config_var(name):
    """Return the value of a single variable using the dictionary returned by
    'get_config_vars()'.

    Equivalent to get_config_vars().get(name)
    """
    return get_config_vars().get(name)
[ "def", "get_config_var", "(", "name", ")", ":", "return", "get_config_vars", "(", ")", ".", "get", "(", "name", ")" ]
https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter4-NaiveBayes/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py#L594-L600
lsbardel/python-stdnet
78db5320bdedc3f28c5e4f38cda13a4469e35db7
stdnet/utils/dates.py
python
Interval.__reduce__
(self)
return tuple, (tuple(self),)
[]
def __reduce__(self):
    return tuple, (tuple(self),)
[ "def", "__reduce__", "(", "self", ")", ":", "return", "tuple", ",", "(", "tuple", "(", "self", ")", ",", ")" ]
https://github.com/lsbardel/python-stdnet/blob/78db5320bdedc3f28c5e4f38cda13a4469e35db7/stdnet/utils/dates.py#L12-L13
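Returning (tuple, (tuple(self),)) from __reduce__ tells pickle to rebuild the object by calling tuple on its items, so a pickled Interval comes back as a plain tuple. A minimal sketch with a stand-in subclass:

import pickle

class Interval(tuple):  # stand-in for stdnet's Interval
    def __reduce__(self):
        return tuple, (tuple(self),)

restored = pickle.loads(pickle.dumps(Interval((1, 2))))
print(restored, type(restored))  # (1, 2) <class 'tuple'>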
gem/oq-engine
1bdb88f3914e390abcbd285600bfd39477aae47c
openquake/hazardlib/gsim/faccioli_cauzzi_2006.py
python
FaccioliCauzzi2006.compute
(self, ctx: np.recarray, imts, mean, sig, tau, phi)
See :meth:`superclass method <.base.GroundShakingIntensityModel.compute>` for spec of input and result values.
See :meth:`superclass method <.base.GroundShakingIntensityModel.compute>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "compute", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
def compute(self, ctx: np.recarray, imts, mean, sig, tau, phi):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.compute>`
    for spec of input and result values.
    """
    for m, imt in enumerate(imts):
        C = self.COEFFS[imt]
        d = np.sqrt(ctx.repi**2 + C['h']**2)
        term01 = C['c3'] * (np.log(d))
        mean[m] = C['c1'] + C['c2'] * ctx.mag + term01
        sig[m] = C['sigma']
[ "def", "compute", "(", "self", ",", "ctx", ":", "np", ".", "recarray", ",", "imts", ",", "mean", ",", "sig", ",", "tau", ",", "phi", ")", ":", "for", "m", ",", "imt", "in", "enumerate", "(", "imts", ")", ":", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "d", "=", "np", ".", "sqrt", "(", "ctx", ".", "repi", "**", "2", "+", "C", "[", "'h'", "]", "**", "2", ")", "term01", "=", "C", "[", "'c3'", "]", "*", "(", "np", ".", "log", "(", "d", ")", ")", "mean", "[", "m", "]", "=", "C", "[", "'c1'", "]", "+", "C", "[", "'c2'", "]", "*", "ctx", ".", "mag", "+", "term01", "sig", "[", "m", "]", "=", "C", "[", "'sigma'", "]" ]
https://github.com/gem/oq-engine/blob/1bdb88f3914e390abcbd285600bfd39477aae47c/openquake/hazardlib/gsim/faccioli_cauzzi_2006.py#L56-L67
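Written out, the loop body in the record above evaluates a textbook magnitude-plus-log-distance form per IMT (whether the predicted quantity is log ground motion follows the GSIM library's convention, which is an assumption here; the coefficient names come straight from the code):

\mathrm{mean}_m = c_1 + c_2\,M + c_3 \ln d,
\qquad d = \sqrt{R_{\mathrm{epi}}^2 + h^2},
\qquad \mathrm{sig}_m = \sigma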
fluentpython/notebooks
0f6e1e8d1686743dacd9281df7c5b5921812010a
attic/control/adder/coroadder_deco.py
python
coroutine
(func)
return primed_coroutine
[]
def coroutine(func):
    def primed_coroutine(*args, **kwargs):
        coro = func(*args, **kwargs)
        next(coro)
        return coro
    return primed_coroutine
[ "def", "coroutine", "(", "func", ")", ":", "def", "primed_coroutine", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "coro", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "next", "(", "coro", ")", "return", "coro", "return", "primed_coroutine" ]
https://github.com/fluentpython/notebooks/blob/0f6e1e8d1686743dacd9281df7c5b5921812010a/attic/control/adder/coroadder_deco.py#L44-L49
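A usage sketch for the priming decorator above; the running-average coroutine is an assumption, in the spirit of the Fluent Python adder examples:

@coroutine
def averager():
    total, count = 0.0, 0
    while True:
        value = yield (total / count if count else None)
        total += value
        count += 1

avg = averager()      # already primed: no explicit next() needed
print(avg.send(10))   # 10.0
print(avg.send(20))   # 15.0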
cupy/cupy
a47ad3105f0fe817a4957de87d98ddccb8c7491f
cupy/_indexing/indexing.py
python
take_along_axis
(a, indices, axis)
return a[tuple(fancy_index)]
Take values from the input array by matching 1d index and data slices.

Args:
    a (cupy.ndarray): Array to extract elements.
    indices (cupy.ndarray): Indices to take along each 1d slice of ``a``.
    axis (int): The axis to take 1d slices along.

Returns:
    cupy.ndarray: The indexed result.

.. seealso:: :func:`numpy.take_along_axis`
Take values from the input array by matching 1d index and data slices.
[ "Take", "values", "from", "the", "input", "array", "by", "matching", "1d", "index", "and", "data", "slices", "." ]
def take_along_axis(a, indices, axis):
    """Take values from the input array by matching 1d index and data slices.

    Args:
        a (cupy.ndarray): Array to extract elements.
        indices (cupy.ndarray): Indices to take along each 1d slice of ``a``.
        axis (int): The axis to take 1d slices along.

    Returns:
        cupy.ndarray: The indexed result.

    .. seealso:: :func:`numpy.take_along_axis`
    """
    if indices.dtype.kind not in ('i', 'u'):
        raise IndexError('`indices` must be an integer array')

    if axis is None:
        a = a.ravel()
        axis = 0

    ndim = a.ndim
    axis = internal._normalize_axis_index(axis, ndim)

    if ndim != indices.ndim:
        raise ValueError(
            '`indices` and `a` must have the same number of dimensions')

    fancy_index = []
    for i, n in enumerate(a.shape):
        if i == axis:
            fancy_index.append(indices)
        else:
            ind_shape = (1,) * i + (-1,) + (1,) * (ndim - i - 1)
            fancy_index.append(cupy.arange(n).reshape(ind_shape))

    return a[tuple(fancy_index)]
[ "def", "take_along_axis", "(", "a", ",", "indices", ",", "axis", ")", ":", "if", "indices", ".", "dtype", ".", "kind", "not", "in", "(", "'i'", ",", "'u'", ")", ":", "raise", "IndexError", "(", "'`indices` must be an integer array'", ")", "if", "axis", "is", "None", ":", "a", "=", "a", ".", "ravel", "(", ")", "axis", "=", "0", "ndim", "=", "a", ".", "ndim", "axis", "=", "internal", ".", "_normalize_axis_index", "(", "axis", ",", "ndim", ")", "if", "ndim", "!=", "indices", ".", "ndim", ":", "raise", "ValueError", "(", "'`indices` and `a` must have the same number of dimensions'", ")", "fancy_index", "=", "[", "]", "for", "i", ",", "n", "in", "enumerate", "(", "a", ".", "shape", ")", ":", "if", "i", "==", "axis", ":", "fancy_index", ".", "append", "(", "indices", ")", "else", ":", "ind_shape", "=", "(", "1", ",", ")", "*", "i", "+", "(", "-", "1", ",", ")", "+", "(", "1", ",", ")", "*", "(", "ndim", "-", "i", "-", "1", ")", "fancy_index", ".", "append", "(", "cupy", ".", "arange", "(", "n", ")", ".", "reshape", "(", "ind_shape", ")", ")", "return", "a", "[", "tuple", "(", "fancy_index", ")", "]" ]
https://github.com/cupy/cupy/blob/a47ad3105f0fe817a4957de87d98ddccb8c7491f/cupy/_indexing/indexing.py#L31-L68
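The fancy-index construction above can be exercised through NumPy's identically-shaped API, since numpy.take_along_axis mirrors the CuPy signature. A minimal sketch:

import numpy as np

a = np.array([[10, 30, 20],
              [60, 40, 50]])
order = np.argsort(a, axis=1)            # per-row sort order
print(np.take_along_axis(a, order, axis=1))
# [[10 20 30]
#  [40 50 60]]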
kubernetes-client/python
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
kubernetes/client/api/node_v1_api.py
python
NodeV1Api.get_api_resources_with_http_info
(self, **kwargs)
return self.api_client.call_api(
    '/apis/node.k8s.io/v1/', 'GET',
    path_params,
    query_params,
    header_params,
    body=body_params,
    post_params=form_params,
    files=local_var_files,
    response_type='V1APIResourceList',  # noqa: E501
    auth_settings=auth_settings,
    async_req=local_var_params.get('async_req'),
    _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
    _preload_content=local_var_params.get('_preload_content', True),
    _request_timeout=local_var_params.get('_request_timeout'),
    collection_formats=collection_formats)
get_api_resources  # noqa: E501

get available resources  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources_with_http_info(async_req=True)
>>> result = thread.get()

:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
                               and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
                         be returned without reading/decoding response
                         data. Default is True.
:param _request_timeout: timeout setting for this request. If one
                         number provided, it will be total request
                         timeout. It can also be a pair (tuple) of
                         (connection, read) timeouts.
:return: tuple(V1APIResourceList, status_code(int), headers(HTTPHeaderDict))
         If the method is called asynchronously,
         returns the request thread.
get_api_resources # noqa: E501
[ "get_api_resources", "#", "noqa", ":", "E501" ]
def get_api_resources_with_http_info(self, **kwargs):  # noqa: E501
    """get_api_resources  # noqa: E501

    get available resources  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_api_resources_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(V1APIResourceList, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    local_var_params = locals()

    all_params = [
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout'
        ]
    )

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_api_resources" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])  # noqa: E501

    # Authentication setting
    auth_settings = ['BearerToken']  # noqa: E501

    return self.api_client.call_api(
        '/apis/node.k8s.io/v1/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1APIResourceList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
[ "def", "get_api_resources_with_http_info", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "local_var_params", "=", "locals", "(", ")", "all_params", "=", "[", "]", "all_params", ".", "extend", "(", "[", "'async_req'", ",", "'_return_http_data_only'", ",", "'_preload_content'", ",", "'_request_timeout'", "]", ")", "for", "key", ",", "val", "in", "six", ".", "iteritems", "(", "local_var_params", "[", "'kwargs'", "]", ")", ":", "if", "key", "not", "in", "all_params", ":", "raise", "ApiTypeError", "(", "\"Got an unexpected keyword argument '%s'\"", "\" to method get_api_resources\"", "%", "key", ")", "local_var_params", "[", "key", "]", "=", "val", "del", "local_var_params", "[", "'kwargs'", "]", "collection_formats", "=", "{", "}", "path_params", "=", "{", "}", "query_params", "=", "[", "]", "header_params", "=", "{", "}", "form_params", "=", "[", "]", "local_var_files", "=", "{", "}", "body_params", "=", "None", "# HTTP header `Accept`", "header_params", "[", "'Accept'", "]", "=", "self", ".", "api_client", ".", "select_header_accept", "(", "[", "'application/json'", ",", "'application/yaml'", ",", "'application/vnd.kubernetes.protobuf'", "]", ")", "# noqa: E501", "# Authentication setting", "auth_settings", "=", "[", "'BearerToken'", "]", "# noqa: E501", "return", "self", ".", "api_client", ".", "call_api", "(", "'/apis/node.k8s.io/v1/'", ",", "'GET'", ",", "path_params", ",", "query_params", ",", "header_params", ",", "body", "=", "body_params", ",", "post_params", "=", "form_params", ",", "files", "=", "local_var_files", ",", "response_type", "=", "'V1APIResourceList'", ",", "# noqa: E501", "auth_settings", "=", "auth_settings", ",", "async_req", "=", "local_var_params", ".", "get", "(", "'async_req'", ")", ",", "_return_http_data_only", "=", "local_var_params", ".", "get", "(", "'_return_http_data_only'", ")", ",", "# noqa: E501", "_preload_content", "=", "local_var_params", ".", "get", "(", "'_preload_content'", ",", "True", ")", ",", "_request_timeout", "=", "local_var_params", ".", "get", "(", "'_request_timeout'", ")", ",", "collection_formats", "=", "collection_formats", ")" ]
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/api/node_v1_api.py#L506-L585
P1sec/pycrate
d12bbccf1df8c9c7891a26967a9d2635610ec5b8
pycrate_core/base.py
python
UintLE._to_pack
(self)
Produces a tuple ready to be packed with pack_val() according to its internal value
Produces a tuple ready to be packed with pack_val() according to its internal value
[ "Produces", "a", "tuple", "ready", "to", "be", "packed", "with", "pack_val", "()", "according", "to", "its", "internal", "value" ]
def _to_pack(self):
    """Produces a tuple ready to be packed with pack_val() according to its
    internal value
    """
    if not self.get_trans():
        return [(TYPE_UINT_LE, self.get_val(), self.get_bl())]
    else:
        return []
[ "def", "_to_pack", "(", "self", ")", ":", "if", "not", "self", ".", "get_trans", "(", ")", ":", "return", "[", "(", "TYPE_UINT_LE", ",", "self", ".", "get_val", "(", ")", ",", "self", ".", "get_bl", "(", ")", ")", "]", "else", ":", "return", "[", "]" ]
https://github.com/P1sec/pycrate/blob/d12bbccf1df8c9c7891a26967a9d2635610ec5b8/pycrate_core/base.py#L762-L769
intel/fMBT
a221c55cd7b6367aa458781b134ae155aa47a71f
utils/fmbtgti.py
python
_Paths.oirArgsList
(self, bitmap)
Returns list of alternative OIR parameters associated to the bitmap by appropriate .fmbtoirrc file
Returns list of alternative OIR parameters associated to the bitmap by appropriate .fmbtoirrc file
[ "Returns", "list", "of", "alternative", "OIR", "parameters", "associated", "to", "the", "bitmap", "by", "appropriate", ".", "fmbtoirrc", "file" ]
def oirArgsList(self, bitmap):
    """Returns list of alternative OIR parameters associated to the bitmap
    by appropriate .fmbtoirrc file
    """
    if bitmap in self._oirAL:
        return self._oirAL[bitmap]
    else:
        absBitmap = self.abspaths(bitmap)[0]
        if absBitmap in self._oirAL:
            return self._oirAL[absBitmap]
        else:
            return None
[ "def", "oirArgsList", "(", "self", ",", "bitmap", ")", ":", "if", "bitmap", "in", "self", ".", "_oirAL", ":", "return", "self", ".", "_oirAL", "[", "bitmap", "]", "else", ":", "absBitmap", "=", "self", ".", "abspaths", "(", "bitmap", ")", "[", "0", "]", "if", "absBitmap", "in", "self", ".", "_oirAL", ":", "return", "self", ".", "_oirAL", "[", "absBitmap", "]", "else", ":", "return", "None" ]
https://github.com/intel/fMBT/blob/a221c55cd7b6367aa458781b134ae155aa47a71f/utils/fmbtgti.py#L1399-L1410
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-linux/x64/qrcode/image/svg.py
python
SvgPathImage.drawrect
(self, row, col)
[]
def drawrect(self, row, col):
    # (x, y)
    self._points.add((col, row))
[ "def", "drawrect", "(", "self", ",", "row", ",", "col", ")", ":", "# (x, y)", "self", ".", "_points", ".", "add", "(", "(", "col", ",", "row", ")", ")" ]
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-linux/x64/qrcode/image/svg.py#L111-L113
ARISE-Initiative/robosuite
a5dfaf03cd769170881a1931d8f19c8eb72f531a
robosuite/controllers/base_controller.py
python
Controller.reset_goal
(self)
Resets the goal -- usually by setting to the goal to all zeros, but in some cases may be different (e.g.: OSC)
Resets the goal -- usually by setting to the goal to all zeros, but in some cases may be different (e.g.: OSC)
[ "Resets", "the", "goal", "--", "usually", "by", "setting", "to", "the", "goal", "to", "all", "zeros", "but", "in", "some", "cases", "may", "be", "different", "(", "e", ".", "g", ".", ":", "OSC", ")" ]
def reset_goal(self):
    """
    Resets the goal -- usually by setting to the goal to all zeros, but in some
    cases may be different (e.g.: OSC)
    """
    raise NotImplementedError
[ "def", "reset_goal", "(", "self", ")", ":", "raise", "NotImplementedError" ]
https://github.com/ARISE-Initiative/robosuite/blob/a5dfaf03cd769170881a1931d8f19c8eb72f531a/robosuite/controllers/base_controller.py#L202-L206
jazzband/django-authority
f58bbef27eca18a818fce6f5c25e651c552386c1
authority/templatetags/permissions.py
python
permission_request_delete_link
(context, perm)
return {"url": None}
Renders a html link to the delete view of the given permission request. Returns no content if the request-user has no permission to delete foreign permissions.
Renders a html link to the delete view of the given permission request. Returns no content if the request-user has no permission to delete foreign permissions.
[ "Renders", "a", "html", "link", "to", "the", "delete", "view", "of", "the", "given", "permission", "request", ".", "Returns", "no", "content", "if", "the", "request", "-", "user", "has", "no", "permission", "to", "delete", "foreign", "permissions", "." ]
def permission_request_delete_link(context, perm):
    """
    Renders a html link to the delete view of the given permission request.
    Returns no content if the request-user has no permission to delete foreign
    permissions.
    """
    user = context["request"].user
    if user.is_authenticated:
        link_kwargs = base_link(context, perm, "authority-delete-permission-request")
        if user.has_perm("authority.delete_permission"):
            link_kwargs["is_requestor"] = False
            return link_kwargs
        if not perm.approved and perm.user == user:
            link_kwargs["is_requestor"] = True
            return link_kwargs
    return {"url": None}
[ "def", "permission_request_delete_link", "(", "context", ",", "perm", ")", ":", "user", "=", "context", "[", "\"request\"", "]", ".", "user", "if", "user", ".", "is_authenticated", ":", "link_kwargs", "=", "base_link", "(", "context", ",", "perm", ",", "\"authority-delete-permission-request\"", ")", "if", "user", ".", "has_perm", "(", "\"authority.delete_permission\"", ")", ":", "link_kwargs", "[", "\"is_requestor\"", "]", "=", "False", "return", "link_kwargs", "if", "not", "perm", ".", "approved", "and", "perm", ".", "user", "==", "user", ":", "link_kwargs", "[", "\"is_requestor\"", "]", "=", "True", "return", "link_kwargs", "return", "{", "\"url\"", ":", "None", "}" ]
https://github.com/jazzband/django-authority/blob/f58bbef27eca18a818fce6f5c25e651c552386c1/authority/templatetags/permissions.py#L435-L450
django-nonrel/django-nonrel
4fbfe7344481a5eab8698f79207f09124310131b
django/template/base.py
python
Variable._resolve_lookup
(self, context)
return current
Performs resolution of a real variable (i.e. not a literal) against the given context. As indicated by the method's name, this method is an implementation detail and shouldn't be called by external code. Use Variable.resolve() instead.
Performs resolution of a real variable (i.e. not a literal) against the given context.
[ "Performs", "resolution", "of", "a", "real", "variable", "(", "i", ".", "e", ".", "not", "a", "literal", ")", "against", "the", "given", "context", "." ]
def _resolve_lookup(self, context):
    """
    Performs resolution of a real variable (i.e. not a literal) against the
    given context.

    As indicated by the method's name, this method is an implementation
    detail and shouldn't be called by external code. Use Variable.resolve()
    instead.
    """
    current = context
    try: # catch-all for silent variable failures
        for bit in self.lookups:
            try: # dictionary lookup
                current = current[bit]
            except (TypeError, AttributeError, KeyError):
                try: # attribute lookup
                    current = getattr(current, bit)
                except (TypeError, AttributeError):
                    try: # list-index lookup
                        current = current[int(bit)]
                    except (IndexError, # list index out of range
                            ValueError, # invalid literal for int()
                            KeyError,   # current is a dict without `int(bit)` key
                            TypeError,  # unsubscriptable object
                            ):
                        raise VariableDoesNotExist("Failed lookup for key [%s] in %r", (bit, current)) # missing attribute
            if callable(current):
                if getattr(current, 'alters_data', False):
                    current = settings.TEMPLATE_STRING_IF_INVALID
                else:
                    try: # method call (assuming no args required)
                        current = current()
                    except TypeError: # arguments *were* required
                        # GOTCHA: This will also catch any TypeError
                        # raised in the function itself.
                        current = settings.TEMPLATE_STRING_IF_INVALID # invalid method call
    except Exception, e:
        if getattr(e, 'silent_variable_failure', False):
            current = settings.TEMPLATE_STRING_IF_INVALID
        else:
            raise

    return current
[ "def", "_resolve_lookup", "(", "self", ",", "context", ")", ":", "current", "=", "context", "try", ":", "# catch-all for silent variable failures", "for", "bit", "in", "self", ".", "lookups", ":", "try", ":", "# dictionary lookup", "current", "=", "current", "[", "bit", "]", "except", "(", "TypeError", ",", "AttributeError", ",", "KeyError", ")", ":", "try", ":", "# attribute lookup", "current", "=", "getattr", "(", "current", ",", "bit", ")", "except", "(", "TypeError", ",", "AttributeError", ")", ":", "try", ":", "# list-index lookup", "current", "=", "current", "[", "int", "(", "bit", ")", "]", "except", "(", "IndexError", ",", "# list index out of range", "ValueError", ",", "# invalid literal for int()", "KeyError", ",", "# current is a dict without `int(bit)` key", "TypeError", ",", "# unsubscriptable object", ")", ":", "raise", "VariableDoesNotExist", "(", "\"Failed lookup for key [%s] in %r\"", ",", "(", "bit", ",", "current", ")", ")", "# missing attribute", "if", "callable", "(", "current", ")", ":", "if", "getattr", "(", "current", ",", "'alters_data'", ",", "False", ")", ":", "current", "=", "settings", ".", "TEMPLATE_STRING_IF_INVALID", "else", ":", "try", ":", "# method call (assuming no args required)", "current", "=", "current", "(", ")", "except", "TypeError", ":", "# arguments *were* required", "# GOTCHA: This will also catch any TypeError", "# raised in the function itself.", "current", "=", "settings", ".", "TEMPLATE_STRING_IF_INVALID", "# invalid method call", "except", "Exception", ",", "e", ":", "if", "getattr", "(", "e", ",", "'silent_variable_failure'", ",", "False", ")", ":", "current", "=", "settings", ".", "TEMPLATE_STRING_IF_INVALID", "else", ":", "raise", "return", "current" ]
https://github.com/django-nonrel/django-nonrel/blob/4fbfe7344481a5eab8698f79207f09124310131b/django/template/base.py#L667-L709
rootpy/rootpy
3926935e1f2100d8ba68070c2ab44055d4800f73
rootpy/extern/lockfile/__init__.py
python
LockBase.is_locked
(self)
Tell whether or not the file is locked.
Tell whether or not the file is locked.
[ "Tell", "whether", "or", "not", "the", "file", "is", "locked", "." ]
def is_locked(self):
    """
    Tell whether or not the file is locked.
    """
    raise NotImplemented("implement in subclass")
[ "def", "is_locked", "(", "self", ")", ":", "raise", "NotImplemented", "(", "\"implement in subclass\"", ")" ]
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/extern/lockfile/__init__.py#L216-L220
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/eventlet-0.24.1/eventlet/green/zmq.py
python
Socket.send
(self, msg, flags=0, copy=True, track=False)
A send method that's safe to use when multiple greenthreads are calling send, send_multipart, recv and recv_multipart on the same socket.
A send method that's safe to use when multiple greenthreads are calling send, send_multipart, recv and recv_multipart on the same socket.
[ "A", "send", "method", "that", "s", "safe", "to", "use", "when", "multiple", "greenthreads", "are", "calling", "send", "send_multipart", "recv", "and", "recv_multipart", "on", "the", "same", "socket", "." ]
def send(self, msg, flags=0, copy=True, track=False):
    """A send method that's safe to use when multiple greenthreads
    are calling send, send_multipart, recv and recv_multipart on
    the same socket.
    """
    if flags & NOBLOCK:
        result = _Socket_send(self, msg, flags, copy, track)
        # Instead of calling both wake methods, could call
        # self.getsockopt(EVENTS) which would trigger wakeups if
        # needed.
        self._eventlet_send_event.wake()
        self._eventlet_recv_event.wake()
        return result

    # TODO: pyzmq will copy the message buffer and create Message
    # objects under some circumstances. We could do that work here
    # once to avoid doing it every time the send is retried.
    flags |= NOBLOCK
    with self._eventlet_send_lock:
        while True:
            try:
                return _Socket_send(self, msg, flags, copy, track)
            except ZMQError as e:
                if e.errno == EAGAIN:
                    self._eventlet_send_event.block()
                else:
                    raise
            finally:
                # The call to send processes 0mq events and may
                # make the socket ready to recv. Wake the next
                # receiver. (Could check EVENTS for POLLIN here)
                self._eventlet_recv_event.wake()
[ "def", "send", "(", "self", ",", "msg", ",", "flags", "=", "0", ",", "copy", "=", "True", ",", "track", "=", "False", ")", ":", "if", "flags", "&", "NOBLOCK", ":", "result", "=", "_Socket_send", "(", "self", ",", "msg", ",", "flags", ",", "copy", ",", "track", ")", "# Instead of calling both wake methods, could call", "# self.getsockopt(EVENTS) which would trigger wakeups if", "# needed.", "self", ".", "_eventlet_send_event", ".", "wake", "(", ")", "self", ".", "_eventlet_recv_event", ".", "wake", "(", ")", "return", "result", "# TODO: pyzmq will copy the message buffer and create Message", "# objects under some circumstances. We could do that work here", "# once to avoid doing it every time the send is retried.", "flags", "|=", "NOBLOCK", "with", "self", ".", "_eventlet_send_lock", ":", "while", "True", ":", "try", ":", "return", "_Socket_send", "(", "self", ",", "msg", ",", "flags", ",", "copy", ",", "track", ")", "except", "ZMQError", "as", "e", ":", "if", "e", ".", "errno", "==", "EAGAIN", ":", "self", ".", "_eventlet_send_event", ".", "block", "(", ")", "else", ":", "raise", "finally", ":", "# The call to send processes 0mq events and may", "# make the socket ready to recv. Wake the next", "# receiver. (Could check EVENTS for POLLIN here)", "self", ".", "_eventlet_recv_event", ".", "wake", "(", ")" ]
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/eventlet-0.24.1/eventlet/green/zmq.py#L276-L307
metabrainz/listenbrainz-server
391a0b91ac3a48398027467651ce3160765c7f37
listenbrainz/webserver/views/user.py
python
recommendation_playlists
(user_name: str)
return render_template( "playlists/playlists.html", active_section="recommendations", props=ujson.dumps(props), user=user )
Show playlists created for user
Show playlists created for user
[ "Show", "playlists", "created", "for", "user" ]
def recommendation_playlists(user_name: str):
    """ Show playlists created for user """

    offset = request.args.get('offset', 0)
    try:
        offset = int(offset)
    except ValueError:
        raise BadRequest("Incorrect int argument offset: %s" % request.args.get("offset"))

    count = request.args.get("count", DEFAULT_NUMBER_OF_PLAYLISTS_PER_CALL)
    try:
        count = int(count)
    except ValueError:
        raise BadRequest("Incorrect int argument count: %s" % request.args.get("count"))

    user = _get_user(user_name)
    user_data = {
        "name": user.musicbrainz_id,
        "id": user.id,
    }

    playlists = []
    user_playlists, playlist_count = get_playlists_created_for_user(
        user.id, False, count, offset)
    for playlist in user_playlists:
        playlists.append(serialize_jspf(playlist))

    props = {
        "playlists": playlists,
        "user": user_data,
        "active_section": "recommendations",
        "playlist_count": playlist_count,
    }

    return render_template(
        "playlists/playlists.html",
        active_section="recommendations",
        props=ujson.dumps(props),
        user=user
    )
[ "def", "recommendation_playlists", "(", "user_name", ":", "str", ")", ":", "offset", "=", "request", ".", "args", ".", "get", "(", "'offset'", ",", "0", ")", "try", ":", "offset", "=", "int", "(", "offset", ")", "except", "ValueError", ":", "raise", "BadRequest", "(", "\"Incorrect int argument offset: %s\"", "%", "request", ".", "args", ".", "get", "(", "\"offset\"", ")", ")", "count", "=", "request", ".", "args", ".", "get", "(", "\"count\"", ",", "DEFAULT_NUMBER_OF_PLAYLISTS_PER_CALL", ")", "try", ":", "count", "=", "int", "(", "count", ")", "except", "ValueError", ":", "raise", "BadRequest", "(", "\"Incorrect int argument count: %s\"", "%", "request", ".", "args", ".", "get", "(", "\"count\"", ")", ")", "user", "=", "_get_user", "(", "user_name", ")", "user_data", "=", "{", "\"name\"", ":", "user", ".", "musicbrainz_id", ",", "\"id\"", ":", "user", ".", "id", ",", "}", "playlists", "=", "[", "]", "user_playlists", ",", "playlist_count", "=", "get_playlists_created_for_user", "(", "user", ".", "id", ",", "False", ",", "count", ",", "offset", ")", "for", "playlist", "in", "user_playlists", ":", "playlists", ".", "append", "(", "serialize_jspf", "(", "playlist", ")", ")", "props", "=", "{", "\"playlists\"", ":", "playlists", ",", "\"user\"", ":", "user_data", ",", "\"active_section\"", ":", "\"recommendations\"", ",", "\"playlist_count\"", ":", "playlist_count", ",", "}", "return", "render_template", "(", "\"playlists/playlists.html\"", ",", "active_section", "=", "\"recommendations\"", ",", "props", "=", "ujson", ".", "dumps", "(", "props", ")", ",", "user", "=", "user", ")" ]
https://github.com/metabrainz/listenbrainz-server/blob/391a0b91ac3a48398027467651ce3160765c7f37/listenbrainz/webserver/views/user.py#L269-L309
beeware/ouroboros
a29123c6fab6a807caffbb7587cf548e0c370296
ouroboros/decimal.py
python
Decimal.to_eng_string
(self, context=None)
return self.__str__(eng=True, context=context)
Convert to engineering-type string. Engineering notation has an exponent which is a multiple of 3, so there are up to 3 digits left of the decimal place. Same rules for when in exponential and when as a value as in __str__.
Convert to engineering-type string.
[ "Convert", "to", "engineering", "-", "type", "string", "." ]
def to_eng_string(self, context=None):
    """Convert to engineering-type string.

    Engineering notation has an exponent which is a multiple of 3, so
    there are up to 3 digits left of the decimal place.

    Same rules for when in exponential and when as a value as in
    __str__.
    """
    return self.__str__(eng=True, context=context)
[ "def", "to_eng_string", "(", "self", ",", "context", "=", "None", ")", ":", "return", "self", ".", "__str__", "(", "eng", "=", "True", ",", "context", "=", "context", ")" ]
https://github.com/beeware/ouroboros/blob/a29123c6fab6a807caffbb7587cf548e0c370296/ouroboros/decimal.py#L1077-L1085
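The stdlib decimal module ships the same method, so the multiple-of-3 exponent rule can be checked directly:

from decimal import Decimal

print(Decimal('123E+1').to_eng_string())   # '1.23E+3'
print(Decimal('10e-1').to_eng_string())    # '1.0'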
fonttools/fonttools
892322aaff6a89bea5927379ec06bc0da3dfb7df
Lib/fontTools/varLib/merger.py
python
_PairPosFormat2_merge
(self, lst, merger)
[]
def _PairPosFormat2_merge(self, lst, merger):
    assert allEqual([l.ValueFormat2 == 0 for l in lst if l.Class1Record]), "Report bug against fonttools."

    merger.mergeObjects(self, lst,
                        exclude=('Coverage',
                                 'ClassDef1', 'Class1Count',
                                 'ClassDef2', 'Class2Count',
                                 'Class1Record',
                                 'ValueFormat1', 'ValueFormat2'))

    # Align coverages
    glyphs, _ = _merge_GlyphOrders(merger.font,
                                   [v.Coverage.glyphs for v in lst])
    self.Coverage.glyphs = glyphs

    # Currently, if the coverage of PairPosFormat2 subtables are different,
    # we do NOT bother walking down the subtable list when filling in new
    # rows for alignment. As such, this is only correct if current subtable
    # is the last subtable in the lookup. Ensure that.
    #
    # Note that our canonicalization process merges trailing PairPosFormat2's,
    # so in reality this is rare.
    for l, subtables in zip(lst, merger.lookup_subtables):
        if l.Coverage.glyphs != glyphs:
            assert l == subtables[-1]

    matrices = _PairPosFormat2_align_matrices(self, lst, merger.font)

    self.Class1Record = list(matrices[0])  # TODO move merger to be selfless
    merger.mergeLists(self.Class1Record, matrices)
[ "def", "_PairPosFormat2_merge", "(", "self", ",", "lst", ",", "merger", ")", ":", "assert", "allEqual", "(", "[", "l", ".", "ValueFormat2", "==", "0", "for", "l", "in", "lst", "if", "l", ".", "Class1Record", "]", ")", ",", "\"Report bug against fonttools.\"", "merger", ".", "mergeObjects", "(", "self", ",", "lst", ",", "exclude", "=", "(", "'Coverage'", ",", "'ClassDef1'", ",", "'Class1Count'", ",", "'ClassDef2'", ",", "'Class2Count'", ",", "'Class1Record'", ",", "'ValueFormat1'", ",", "'ValueFormat2'", ")", ")", "# Align coverages", "glyphs", ",", "_", "=", "_merge_GlyphOrders", "(", "merger", ".", "font", ",", "[", "v", ".", "Coverage", ".", "glyphs", "for", "v", "in", "lst", "]", ")", "self", ".", "Coverage", ".", "glyphs", "=", "glyphs", "# Currently, if the coverage of PairPosFormat2 subtables are different,", "# we do NOT bother walking down the subtable list when filling in new", "# rows for alignment. As such, this is only correct if current subtable", "# is the last subtable in the lookup. Ensure that.", "#", "# Note that our canonicalization process merges trailing PairPosFormat2's,", "# so in reality this is rare.", "for", "l", ",", "subtables", "in", "zip", "(", "lst", ",", "merger", ".", "lookup_subtables", ")", ":", "if", "l", ".", "Coverage", ".", "glyphs", "!=", "glyphs", ":", "assert", "l", "==", "subtables", "[", "-", "1", "]", "matrices", "=", "_PairPosFormat2_align_matrices", "(", "self", ",", "lst", ",", "merger", ".", "font", ")", "self", ".", "Class1Record", "=", "list", "(", "matrices", "[", "0", "]", ")", "# TODO move merger to be selfless", "merger", ".", "mergeLists", "(", "self", ".", "Class1Record", ",", "matrices", ")" ]
https://github.com/fonttools/fonttools/blob/892322aaff6a89bea5927379ec06bc0da3dfb7df/Lib/fontTools/varLib/merger.py#L491-L520
dexy/dexy
323c1806e51f75435e11d2265703e68f46c8aef3
dexy/node.py
python
Node.__call__
(self, *args, **kw)
[]
def __call__(self, *args, **kw):
    for inpt in self.inputs:
        for task in inpt:
            task()

    self.wrapper.current_task = self
    self.run()
    self.wrapper.current_task = None
[ "def", "__call__", "(", "self", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "for", "inpt", "in", "self", ".", "inputs", ":", "for", "task", "in", "inpt", ":", "task", "(", ")", "self", ".", "wrapper", ".", "current_task", "=", "self", "self", ".", "run", "(", ")", "self", ".", "wrapper", ".", "current_task", "=", "None" ]
https://github.com/dexy/dexy/blob/323c1806e51f75435e11d2265703e68f46c8aef3/dexy/node.py#L257-L263
rwth-i6/returnn
f2d718a197a280b0d5f0fd91a7fcb8658560dddb
returnn/config.py
python
network_json_from_config
(config, mask=None)
return json_content
:type config: Config
:param str mask: "unity", "none" or "dropout"
:rtype: dict[str]
:type config: Config :param str mask: "unity", "none" or "dropout" :rtype: dict[str]
[ ":", "type", "config", ":", "Config", ":", "param", "str", "mask", ":", "unity", "none", "or", "dropout", ":", "rtype", ":", "dict", "[", "str", "]" ]
def network_json_from_config(config, mask=None):
    """
    :type config: Config
    :param str mask: "unity", "none" or "dropout"
    :rtype: dict[str]
    """
    from returnn.log import log
    json_content = None
    if config.has("network") and config.is_typed("network"):
        json_content = config.typed_value("network")
        assert isinstance(json_content, dict)
        assert json_content
    elif config.network_topology_json:
        start_var = config.network_topology_json.find('(config:', 0)  # e.g. ..., "n_out" : (config:var), ...
        while start_var > 0:
            end_var = config.network_topology_json.find(')', start_var)
            assert end_var > 0, "invalid variable syntax at " + str(start_var)
            var = config.network_topology_json[start_var+8:end_var]
            assert config.has(var), "could not find variable " + var
            config.network_topology_json = (
                config.network_topology_json[:start_var] +
                config.value(var, "") +
                config.network_topology_json[end_var+1:])
            print("substituting variable %s with %s" % (var, config.value(var, "")), file=log.v4)
            start_var = config.network_topology_json.find('(config:', start_var+1)
        try:
            import json
            json_content = json.loads(config.network_topology_json)
        except ValueError as e:
            print("----- BEGIN JSON CONTENT -----", file=log.v3)
            print(config.network_topology_json, file=log.v3)
            print("------ END JSON CONTENT ------", file=log.v3)
            assert False, "invalid json content, %r" % e
        assert isinstance(json_content, dict)
        if 'network' in json_content:
            json_content = json_content['network']
        assert json_content
    if not json_content:
        if not mask:
            if sum(config.float_list('dropout', [0])) > 0.0:
                mask = "dropout"
        from returnn.network_description import LayerNetworkDescription
        description = LayerNetworkDescription.from_config(config)
        json_content = description.to_json_content(mask=mask)
    return json_content
[ "def", "network_json_from_config", "(", "config", ",", "mask", "=", "None", ")", ":", "from", "returnn", ".", "log", "import", "log", "json_content", "=", "None", "if", "config", ".", "has", "(", "\"network\"", ")", "and", "config", ".", "is_typed", "(", "\"network\"", ")", ":", "json_content", "=", "config", ".", "typed_value", "(", "\"network\"", ")", "assert", "isinstance", "(", "json_content", ",", "dict", ")", "assert", "json_content", "elif", "config", ".", "network_topology_json", ":", "start_var", "=", "config", ".", "network_topology_json", ".", "find", "(", "'(config:'", ",", "0", ")", "# e.g. ..., \"n_out\" : (config:var), ...", "while", "start_var", ">", "0", ":", "end_var", "=", "config", ".", "network_topology_json", ".", "find", "(", "')'", ",", "start_var", ")", "assert", "end_var", ">", "0", ",", "\"invalid variable syntax at \"", "+", "str", "(", "start_var", ")", "var", "=", "config", ".", "network_topology_json", "[", "start_var", "+", "8", ":", "end_var", "]", "assert", "config", ".", "has", "(", "var", ")", ",", "\"could not find variable \"", "+", "var", "config", ".", "network_topology_json", "=", "(", "config", ".", "network_topology_json", "[", ":", "start_var", "]", "+", "config", ".", "value", "(", "var", ",", "\"\"", ")", "+", "config", ".", "network_topology_json", "[", "end_var", "+", "1", ":", "]", ")", "print", "(", "\"substituting variable %s with %s\"", "%", "(", "var", ",", "config", ".", "value", "(", "var", ",", "\"\"", ")", ")", ",", "file", "=", "log", ".", "v4", ")", "start_var", "=", "config", ".", "network_topology_json", ".", "find", "(", "'(config:'", ",", "start_var", "+", "1", ")", "try", ":", "import", "json", "json_content", "=", "json", ".", "loads", "(", "config", ".", "network_topology_json", ")", "except", "ValueError", "as", "e", ":", "print", "(", "\"----- BEGIN JSON CONTENT -----\"", ",", "file", "=", "log", ".", "v3", ")", "print", "(", "config", ".", "network_topology_json", ",", "file", "=", "log", ".", "v3", ")", "print", "(", "\"------ END JSON CONTENT ------\"", ",", "file", "=", "log", ".", "v3", ")", "assert", "False", ",", "\"invalid json content, %r\"", "%", "e", "assert", "isinstance", "(", "json_content", ",", "dict", ")", "if", "'network'", "in", "json_content", ":", "json_content", "=", "json_content", "[", "'network'", "]", "assert", "json_content", "if", "not", "json_content", ":", "if", "not", "mask", ":", "if", "sum", "(", "config", ".", "float_list", "(", "'dropout'", ",", "[", "0", "]", ")", ")", ">", "0.0", ":", "mask", "=", "\"dropout\"", "from", "returnn", ".", "network_description", "import", "LayerNetworkDescription", "description", "=", "LayerNetworkDescription", ".", "from_config", "(", "config", ")", "json_content", "=", "description", ".", "to_json_content", "(", "mask", "=", "mask", ")", "return", "json_content" ]
https://github.com/rwth-i6/returnn/blob/f2d718a197a280b0d5f0fd91a7fcb8658560dddb/returnn/config.py#L584-L626
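The (config:var) splice in the record above is plain string surgery; a minimal standalone sketch of the same loop, with a values dict standing in for the Config object:

def substitute(json_text, values):
    start = json_text.find('(config:', 0)
    while start > 0:
        end = json_text.find(')', start)
        assert end > 0, "invalid variable syntax at " + str(start)
        var = json_text[start + 8:end]  # text between '(config:' and ')'
        json_text = json_text[:start] + str(values[var]) + json_text[end + 1:]
        start = json_text.find('(config:', start + 1)
    return json_text

print(substitute('{"out": {"n_out": (config:num_outputs)}}', {"num_outputs": 10}))
# {"out": {"n_out": 10}}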
francisck/DanderSpritz_docs
86bb7caca5a957147f120b18bb5c31f299914904
Python/Core/Lib/_abcoll.py
python
MutableSet.remove
(self, value)
Remove an element. If not a member, raise a KeyError.
Remove an element. If not a member, raise a KeyError.
[ "Remove", "an", "element", ".", "If", "not", "a", "member", "raise", "a", "KeyError", "." ]
def remove(self, value):
    """Remove an element. If not a member, raise a KeyError."""
    if value not in self:
        raise KeyError(value)
    self.discard(value)
[ "def", "remove", "(", "self", ",", "value", ")", ":", "if", "value", "not", "in", "self", ":", "raise", "KeyError", "(", "value", ")", "self", ".", "discard", "(", "value", ")" ]
https://github.com/francisck/DanderSpritz_docs/blob/86bb7caca5a957147f120b18bb5c31f299914904/Python/Core/Lib/_abcoll.py#L271-L275
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/encodings/iso8859_13.py
python
getregentry
()
return codecs.CodecInfo( name='iso8859-13', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, )
[]
def getregentry():
    return codecs.CodecInfo(
        name='iso8859-13',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
[ "def", "getregentry", "(", ")", ":", "return", "codecs", ".", "CodecInfo", "(", "name", "=", "'iso8859-13'", ",", "encode", "=", "Codec", "(", ")", ".", "encode", ",", "decode", "=", "Codec", "(", ")", ".", "decode", ",", "incrementalencoder", "=", "IncrementalEncoder", ",", "incrementaldecoder", "=", "IncrementalDecoder", ",", "streamreader", "=", "StreamReader", ",", "streamwriter", "=", "StreamWriter", ",", ")" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/encodings/iso8859_13.py#L33-L42
CenterForOpenScience/osf.io
cc02691be017e61e2cd64f19b848b2f4c18dcc84
osf/models/schema_response.py
python
SchemaResponse._validate_accept_trigger
(self, user)
Validate usage of the "accept" trigger on the underlying ApprovalsMachine

"accept" has three valid usages:

First, "accept" is called from within the "approve" trigger once all required
approvals have been granted. This call should receive the user who issued the
final "approve" so that the correct SchemaResponseAction can be logged.

Second, moderators "accept" a SchemaResponse if it belongs to a moderated
parent resource. In this case, the user must have the correct permission on
the parent's provider.

Finally, "accept" can be called without a user in order to bypass the need
for approvals (the "internal accept shortcut") to make life easier for OSF
scripts and utilities.

"accept" can only be invoked from UNAPPROVED and PENDING_MODERATION, calling
from any other state will result in a MachineError prior to this validation.
Validate usage of the "accept" trigger on the underlying ApprovalsMachine
[ "Validate", "usage", "of", "the", "accept", "trigger", "on", "the", "underlying", "ApprovalsMachine" ]
def _validate_accept_trigger(self, user): """Validate usage of the "accept" trigger on the underlying ApprovalsMachine "accept" has three valid usages: First, "accept" is called from within the "approve" trigger once all required approvals have been granted. This call should receive the user who issued the final "approve" so that the correct SchemaResponseAction can be logged. Second, moderators "accept" a SchemaResponse if it belongs to a moderated parent resource. In this case, the user must have the correct permission on the parent's provider. Finally, "accept" can be called without a user in order to bypass the need for approvals (the "internal accept shortcut") to make life easier for OSF scripts and utilities. "accept" can only be invoked from UNAPPROVED and PENDING_MODERATION, calling from any other state will result in a MachineError prior to this validation. """ if self.state is ApprovalStates.UNAPPROVED: # user = None -> internal accept shortcut # not self.pending_approvers.exists() -> called from within "approve" if user is None or not self.pending_approvers.exists(): return raise MachineError( f'Invalid usage of "accept" trigger from UNAPPROVED state ' f'against SchemaResponse with id [{self._id}]' ) if not user.has_perm('accept_submissions', self.parent.provider): raise PermissionsError( f'User {user} is not a moderator on {self.parent.provider} and does not ' f'have permission to "accept" SchemaResponse with id [{self._id}]' )
[ "def", "_validate_accept_trigger", "(", "self", ",", "user", ")", ":", "if", "self", ".", "state", "is", "ApprovalStates", ".", "UNAPPROVED", ":", "# user = None -> internal accept shortcut", "# not self.pending_approvers.exists() -> called from within \"approve\"", "if", "user", "is", "None", "or", "not", "self", ".", "pending_approvers", ".", "exists", "(", ")", ":", "return", "raise", "MachineError", "(", "f'Invalid usage of \"accept\" trigger from UNAPPROVED state '", "f'against SchemaResponse with id [{self._id}]'", ")", "if", "not", "user", ".", "has_perm", "(", "'accept_submissions'", ",", "self", ".", "parent", ".", "provider", ")", ":", "raise", "PermissionsError", "(", "f'User {user} is not a moderator on {self.parent.provider} and does not '", "f'have permission to \"accept\" SchemaResponse with id [{self._id}]'", ")" ]
https://github.com/CenterForOpenScience/osf.io/blob/cc02691be017e61e2cd64f19b848b2f4c18dcc84/osf/models/schema_response.py#L384-L415
general03/flask-autoindex
424246242c9f40aeb9ac2c8c63f4d2234024256e
.eggs/click-7.1.1-py3.7.egg/click/decorators.py
python
help_option
(*param_decls, **attrs)
return decorator
Adds a ``--help`` option which immediately ends the program printing out the help page. This is usually unnecessary to add as this is added by default to all commands unless suppressed. Like :func:`version_option`, this is implemented as eager option that prints in the callback and exits. All arguments are forwarded to :func:`option`.
Adds a ``--help`` option which immediately ends the program printing out the help page. This is usually unnecessary to add as this is added by default to all commands unless suppressed.
[ "Adds", "a", "--", "help", "option", "which", "immediately", "ends", "the", "program", "printing", "out", "the", "help", "page", ".", "This", "is", "usually", "unnecessary", "to", "add", "as", "this", "is", "added", "by", "default", "to", "all", "commands", "unless", "suppressed", "." ]
def help_option(*param_decls, **attrs): """Adds a ``--help`` option which immediately ends the program printing out the help page. This is usually unnecessary to add as this is added by default to all commands unless suppressed. Like :func:`version_option`, this is implemented as eager option that prints in the callback and exits. All arguments are forwarded to :func:`option`. """ def decorator(f): def callback(ctx, param, value): if value and not ctx.resilient_parsing: echo(ctx.get_help(), color=ctx.color) ctx.exit() attrs.setdefault("is_flag", True) attrs.setdefault("expose_value", False) attrs.setdefault("help", "Show this message and exit.") attrs.setdefault("is_eager", True) attrs["callback"] = callback return option(*(param_decls or ("--help",)), **attrs)(f) return decorator
[ "def", "help_option", "(", "*", "param_decls", ",", "*", "*", "attrs", ")", ":", "def", "decorator", "(", "f", ")", ":", "def", "callback", "(", "ctx", ",", "param", ",", "value", ")", ":", "if", "value", "and", "not", "ctx", ".", "resilient_parsing", ":", "echo", "(", "ctx", ".", "get_help", "(", ")", ",", "color", "=", "ctx", ".", "color", ")", "ctx", ".", "exit", "(", ")", "attrs", ".", "setdefault", "(", "\"is_flag\"", ",", "True", ")", "attrs", ".", "setdefault", "(", "\"expose_value\"", ",", "False", ")", "attrs", ".", "setdefault", "(", "\"help\"", ",", "\"Show this message and exit.\"", ")", "attrs", ".", "setdefault", "(", "\"is_eager\"", ",", "True", ")", "attrs", "[", "\"callback\"", "]", "=", "callback", "return", "option", "(", "*", "(", "param_decls", "or", "(", "\"--help\"", ",", ")", ")", ",", "*", "*", "attrs", ")", "(", "f", ")", "return", "decorator" ]
https://github.com/general03/flask-autoindex/blob/424246242c9f40aeb9ac2c8c63f4d2234024256e/.eggs/click-7.1.1-py3.7.egg/click/decorators.py#L309-L333
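The help_option entry above (a vendored copy of click 7.1) is clearest in use. A minimal sketch, assuming the click package is installed; the command name and the extra "-h" alias are hypothetical:

    import click

    @click.command()
    @click.help_option("--help", "-h")  # eager flag: prints the help page and exits
    def greet():
        """Print a short greeting."""
        click.echo("hello")

    if __name__ == "__main__":
        greet()

Because the option is eager and does not expose a value, running `greet -h` prints the generated help text before any other parameter is processed.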
Neoteroi/BlackSheep
2936cdd3ba6fceacd230a02c99241bde1d06b265
blacksheep/client/session.py
python
_get_default_pools_for_loop
(loop)
This function is meant to help users of the ClientSession: it prevents degrading performance by creating many pools when several ClientSession instances are created without specific HTTP connection pools.
This function is meant to help users of the ClientSession: it prevents degrading performance by creating many pools when several ClientSession instances are created without specific HTTP connection pools.
[ "This", "function", "is", "meant", "to", "help", "users", "of", "the", "ClientSession", ":", "it", "prevents", "degrading", "performance", "by", "creating", "many", "pools", "when", "several", "ClientSession", "instances", "are", "created", "without", "specific", "HTTP", "connection", "pools", "." ]
def _get_default_pools_for_loop(loop): """ This function is meant to help users of the ClientSession: it prevents degrading performance by creating many pools when several ClientSession instances are created without specific HTTP connection pools. """ loop_id = id(loop) try: return _default_pools_by_loop_id[loop_id] except KeyError: pools = ClientConnectionPools(loop) _default_pools_by_loop_id[loop_id] = pools return pools
[ "def", "_get_default_pools_for_loop", "(", "loop", ")", ":", "loop_id", "=", "id", "(", "loop", ")", "try", ":", "return", "_default_pools_by_loop_id", "[", "loop_id", "]", "except", "KeyError", ":", "pools", "=", "ClientConnectionPools", "(", "loop", ")", "_default_pools_by_loop_id", "[", "loop_id", "]", "=", "pools", "return", "pools" ]
https://github.com/Neoteroi/BlackSheep/blob/2936cdd3ba6fceacd230a02c99241bde1d06b265/blacksheep/client/session.py#L60-L73
tenpy/tenpy
bbdd3dbbdb511948eb0e6ba7ff619ac6ca657fff
tenpy/algorithms/mps_common.py
python
VariationalCompression.run
(self)
return TruncationError(max_trunc_err, 1. - 2. * max_trunc_err)
Run the compression. The state :attr:`psi` is compressed in place. .. warning :: Call this function directly after initializing the class, without modifying `psi` inbetween. A copy of :attr:`psi` is made during :meth:`init_env`! Returns ------- max_trunc_err : :class:`~tenpy.algorithms.truncation.TruncationError` The maximal truncation error of a two-site wave function.
Run the compression.
[ "Run", "the", "compression", "." ]
def run(self): """Run the compression. The state :attr:`psi` is compressed in place. .. warning :: Call this function directly after initializing the class, without modifying `psi` inbetween. A copy of :attr:`psi` is made during :meth:`init_env`! Returns ------- max_trunc_err : :class:`~tenpy.algorithms.truncation.TruncationError` The maximal truncation error of a two-site wave function. """ self.options.deprecated_alias("N_sweeps", "max_sweeps", "Also check out the other new convergence parameters " "min_N_sweeps and tol_theta_diff!") max_sweeps = self.options.get("max_sweeps", 2) min_sweeps = self.options.get("min_sweeps", 1) tol_diff = self._tol_theta_diff = self.options.get("tol_theta_diff", 1.e-8) if min_sweeps == max_sweeps and tol_diff is not None: warnings.warn("VariationalCompression with min_sweeps=max_sweeps: " "we recommend to set tol_theta_diff=None to avoid overhead") for i in range(max_sweeps): self.renormalize = [] self._theta_diff = [] max_trunc_err = self.sweep() if i + 1 >= min_sweeps and tol_diff is not None: max_diff = max(self._theta_diff[-(self.psi.L - self.n_optimize):]) if max_diff < tol_diff: logger.debug("break VariationalCompression after %d sweeps " "with theta_diff=%.2e", i + 1, max_diff) break if self.psi.finite: self.psi.norm *= max(self.renormalize) return TruncationError(max_trunc_err, 1. - 2. * max_trunc_err)
[ "def", "run", "(", "self", ")", ":", "self", ".", "options", ".", "deprecated_alias", "(", "\"N_sweeps\"", ",", "\"max_sweeps\"", ",", "\"Also check out the other new convergence parameters \"", "\"min_N_sweeps and tol_theta_diff!\"", ")", "max_sweeps", "=", "self", ".", "options", ".", "get", "(", "\"max_sweeps\"", ",", "2", ")", "min_sweeps", "=", "self", ".", "options", ".", "get", "(", "\"min_sweeps\"", ",", "1", ")", "tol_diff", "=", "self", ".", "_tol_theta_diff", "=", "self", ".", "options", ".", "get", "(", "\"tol_theta_diff\"", ",", "1.e-8", ")", "if", "min_sweeps", "==", "max_sweeps", "and", "tol_diff", "is", "not", "None", ":", "warnings", ".", "warn", "(", "\"VariationalCompression with min_sweeps=max_sweeps: \"", "\"we recommend to set tol_theta_diff=None to avoid overhead\"", ")", "for", "i", "in", "range", "(", "max_sweeps", ")", ":", "self", ".", "renormalize", "=", "[", "]", "self", ".", "_theta_diff", "=", "[", "]", "max_trunc_err", "=", "self", ".", "sweep", "(", ")", "if", "i", "+", "1", ">=", "min_sweeps", "and", "tol_diff", "is", "not", "None", ":", "max_diff", "=", "max", "(", "self", ".", "_theta_diff", "[", "-", "(", "self", ".", "psi", ".", "L", "-", "self", ".", "n_optimize", ")", ":", "]", ")", "if", "max_diff", "<", "tol_diff", ":", "logger", ".", "debug", "(", "\"break VariationalCompression after %d sweeps \"", "\"with theta_diff=%.2e\"", ",", "i", "+", "1", ",", "max_diff", ")", "break", "if", "self", ".", "psi", ".", "finite", ":", "self", ".", "psi", ".", "norm", "*=", "max", "(", "self", ".", "renormalize", ")", "return", "TruncationError", "(", "max_trunc_err", ",", "1.", "-", "2.", "*", "max_trunc_err", ")" ]
https://github.com/tenpy/tenpy/blob/bbdd3dbbdb511948eb0e6ba7ff619ac6ca657fff/tenpy/algorithms/mps_common.py#L1251-L1287
online-ml/river
3732f700da72642afe54095d4b252b05c5018c7d
river/tree/hoeffding_tree.py
python
HoeffdingTree._new_leaf
( self, initial_stats: dict = None, parent: typing.Union[HTLeaf, DTBranch] = None )
Create a new learning node. The characteristics of the learning node depend on the tree algorithm. Parameters ---------- initial_stats Target statistics set from the parent node. parent Parent node to inherit from. Returns ------- A new learning node.
Create a new learning node.
[ "Create", "a", "new", "learning", "node", "." ]
def _new_leaf( self, initial_stats: dict = None, parent: typing.Union[HTLeaf, DTBranch] = None ) -> HTLeaf: """Create a new learning node. The characteristics of the learning node depend on the tree algorithm. Parameters ---------- initial_stats Target statistics set from the parent node. parent Parent node to inherit from. Returns ------- A new learning node. """
[ "def", "_new_leaf", "(", "self", ",", "initial_stats", ":", "dict", "=", "None", ",", "parent", ":", "typing", ".", "Union", "[", "HTLeaf", ",", "DTBranch", "]", "=", "None", ")", "->", "HTLeaf", ":" ]
https://github.com/online-ml/river/blob/3732f700da72642afe54095d4b252b05c5018c7d/river/tree/hoeffding_tree.py#L206-L223
fabiocaccamo/django-maintenance-mode
e4102ed35644ab77af250344a37656b3312655ad
maintenance_mode/http.py
python
get_maintenance_response
(request)
return response
Return a '503 Service Unavailable' maintenance response.
Return a '503 Service Unavailable' maintenance response.
[ "Return", "a", "503", "Service", "Unavailable", "maintenance", "response", "." ]
def get_maintenance_response(request): """ Return a '503 Service Unavailable' maintenance response. """ if settings.MAINTENANCE_MODE_REDIRECT_URL: return redirect(settings.MAINTENANCE_MODE_REDIRECT_URL) context = {} if settings.MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT: try: get_request_context_func = import_string( settings.MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT) except ImportError: raise ImproperlyConfigured( 'settings.MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT ' 'is not a valid function path.' ) context = get_request_context_func(request=request) kwargs = {'context': context} if django.VERSION < (1, 8): kwargs = {'context_instance': RequestContext(request, context)} response = render(request, settings.MAINTENANCE_MODE_TEMPLATE, status=settings.MAINTENANCE_MODE_STATUS_CODE, **kwargs) response['Retry-After'] = settings.MAINTENANCE_MODE_RETRY_AFTER add_never_cache_headers(response) return response
[ "def", "get_maintenance_response", "(", "request", ")", ":", "if", "settings", ".", "MAINTENANCE_MODE_REDIRECT_URL", ":", "return", "redirect", "(", "settings", ".", "MAINTENANCE_MODE_REDIRECT_URL", ")", "context", "=", "{", "}", "if", "settings", ".", "MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT", ":", "try", ":", "get_request_context_func", "=", "import_string", "(", "settings", ".", "MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT", ")", "except", "ImportError", ":", "raise", "ImproperlyConfigured", "(", "'settings.MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT '", "'is not a valid function path.'", ")", "context", "=", "get_request_context_func", "(", "request", "=", "request", ")", "kwargs", "=", "{", "'context'", ":", "context", "}", "if", "django", ".", "VERSION", "<", "(", "1", ",", "8", ")", ":", "kwargs", "=", "{", "'context_instance'", ":", "RequestContext", "(", "request", ",", "context", ")", "}", "response", "=", "render", "(", "request", ",", "settings", ".", "MAINTENANCE_MODE_TEMPLATE", ",", "status", "=", "settings", ".", "MAINTENANCE_MODE_STATUS_CODE", ",", "*", "*", "kwargs", ")", "response", "[", "'Retry-After'", "]", "=", "settings", ".", "MAINTENANCE_MODE_RETRY_AFTER", "add_never_cache_headers", "(", "response", ")", "return", "response" ]
https://github.com/fabiocaccamo/django-maintenance-mode/blob/e4102ed35644ab77af250344a37656b3312655ad/maintenance_mode/http.py#L34-L64
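The get_maintenance_response entry above imports the dotted path in settings.MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT and calls it with request=... to build the template context. A minimal sketch of such a callable; the function and module names are hypothetical, only the setting name and call signature come from the code shown:

    # myproject/utils.py (hypothetical module)
    def get_maintenance_context(request):
        # The returned dict is rendered into MAINTENANCE_MODE_TEMPLATE.
        return {"path": request.path, "support_email": "ops@example.com"}

    # settings.py
    # MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT = "myproject.utils.get_maintenance_context"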
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/sympy/sympy/core/numbers.py
python
One._eval_order
(self, *symbols)
return
[]
def _eval_order(self, *symbols): return
[ "def", "_eval_order", "(", "self", ",", "*", "symbols", ")", ":", "return" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/core/numbers.py#L1966-L1967
sabri-zaki/EasY_HaCk
2a39ac384dd0d6fc51c0dd22e8d38cece683fdb9
.modules/.sqlmap/thirdparty/bottle/bottle.py
python
Bottle.add_route
(self, route)
Add a route object, but do not change the :data:`Route.app` attribute.
Add a route object, but do not change the :data:`Route.app` attribute.
[ "Add", "a", "route", "object", "but", "do", "not", "change", "the", ":", "data", ":", "Route", ".", "app", "attribute", "." ]
def add_route(self, route): """ Add a route object, but do not change the :data:`Route.app` attribute.""" self.routes.append(route) self.router.add(route.rule, route.method, route, name=route.name) if DEBUG: route.prepare()
[ "def", "add_route", "(", "self", ",", "route", ")", ":", "self", ".", "routes", ".", "append", "(", "route", ")", "self", ".", "router", ".", "add", "(", "route", ".", "rule", ",", "route", ".", "method", ",", "route", ",", "name", "=", "route", ".", "name", ")", "if", "DEBUG", ":", "route", ".", "prepare", "(", ")" ]
https://github.com/sabri-zaki/EasY_HaCk/blob/2a39ac384dd0d6fc51c0dd22e8d38cece683fdb9/.modules/.sqlmap/thirdparty/bottle/bottle.py#L834-L839
w3h/isf
6faf0a3df185465ec17369c90ccc16e2a03a1870
lib/thirdparty/pyreadline/console/ironpython_console.py
python
Console.text
(self, x, y, text, attr=None)
Write text at the given position.
Write text at the given position.
[ "Write", "text", "at", "the", "given", "position", "." ]
def text(self, x, y, text, attr=None): '''Write text at the given position.''' self.pos(x, y) self.write_color(text, attr)
[ "def", "text", "(", "self", ",", "x", ",", "y", ",", "text", ",", "attr", "=", "None", ")", ":", "self", ".", "pos", "(", "x", ",", "y", ")", "self", ".", "write_color", "(", "text", ",", "attr", ")" ]
https://github.com/w3h/isf/blob/6faf0a3df185465ec17369c90ccc16e2a03a1870/lib/thirdparty/pyreadline/console/ironpython_console.py#L260-L263
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/modules/osquery.py
python
file_
(attrs=None, where=None)
return _osquery_cmd(table="file", attrs=attrs, where=where)
Return file information from osquery CLI Example: .. code-block:: bash salt '*' osquery.file
Return file information from osquery
[ "Return", "file", "information", "from", "osquery" ]
def file_(attrs=None, where=None): """ Return file information from osquery CLI Example: .. code-block:: bash salt '*' osquery.file """ return _osquery_cmd(table="file", attrs=attrs, where=where)
[ "def", "file_", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "\"file\"", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/modules/osquery.py#L1016-L1026
analysiscenter/batchflow
294747da0bca309785f925be891441fdd824e9fa
batchflow/pipeline.py
python
Pipeline.delete_all_variables
(self)
Delete all variables
Delete all variables
[ "Delete", "all", "variables" ]
def delete_all_variables(self): """ Delete all variables """ self.variables = VariableDirectory()
[ "def", "delete_all_variables", "(", "self", ")", ":", "self", ".", "variables", "=", "VariableDirectory", "(", ")" ]
https://github.com/analysiscenter/batchflow/blob/294747da0bca309785f925be891441fdd824e9fa/batchflow/pipeline.py#L661-L663
pytorch/ignite
e452884afa36b63447b9029c4773f6f59cf41340
ignite/engine/__init__.py
python
supervised_training_step_tpu
( model: torch.nn.Module, optimizer: torch.optim.Optimizer, loss_fn: Union[Callable, torch.nn.Module], device: Optional[Union[str, torch.device]] = None, non_blocking: bool = False, prepare_batch: Callable = _prepare_batch, output_transform: Callable[[Any, Any, Any, torch.Tensor], Any] = lambda x, y, y_pred, loss: loss.item(), gradient_accumulation_steps: int = 1, )
return update
Factory function for supervised training using ``torch_xla``. Args: model: the model to train. optimizer: the optimizer to use. loss_fn: the loss function to use. device: device type specification (default: None). Applies to batches after starting the engine. Model *will not* be moved. Device can be CPU, TPU. non_blocking: if True and this copy is between CPU and GPU, the copy may occur asynchronously with respect to the host. For other cases, this argument has no effect. prepare_batch: function that receives `batch`, `device`, `non_blocking` and outputs tuple of tensors `(batch_x, batch_y)`. output_transform: function that receives 'x', 'y', 'y_pred', 'loss' and returns value to be assigned to engine's state.output after each iteration. Default is returning `loss.item()`. gradient_accumulation_steps: Number of steps the gradients should be accumulated across. (default: 1 (means no gradient accumulation)) Returns: Callable: update function. Examples: .. code-block:: python from ignite.engine import Engine, supervised_training_step_tpu model = ... optimizer = ... loss_fn = ... update_fn = supervised_training_step_tpu(model, optimizer, loss_fn, 'xla') trainer = Engine(update_fn) .. versionadded:: 0.4.5 .. versionchanged:: 0.5.0 Added Gradient Accumulation argument for all supervised training methods.
Factory function for supervised training using ``torch_xla``.
[ "Factory", "function", "for", "supervised", "training", "using", "torch_xla", "." ]
def supervised_training_step_tpu( model: torch.nn.Module, optimizer: torch.optim.Optimizer, loss_fn: Union[Callable, torch.nn.Module], device: Optional[Union[str, torch.device]] = None, non_blocking: bool = False, prepare_batch: Callable = _prepare_batch, output_transform: Callable[[Any, Any, Any, torch.Tensor], Any] = lambda x, y, y_pred, loss: loss.item(), gradient_accumulation_steps: int = 1, ) -> Callable: """Factory function for supervised training using ``torch_xla``. Args: model: the model to train. optimizer: the optimizer to use. loss_fn: the loss function to use. device: device type specification (default: None). Applies to batches after starting the engine. Model *will not* be moved. Device can be CPU, TPU. non_blocking: if True and this copy is between CPU and GPU, the copy may occur asynchronously with respect to the host. For other cases, this argument has no effect. prepare_batch: function that receives `batch`, `device`, `non_blocking` and outputs tuple of tensors `(batch_x, batch_y)`. output_transform: function that receives 'x', 'y', 'y_pred', 'loss' and returns value to be assigned to engine's state.output after each iteration. Default is returning `loss.item()`. gradient_accumulation_steps: Number of steps the gradients should be accumulated across. (default: 1 (means no gradient accumulation)) Returns: Callable: update function. Examples: .. code-block:: python from ignite.engine import Engine, supervised_training_step_tpu model = ... optimizer = ... loss_fn = ... update_fn = supervised_training_step_tpu(model, optimizer, loss_fn, 'xla') trainer = Engine(update_fn) .. versionadded:: 0.4.5 .. versionchanged:: 0.5.0 Added Gradient Accumulation argument for all supervised training methods. """ try: import torch_xla.core.xla_model as xm except ModuleNotFoundError: raise ModuleNotFoundError("torch_xla cannot be imported, please install PyTorch XLA.") if gradient_accumulation_steps <= 0: raise ValueError( "Gradient_accumulation_steps must be strictly positive. " "No gradient accumulation if the value set to one (default)." ) def update(engine: Engine, batch: Sequence[torch.Tensor]) -> Union[Any, Tuple[torch.Tensor]]: model.train() x, y = prepare_batch(batch, device=device, non_blocking=non_blocking) y_pred = model(x) loss = loss_fn(y_pred, y) if gradient_accumulation_steps > 1: loss = loss / gradient_accumulation_steps loss.backward() if engine.state.iteration % gradient_accumulation_steps == 0: xm.optimizer_step(optimizer, barrier=True) optimizer.zero_grad() return output_transform(x, y, y_pred, loss) return update
[ "def", "supervised_training_step_tpu", "(", "model", ":", "torch", ".", "nn", ".", "Module", ",", "optimizer", ":", "torch", ".", "optim", ".", "Optimizer", ",", "loss_fn", ":", "Union", "[", "Callable", ",", "torch", ".", "nn", ".", "Module", "]", ",", "device", ":", "Optional", "[", "Union", "[", "str", ",", "torch", ".", "device", "]", "]", "=", "None", ",", "non_blocking", ":", "bool", "=", "False", ",", "prepare_batch", ":", "Callable", "=", "_prepare_batch", ",", "output_transform", ":", "Callable", "[", "[", "Any", ",", "Any", ",", "Any", ",", "torch", ".", "Tensor", "]", ",", "Any", "]", "=", "lambda", "x", ",", "y", ",", "y_pred", ",", "loss", ":", "loss", ".", "item", "(", ")", ",", "gradient_accumulation_steps", ":", "int", "=", "1", ",", ")", "->", "Callable", ":", "try", ":", "import", "torch_xla", ".", "core", ".", "xla_model", "as", "xm", "except", "ModuleNotFoundError", ":", "raise", "ModuleNotFoundError", "(", "\"torch_xla cannot be imported, please install PyTorch XLA.\"", ")", "if", "gradient_accumulation_steps", "<=", "0", ":", "raise", "ValueError", "(", "\"Gradient_accumulation_steps must be strictly positive. \"", "\"No gradient accumulation if the value set to one (default).\"", ")", "def", "update", "(", "engine", ":", "Engine", ",", "batch", ":", "Sequence", "[", "torch", ".", "Tensor", "]", ")", "->", "Union", "[", "Any", ",", "Tuple", "[", "torch", ".", "Tensor", "]", "]", ":", "model", ".", "train", "(", ")", "x", ",", "y", "=", "prepare_batch", "(", "batch", ",", "device", "=", "device", ",", "non_blocking", "=", "non_blocking", ")", "y_pred", "=", "model", "(", "x", ")", "loss", "=", "loss_fn", "(", "y_pred", ",", "y", ")", "if", "gradient_accumulation_steps", ">", "1", ":", "loss", "=", "loss", "/", "gradient_accumulation_steps", "loss", ".", "backward", "(", ")", "if", "engine", ".", "state", ".", "iteration", "%", "gradient_accumulation_steps", "==", "0", ":", "xm", ".", "optimizer_step", "(", "optimizer", ",", "barrier", "=", "True", ")", "optimizer", ".", "zero_grad", "(", ")", "return", "output_transform", "(", "x", ",", "y", ",", "y_pred", ",", "loss", ")", "return", "update" ]
https://github.com/pytorch/ignite/blob/e452884afa36b63447b9029c4773f6f59cf41340/ignite/engine/__init__.py#L275-L346
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/Django-1.11.29/django/db/backends/base/base.py
python
BaseDatabaseWrapper.ensure_timezone
(self)
return False
Ensure the connection's timezone is set to `self.timezone_name` and return whether it changed or not.
Ensure the connection's timezone is set to `self.timezone_name` and return whether it changed or not.
[ "Ensure", "the", "connection", "s", "timezone", "is", "set", "to", "self", ".", "timezone_name", "and", "return", "whether", "it", "changed", "or", "not", "." ]
def ensure_timezone(self): """ Ensure the connection's timezone is set to `self.timezone_name` and return whether it changed or not. """ return False
[ "def", "ensure_timezone", "(", "self", ")", ":", "return", "False" ]
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/Django-1.11.29/django/db/backends/base/base.py#L103-L108
kuri65536/python-for-android
26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891
python-modules/twisted/twisted/lore/tree.py
python
getFirstAncestorWithSectionHeader
(entry)
return []
Visit the ancestors of C{entry} until one with at least one C{h2} child node is found, then return all of that node's C{h2} child nodes. @type entry: A DOM Node @param entry: The node from which to begin traversal. This node itself is excluded from consideration. @rtype: C{list} of DOM Nodes @return: All C{h2} nodes of the ultimately selected parent node.
Visit the ancestors of C{entry} until one with at least one C{h2} child node is found, then return all of that node's C{h2} child nodes.
[ "Visit", "the", "ancestors", "of", "C", "{", "entry", "}", "until", "one", "with", "at", "least", "one", "C", "{", "h2", "}", "child", "node", "is", "found", "then", "return", "all", "of", "that", "node", "s", "C", "{", "h2", "}", "child", "nodes", "." ]
def getFirstAncestorWithSectionHeader(entry): """ Visit the ancestors of C{entry} until one with at least one C{h2} child node is found, then return all of that node's C{h2} child nodes. @type entry: A DOM Node @param entry: The node from which to begin traversal. This node itself is excluded from consideration. @rtype: C{list} of DOM Nodes @return: All C{h2} nodes of the ultimately selected parent node. """ for a in domhelpers.getParents(entry)[1:]: headers = domhelpers.findNodesNamed(a, "h2") if len(headers) > 0: return headers return []
[ "def", "getFirstAncestorWithSectionHeader", "(", "entry", ")", ":", "for", "a", "in", "domhelpers", ".", "getParents", "(", "entry", ")", "[", "1", ":", "]", ":", "headers", "=", "domhelpers", ".", "findNodesNamed", "(", "a", ",", "\"h2\"", ")", "if", "len", "(", "headers", ")", ">", "0", ":", "return", "headers", "return", "[", "]" ]
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-modules/twisted/twisted/lore/tree.py#L522-L538
celery/django-celery
c679b05b2abc174e6fa3231b120a07b49ec8f911
djcelery/schedulers.py
python
DatabaseScheduler.delete_task
(cls, name)
[]
def delete_task(cls, name): PeriodicTask._default_manager.get(name=name).delete()
[ "def", "delete_task", "(", "cls", ",", "name", ")", ":", "PeriodicTask", ".", "_default_manager", ".", "get", "(", "name", "=", "name", ")", ".", "delete", "(", ")" ]
https://github.com/celery/django-celery/blob/c679b05b2abc174e6fa3231b120a07b49ec8f911/djcelery/schedulers.py#L284-L285
ctxis/canape
5f0e03424577296bcc60c2008a60a98ec5307e4b
CANAPE.Scripting/Lib/xml/dom/pulldom.py
python
DOMEventStream._emit
(self)
return rc
Fallback replacement for getEvent() that emits the events that _slurp() read previously.
Fallback replacement for getEvent() that emits the events that _slurp() read previously.
[ "Fallback", "replacement", "for", "getEvent", "()", "that", "emits", "the", "events", "that", "_slurp", "()", "read", "previously", "." ]
def _emit(self): """ Fallback replacement for getEvent() that emits the events that _slurp() read previously. """ rc = self.pulldom.firstEvent[1][0] self.pulldom.firstEvent[1] = self.pulldom.firstEvent[1][1] return rc
[ "def", "_emit", "(", "self", ")", ":", "rc", "=", "self", ".", "pulldom", ".", "firstEvent", "[", "1", "]", "[", "0", "]", "self", ".", "pulldom", ".", "firstEvent", "[", "1", "]", "=", "self", ".", "pulldom", ".", "firstEvent", "[", "1", "]", "[", "1", "]", "return", "rc" ]
https://github.com/ctxis/canape/blob/5f0e03424577296bcc60c2008a60a98ec5307e4b/CANAPE.Scripting/Lib/xml/dom/pulldom.py#L280-L286
FriedAppleTeam/FRAPL
89c14d57e0cc77b915fe1e95f60e9e1847699103
Framework/FridaLink/FridaLink.py
python
TargetInfoCallable.__call__
(self)
[]
def __call__(self): self.request = self.engine.handleTargetInfo(self.platform, self.arch)
[ "def", "__call__", "(", "self", ")", ":", "self", ".", "request", "=", "self", ".", "engine", ".", "handleTargetInfo", "(", "self", ".", "platform", ",", "self", ".", "arch", ")" ]
https://github.com/FriedAppleTeam/FRAPL/blob/89c14d57e0cc77b915fe1e95f60e9e1847699103/Framework/FridaLink/FridaLink.py#L73-L74
spack/spack
675210bd8bd1c5d32ad1cc83d898fb43b569ed74
lib/spack/spack/modules/lmod.py
python
make_layout
(spec, module_set_name)
return LmodFileLayout(conf)
Returns the layout information for spec
Returns the layout information for spec
[ "Returns", "the", "layout", "information", "for", "spec" ]
def make_layout(spec, module_set_name): """Returns the layout information for spec """ conf = make_configuration(spec, module_set_name) return LmodFileLayout(conf)
[ "def", "make_layout", "(", "spec", ",", "module_set_name", ")", ":", "conf", "=", "make_configuration", "(", "spec", ",", "module_set_name", ")", "return", "LmodFileLayout", "(", "conf", ")" ]
https://github.com/spack/spack/blob/675210bd8bd1c5d32ad1cc83d898fb43b569ed74/lib/spack/spack/modules/lmod.py#L47-L50
adamreeve/npTDMS
bf4d2f65872347fde4fe7fcbb4135f466e87323e
nptdms/tdms.py
python
TdmsChannel.__len__
(self)
return self._length
Returns the number of values in this channel
Returns the number of values in this channel
[ "Returns", "the", "number", "of", "values", "in", "this", "channel" ]
def __len__(self): """ Returns the number of values in this channel """ return self._length
[ "def", "__len__", "(", "self", ")", ":", "return", "self", ".", "_length" ]
https://github.com/adamreeve/npTDMS/blob/bf4d2f65872347fde4fe7fcbb4135f466e87323e/nptdms/tdms.py#L454-L457
MarioVilas/winappdbg
975a088ac54253d0bdef39fe831e82f24b4c11f6
winappdbg/win32/kernel32.py
python
Handle.__copy__
(self)
return self.dup()
Duplicates the Win32 handle when copying the Python object. @rtype: L{Handle} @return: A new handle to the same Win32 object.
Duplicates the Win32 handle when copying the Python object.
[ "Duplicates", "the", "Win32", "handle", "when", "copying", "the", "Python", "object", "." ]
def __copy__(self): """ Duplicates the Win32 handle when copying the Python object. @rtype: L{Handle} @return: A new handle to the same Win32 object. """ return self.dup()
[ "def", "__copy__", "(", "self", ")", ":", "return", "self", ".", "dup", "(", ")" ]
https://github.com/MarioVilas/winappdbg/blob/975a088ac54253d0bdef39fe831e82f24b4c11f6/winappdbg/win32/kernel32.py#L645-L652
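Since Handle.__copy__ above delegates to dup(), the standard library's copy module is enough to duplicate the underlying Win32 handle. A minimal sketch, assuming `handle` is a live winappdbg Handle:

    import copy

    # Invokes Handle.__copy__, i.e. handle.dup(): a new Win32 handle to the same object.
    duplicate = copy.copy(handle)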
microsoft/malmo-challenge
a1dec75d0eb4cccc91f9d818a4ecae5fa1ac906f
malmopy/environment/malmo/malmo.py
python
MalmoEnvironment._await_next_obs
(self)
Ensure that an update to the world state is received :return:
Ensure that an update to the world state is received :return:
[ "Ensure", "that", "an", "update", "to", "the", "world", "state", "is", "received", ":", "return", ":" ]
def _await_next_obs(self): """ Ensure that an update to the world state is received :return: """ # Wait until we have everything we need current_state = self._agent.peekWorldState() while not self.is_valid(current_state) or not self._ready_to_act(current_state): if current_state.has_mission_begun and not current_state.is_mission_running: if not current_state.is_mission_running and len(current_state.mission_control_messages) > 0: # Parse the mission ended message: mission_end_tree = xml.etree.ElementTree.fromstring(current_state.mission_control_messages[-1].text) ns_dict = {"malmo": "http://ProjectMalmo.microsoft.com"} hr_stat = mission_end_tree.find("malmo:HumanReadableStatus", ns_dict).text self._end_result = hr_stat break # Peek fresh world state from socket current_state = self._agent.peekWorldState() # Flush current world as soon as we have the entire state self._world = self._agent.getWorldState() if self._world.is_mission_running: new_world = json.loads(self._world.observations[-1].text) if new_world is not None: self._world_obs = new_world # Update video frames if any if hasattr(self._world, 'video_frames') and len(self._world.video_frames) > 0: self._last_frame = self._world.video_frames[-1]
[ "def", "_await_next_obs", "(", "self", ")", ":", "# Wait until we have everything we need", "current_state", "=", "self", ".", "_agent", ".", "peekWorldState", "(", ")", "while", "not", "self", ".", "is_valid", "(", "current_state", ")", "or", "not", "self", ".", "_ready_to_act", "(", "current_state", ")", ":", "if", "current_state", ".", "has_mission_begun", "and", "not", "current_state", ".", "is_mission_running", ":", "if", "not", "current_state", ".", "is_mission_running", "and", "len", "(", "current_state", ".", "mission_control_messages", ")", ">", "0", ":", "# Parse the mission ended message:", "mission_end_tree", "=", "xml", ".", "etree", ".", "ElementTree", ".", "fromstring", "(", "current_state", ".", "mission_control_messages", "[", "-", "1", "]", ".", "text", ")", "ns_dict", "=", "{", "\"malmo\"", ":", "\"http://ProjectMalmo.microsoft.com\"", "}", "hr_stat", "=", "mission_end_tree", ".", "find", "(", "\"malmo:HumanReadableStatus\"", ",", "ns_dict", ")", ".", "text", "self", ".", "_end_result", "=", "hr_stat", "break", "# Peek fresh world state from socket", "current_state", "=", "self", ".", "_agent", ".", "peekWorldState", "(", ")", "# Flush current world as soon as we have the entire state", "self", ".", "_world", "=", "self", ".", "_agent", ".", "getWorldState", "(", ")", "if", "self", ".", "_world", ".", "is_mission_running", ":", "new_world", "=", "json", ".", "loads", "(", "self", ".", "_world", ".", "observations", "[", "-", "1", "]", ".", "text", ")", "if", "new_world", "is", "not", "None", ":", "self", ".", "_world_obs", "=", "new_world", "# Update video frames if any", "if", "hasattr", "(", "self", ".", "_world", ",", "'video_frames'", ")", "and", "len", "(", "self", ".", "_world", ".", "video_frames", ")", ">", "0", ":", "self", ".", "_last_frame", "=", "self", ".", "_world", ".", "video_frames", "[", "-", "1", "]" ]
https://github.com/microsoft/malmo-challenge/blob/a1dec75d0eb4cccc91f9d818a4ecae5fa1ac906f/malmopy/environment/malmo/malmo.py#L323-L354
khanhnamle1994/natural-language-processing
01d450d5ac002b0156ef4cf93a07cb508c1bcdc5
assignment1/.env/lib/python2.7/site-packages/pytz/__init__.py
python
UTC.localize
(self, dt, is_dst=False)
return dt.replace(tzinfo=self)
Convert naive time to local time
Convert naive time to local time
[ "Convert", "naive", "time", "to", "local", "time" ]
def localize(self, dt, is_dst=False): '''Convert naive time to local time''' if dt.tzinfo is not None: raise ValueError('Not naive datetime (tzinfo is already set)') return dt.replace(tzinfo=self)
[ "def", "localize", "(", "self", ",", "dt", ",", "is_dst", "=", "False", ")", ":", "if", "dt", ".", "tzinfo", "is", "not", "None", ":", "raise", "ValueError", "(", "'Not naive datetime (tzinfo is already set)'", ")", "return", "dt", ".", "replace", "(", "tzinfo", "=", "self", ")" ]
https://github.com/khanhnamle1994/natural-language-processing/blob/01d450d5ac002b0156ef4cf93a07cb508c1bcdc5/assignment1/.env/lib/python2.7/site-packages/pytz/__init__.py#L223-L227
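A short usage sketch for the localize entry above; the dates are arbitrary:

    from datetime import datetime
    import pytz

    naive = datetime(2020, 1, 1, 12, 0)        # no tzinfo
    aware = pytz.utc.localize(naive)           # tzinfo set to UTC
    print(aware.isoformat())                   # 2020-01-01T12:00:00+00:00
    # pytz.utc.localize(aware) raises ValueError: the input must be naive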
GNOME/gnome-music
b7b55c8519f9cc613ca60c01a5ab8cef6b58c92e
gnomemusic/widgets/artstack.py
python
ArtStack._on_thumbnail_changed
(self, coreobject, uri)
[]
def _on_thumbnail_changed(self, coreobject, uri): self._disconnect_cache() self._handler_id = self._cache.connect( "finished", self._on_cache_result) self._async_queue.queue( self._cache, coreobject, self._size, self.props.scale_factor)
[ "def", "_on_thumbnail_changed", "(", "self", ",", "coreobject", ",", "uri", ")", ":", "self", ".", "_disconnect_cache", "(", ")", "self", ".", "_handler_id", "=", "self", ".", "_cache", ".", "connect", "(", "\"finished\"", ",", "self", ".", "_on_cache_result", ")", "self", ".", "_async_queue", ".", "queue", "(", "self", ".", "_cache", ",", "coreobject", ",", "self", ".", "_size", ",", "self", ".", "props", ".", "scale_factor", ")" ]
https://github.com/GNOME/gnome-music/blob/b7b55c8519f9cc613ca60c01a5ab8cef6b58c92e/gnomemusic/widgets/artstack.py#L131-L138
learningequality/ka-lite
571918ea668013dcf022286ea85eff1c5333fb8b
kalite/packages/bundled/django/core/files/storage.py
python
Storage.path
(self, name)
Returns a local filesystem path where the file can be retrieved using Python's built-in open() function. Storage systems that can't be accessed using open() should *not* implement this method.
Returns a local filesystem path where the file can be retrieved using Python's built-in open() function. Storage systems that can't be accessed using open() should *not* implement this method.
[ "Returns", "a", "local", "filesystem", "path", "where", "the", "file", "can", "be", "retrieved", "using", "Python", "s", "built", "-", "in", "open", "()", "function", ".", "Storage", "systems", "that", "can", "t", "be", "accessed", "using", "open", "()", "should", "*", "not", "*", "implement", "this", "method", "." ]
def path(self, name): """ Returns a local filesystem path where the file can be retrieved using Python's built-in open() function. Storage systems that can't be accessed using open() should *not* implement this method. """ raise NotImplementedError("This backend doesn't support absolute paths.")
[ "def", "path", "(", "self", ",", "name", ")", ":", "raise", "NotImplementedError", "(", "\"This backend doesn't support absolute paths.\"", ")" ]
https://github.com/learningequality/ka-lite/blob/571918ea668013dcf022286ea85eff1c5333fb8b/kalite/packages/bundled/django/core/files/storage.py#L79-L85
PYFTS/pyFTS
ad3e857024d0da068feb0d7eae214b55a038a9ca
pyFTS/benchmarks/Measures.py
python
UStatistic
(targets, forecasts)
return np.sqrt(np.divide(np.nansum(y), np.nansum(naive)))
Theil's U Statistic :param targets: :param forecasts: :return:
Theil's U Statistic
[ "Theil", "s", "U", "Statistic" ]
def UStatistic(targets, forecasts): """ Theil's U Statistic :param targets: :param forecasts: :return: """ if not isinstance(forecasts, (list, np.ndarray)): forecasts = np.array([forecasts]) else: forecasts = np.array(forecasts) if not isinstance(targets, (list, np.ndarray)): targets = np.array([targets]) else: targets = np.array(targets) l = forecasts.size l = 2 if l == 1 else l naive = [] y = [] for k in np.arange(0, l - 1): y.append(np.subtract(forecasts[k], targets[k]) ** 2) naive.append(np.subtract(targets[k + 1], targets[k]) ** 2) return np.sqrt(np.divide(np.nansum(y), np.nansum(naive)))
[ "def", "UStatistic", "(", "targets", ",", "forecasts", ")", ":", "if", "not", "isinstance", "(", "forecasts", ",", "(", "list", ",", "np", ".", "ndarray", ")", ")", ":", "forecasts", "=", "np", ".", "array", "(", "[", "forecasts", "]", ")", "else", ":", "forecasts", "=", "np", ".", "array", "(", "forecasts", ")", "if", "not", "isinstance", "(", "targets", ",", "(", "list", ",", "np", ".", "ndarray", ")", ")", ":", "targets", "=", "np", ".", "array", "(", "[", "targets", "]", ")", "else", ":", "targets", "=", "np", ".", "array", "(", "targets", ")", "l", "=", "forecasts", ".", "size", "l", "=", "2", "if", "l", "==", "1", "else", "l", "naive", "=", "[", "]", "y", "=", "[", "]", "for", "k", "in", "np", ".", "arange", "(", "0", ",", "l", "-", "1", ")", ":", "y", ".", "append", "(", "np", ".", "subtract", "(", "forecasts", "[", "k", "]", ",", "targets", "[", "k", "]", ")", "**", "2", ")", "naive", ".", "append", "(", "np", ".", "subtract", "(", "targets", "[", "k", "+", "1", "]", ",", "targets", "[", "k", "]", ")", "**", "2", ")", "return", "np", ".", "sqrt", "(", "np", ".", "divide", "(", "np", ".", "nansum", "(", "y", ")", ",", "np", ".", "nansum", "(", "naive", ")", ")", ")" ]
https://github.com/PYFTS/pyFTS/blob/ad3e857024d0da068feb0d7eae214b55a038a9ca/pyFTS/benchmarks/Measures.py#L112-L139
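A worked example for the UStatistic entry above, with hypothetical series; a value below 1 means the forecasts beat the naive no-change predictor the statistic divides by:

    from pyFTS.benchmarks.Measures import UStatistic

    targets   = [1.0, 2.0, 3.0, 4.0]
    forecasts = [1.1, 1.9, 3.2, 3.9]
    # numerator: 0.1**2 + 0.1**2 + 0.2**2 = 0.06; denominator: 1 + 1 + 1 = 3
    print(UStatistic(targets, forecasts))      # sqrt(0.06 / 3) ~= 0.1414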
datafolklabs/cement
2d44d2c1821bda6bdfcfe605d244dc2dfb0b19a6
cement/cli/contrib/jinja2/bccache.py
python
Bucket.load_bytecode
(self, f)
Loads bytecode from a file or file like object.
Loads bytecode from a file or file like object.
[ "Loads", "bytecode", "from", "a", "file", "or", "file", "like", "object", "." ]
def load_bytecode(self, f): """Loads bytecode from a file or file like object.""" # make sure the magic header is correct magic = f.read(len(bc_magic)) if magic != bc_magic: self.reset() return # the source code of the file changed, we need to reload checksum = pickle.load(f) if self.checksum != checksum: self.reset() return # if marshal_load fails then we need to reload try: self.code = marshal_load(f) except (EOFError, ValueError, TypeError): self.reset() return
[ "def", "load_bytecode", "(", "self", ",", "f", ")", ":", "# make sure the magic header is correct", "magic", "=", "f", ".", "read", "(", "len", "(", "bc_magic", ")", ")", "if", "magic", "!=", "bc_magic", ":", "self", ".", "reset", "(", ")", "return", "# the source code of the file changed, we need to reload", "checksum", "=", "pickle", ".", "load", "(", "f", ")", "if", "self", ".", "checksum", "!=", "checksum", ":", "self", ".", "reset", "(", ")", "return", "# if marshal_load fails then we need to reload", "try", ":", "self", ".", "code", "=", "marshal_load", "(", "f", ")", "except", "(", "EOFError", ",", "ValueError", ",", "TypeError", ")", ":", "self", ".", "reset", "(", ")", "return" ]
https://github.com/datafolklabs/cement/blob/2d44d2c1821bda6bdfcfe605d244dc2dfb0b19a6/cement/cli/contrib/jinja2/bccache.py#L79-L96
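Bucket.load_bytecode above (from cement's vendored jinja2) is normally driven by a bytecode cache attached to an Environment rather than called directly. A minimal sketch using the upstream jinja2 package that this module mirrors; the directory and template names are hypothetical:

    from jinja2 import Environment, FileSystemLoader, FileSystemBytecodeCache

    env = Environment(
        loader=FileSystemLoader("templates"),
        bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache"),
    )
    # On a warm cache, load_bytecode() checks the magic header and source
    # checksum before reusing the marshalled code object from disk.
    tmpl = env.get_template("page.html")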
openai/multiagent-competition
b2e081a192429913b7d21a89525901d59c85e7f3
gym-compete/gym_compete/new_envs/agents/agent.py
python
Agent.set_margin
(self, margin)
[]
def set_margin(self, margin): agent_geom_ids = [i for i, name in enumerate(self.env.model.geom_names) if self.in_scope(name)] m = self.env.model.geom_margin.copy() print("Resetting", self.scope, "margins to", margin) m[agent_geom_ids] = margin self.env.model.__setattr__('geom_margin', m)
[ "def", "set_margin", "(", "self", ",", "margin", ")", ":", "agent_geom_ids", "=", "[", "i", "for", "i", ",", "name", "in", "enumerate", "(", "self", ".", "env", ".", "model", ".", "geom_names", ")", "if", "self", ".", "in_scope", "(", "name", ")", "]", "m", "=", "self", ".", "env", ".", "model", ".", "geom_margin", ".", "copy", "(", ")", "print", "(", "\"Resetting\"", ",", "self", ".", "scope", ",", "\"margins to\"", ",", "margin", ")", "m", "[", "agent_geom_ids", "]", "=", "margin", "self", ".", "env", ".", "model", ".", "__setattr__", "(", "'geom_margin'", ",", "m", ")" ]
https://github.com/openai/multiagent-competition/blob/b2e081a192429913b7d21a89525901d59c85e7f3/gym-compete/gym_compete/new_envs/agents/agent.py#L247-L253
google/TensorNetwork
e12580f1749493dbe05f474d2fecdec4eaba73c5
tensornetwork/block_sparse/linalg.py
python
eigh
(matrix: BlockSparseTensor, UPLO: Optional[Text] = 'L')
return E, V
Compute the eigen decomposition of a hermitian `M` by `M` matrix `matrix`. Args: matrix: A matrix (i.e. a rank-2 tensor) of type `BlockSparseTensor` Returns: (ChargeArray,BlockSparseTensor): The eigenvalues and eigenvectors
Compute the eigen decomposition of a hermitian `M` by `M` matrix `matrix`. Args: matrix: A matrix (i.e. a rank-2 tensor) of type `BlockSparseTensor`
[ "Compute", "the", "eigen", "decomposition", "of", "a", "hermitian", "M", "by", "M", "matrix", "matrix", ".", "Args", ":", "matrix", ":", "A", "matrix", "(", "i", ".", "e", ".", "a", "rank", "-", "2", "tensor", ")", "of", "type", "BlockSparseTensor" ]
def eigh(matrix: BlockSparseTensor, UPLO: Optional[Text] = 'L') -> Tuple[ChargeArray, BlockSparseTensor]: """ Compute the eigen decomposition of a hermitian `M` by `M` matrix `matrix`. Args: matrix: A matrix (i.e. a rank-2 tensor) of type `BlockSparseTensor` Returns: (ChargeArray,BlockSparseTensor): The eigenvalues and eigenvectors """ if matrix.ndim != 2: raise NotImplementedError("eigh currently supports only rank-2 tensors.") flat_charges = matrix._charges flat_flows = matrix._flows flat_order = matrix.flat_order tr_partition = len(matrix._order[0]) blocks, charges, shapes = _find_transposed_diagonal_sparse_blocks( flat_charges, flat_flows, tr_partition, flat_order) eigvals = [] v_blocks = [] for n, block in enumerate(blocks): e, v = np.linalg.eigh(np.reshape(matrix.data[block], shapes[:, n]), UPLO) eigvals.append(e) v_blocks.append(v) tmp_labels = [ np.full(len(eigvals[n]), fill_value=n, dtype=np.int16) for n in range(len(eigvals)) ] if len(tmp_labels) > 0: eigvalscharge_labels = np.concatenate(tmp_labels) else: eigvalscharge_labels = np.empty(0, dtype=np.int16) eigvalscharge = charges[eigvalscharge_labels] if len(eigvals) > 0: all_eigvals = np.concatenate(eigvals) else: all_eigvals = np.empty(0, dtype=get_real_dtype(matrix.dtype)) E = ChargeArray(all_eigvals, [eigvalscharge], [False]) charges_v = [eigvalscharge] + [matrix._charges[o] for o in matrix._order[0]] order_v = [[0]] + [list(np.arange(1, len(matrix._order[0]) + 1))] flows_v = [True] + [matrix._flows[o] for o in matrix._order[0]] if len(v_blocks) > 0: all_v_blocks = np.concatenate([np.ravel(v.T) for v in v_blocks]) else: all_v_blocks = np.empty(0, dtype=matrix.dtype) V = BlockSparseTensor( all_v_blocks, charges=charges_v, flows=flows_v, order=order_v, check_consistency=False).transpose() return E, V
[ "def", "eigh", "(", "matrix", ":", "BlockSparseTensor", ",", "UPLO", ":", "Optional", "[", "Text", "]", "=", "'L'", ")", "->", "Tuple", "[", "ChargeArray", ",", "BlockSparseTensor", "]", ":", "if", "matrix", ".", "ndim", "!=", "2", ":", "raise", "NotImplementedError", "(", "\"eigh currently supports only rank-2 tensors.\"", ")", "flat_charges", "=", "matrix", ".", "_charges", "flat_flows", "=", "matrix", ".", "_flows", "flat_order", "=", "matrix", ".", "flat_order", "tr_partition", "=", "len", "(", "matrix", ".", "_order", "[", "0", "]", ")", "blocks", ",", "charges", ",", "shapes", "=", "_find_transposed_diagonal_sparse_blocks", "(", "flat_charges", ",", "flat_flows", ",", "tr_partition", ",", "flat_order", ")", "eigvals", "=", "[", "]", "v_blocks", "=", "[", "]", "for", "n", ",", "block", "in", "enumerate", "(", "blocks", ")", ":", "e", ",", "v", "=", "np", ".", "linalg", ".", "eigh", "(", "np", ".", "reshape", "(", "matrix", ".", "data", "[", "block", "]", ",", "shapes", "[", ":", ",", "n", "]", ")", ",", "UPLO", ")", "eigvals", ".", "append", "(", "e", ")", "v_blocks", ".", "append", "(", "v", ")", "tmp_labels", "=", "[", "np", ".", "full", "(", "len", "(", "eigvals", "[", "n", "]", ")", ",", "fill_value", "=", "n", ",", "dtype", "=", "np", ".", "int16", ")", "for", "n", "in", "range", "(", "len", "(", "eigvals", ")", ")", "]", "if", "len", "(", "tmp_labels", ")", ">", "0", ":", "eigvalscharge_labels", "=", "np", ".", "concatenate", "(", "tmp_labels", ")", "else", ":", "eigvalscharge_labels", "=", "np", ".", "empty", "(", "0", ",", "dtype", "=", "np", ".", "int16", ")", "eigvalscharge", "=", "charges", "[", "eigvalscharge_labels", "]", "if", "len", "(", "eigvals", ")", ">", "0", ":", "all_eigvals", "=", "np", ".", "concatenate", "(", "eigvals", ")", "else", ":", "all_eigvals", "=", "np", ".", "empty", "(", "0", ",", "dtype", "=", "get_real_dtype", "(", "matrix", ".", "dtype", ")", ")", "E", "=", "ChargeArray", "(", "all_eigvals", ",", "[", "eigvalscharge", "]", ",", "[", "False", "]", ")", "charges_v", "=", "[", "eigvalscharge", "]", "+", "[", "matrix", ".", "_charges", "[", "o", "]", "for", "o", "in", "matrix", ".", "_order", "[", "0", "]", "]", "order_v", "=", "[", "[", "0", "]", "]", "+", "[", "list", "(", "np", ".", "arange", "(", "1", ",", "len", "(", "matrix", ".", "_order", "[", "0", "]", ")", "+", "1", ")", ")", "]", "flows_v", "=", "[", "True", "]", "+", "[", "matrix", ".", "_flows", "[", "o", "]", "for", "o", "in", "matrix", ".", "_order", "[", "0", "]", "]", "if", "len", "(", "v_blocks", ")", ">", "0", ":", "all_v_blocks", "=", "np", ".", "concatenate", "(", "[", "np", ".", "ravel", "(", "v", ".", "T", ")", "for", "v", "in", "v_blocks", "]", ")", "else", ":", "all_v_blocks", "=", "np", ".", "empty", "(", "0", ",", "dtype", "=", "matrix", ".", "dtype", ")", "V", "=", "BlockSparseTensor", "(", "all_v_blocks", ",", "charges", "=", "charges_v", ",", "flows", "=", "flows_v", ",", "order", "=", "order_v", ",", "check_consistency", "=", "False", ")", ".", "transpose", "(", ")", "return", "E", ",", "V" ]
https://github.com/google/TensorNetwork/blob/e12580f1749493dbe05f474d2fecdec4eaba73c5/tensornetwork/block_sparse/linalg.py#L395-L451
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/wsgiref/validate.py
python
ErrorWrapper.flush
(self)
[]
def flush(self): self.errors.flush()
[ "def", "flush", "(", "self", ")", ":", "self", ".", "errors", ".", "flush", "(", ")" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/wsgiref/validate.py#L235-L236
googlefonts/fontbakery
cb8196c3a636b63654f8370636cb3f438b60d5b1
Lib/fontbakery/profiles/typenetwork.py
python
io_github_abysstypeco_check_ytlc_sanity
(ttFont)
Check if ytlc values are sane in vf
Check if ytlc values are sane in vf
[ "Check", "if", "ytlc", "values", "are", "sane", "in", "vf" ]
def io_github_abysstypeco_check_ytlc_sanity(ttFont): """Check if ytlc values are sane in vf""" passed = True for axis in ttFont['fvar'].axes: if not axis.axisTag == 'ytlc': continue if axis.minValue < 0 or axis.maxValue > 1000: passed = False yield FAIL,\ Message("invalid-range", f'The range of ytlc values ({axis.minValue} - {axis.maxValue}) ' f'does not conform to the expected range of ytlc which should be min value 0 to max value 1000') if passed: yield PASS, 'ytlc is sane'
[ "def", "io_github_abysstypeco_check_ytlc_sanity", "(", "ttFont", ")", ":", "passed", "=", "True", "for", "axis", "in", "ttFont", "[", "'fvar'", "]", ".", "axes", ":", "if", "not", "axis", ".", "axisTag", "==", "'ytlc'", ":", "continue", "if", "axis", ".", "minValue", "<", "0", "or", "axis", ".", "maxValue", ">", "1000", ":", "passed", "=", "False", "yield", "FAIL", ",", "Message", "(", "\"invalid-range\"", ",", "f'The range of ytlc values ({axis.minValue} - {axis.maxValue}) '", "f'does not conform to the expected range of ytlc which should be min value 0 to max value 1000'", ")", "if", "passed", ":", "yield", "PASS", ",", "'ytlc is sane'" ]
https://github.com/googlefonts/fontbakery/blob/cb8196c3a636b63654f8370636cb3f438b60d5b1/Lib/fontbakery/profiles/typenetwork.py#L28-L42
jazzband/tablib
94ffe67e50eb5bfd99d73a4f010e463478a98928
src/tablib/packages/dbfpy/header.py
python
DbfHeader.ignoreErrors
(self, value)
Update `ignoreErrors` flag on self and all fields
Update `ignoreErrors` flag on self and all fields
[ "Update", "ignoreErrors", "flag", "on", "self", "and", "all", "fields" ]
def ignoreErrors(self, value): """Update `ignoreErrors` flag on self and all fields""" self._ignore_errors = value = bool(value) for _field in self.fields: _field.ignoreErrors = value
[ "def", "ignoreErrors", "(", "self", ",", "value", ")", ":", "self", ".", "_ignore_errors", "=", "value", "=", "bool", "(", "value", ")", "for", "_field", "in", "self", ".", "fields", ":", "_field", ".", "ignoreErrors", "=", "value" ]
https://github.com/jazzband/tablib/blob/94ffe67e50eb5bfd99d73a4f010e463478a98928/src/tablib/packages/dbfpy/header.py#L142-L146
ducksboard/libsaas
615981a3336f65be9d51ae95a48aed9ad3bd1c3c
libsaas/services/github/repos.py
python
Repo.commit
(self, sha)
return repocommits.RepoCommit(self, sha)
Return a resource corresponding to a single commit in this repo.
Return a resource corresponding to a single commit in this repo.
[ "Return", "a", "resource", "corresponding", "to", "a", "single", "commit", "in", "this", "repo", "." ]
def commit(self, sha): """ Return a resource corresponding to a single commit in this repo. """ return repocommits.RepoCommit(self, sha)
[ "def", "commit", "(", "self", ",", "sha", ")", ":", "return", "repocommits", ".", "RepoCommit", "(", "self", ",", "sha", ")" ]
https://github.com/ducksboard/libsaas/blob/615981a3336f65be9d51ae95a48aed9ad3bd1c3c/libsaas/services/github/repos.py#L200-L204
keiohta/tf2rl
43523930b3328b28fcf2ce64e6a9a8cf4a403044
tf2rl/tools/img_tools.py
python
random_crop
(input_imgs, output_size)
return np.transpose(cropped_imgs, (0, 2, 3, 1))
Args: input_imgs: np.ndarray Images whose shape is (batch_size, width, height, channels) output_size: Int Output width and height size. Returns:
[]
def random_crop(input_imgs, output_size): """ Args: input_imgs: np.ndarray Images whose shape is (batch_size, width, height, channels) output_size: Int Output width and height size. Returns: """ assert input_imgs.ndim == 4, f"The dimension of input images must be 4, not {input_imgs.ndim}" batch_size = input_imgs.shape[0] img_size = input_imgs.shape[1] assert img_size > output_size crop_max = img_size - output_size topleft_x = np.random.randint(0, crop_max, batch_size) topleft_y = np.random.randint(0, crop_max, batch_size) # creates all sliding windows combinations of size (output_size) windows = view_as_windows( input_imgs, (1, output_size, output_size, 1))[..., 0, :, :, 0] # selects a random window for each batch element cropped_imgs = windows[np.arange(batch_size), topleft_x, topleft_y] return np.transpose(cropped_imgs, (0, 2, 3, 1))
[ "def", "random_crop", "(", "input_imgs", ",", "output_size", ")", ":", "assert", "input_imgs", ".", "ndim", "==", "4", ",", "f\"The dimension of input images must be 4, not {input_imgs.ndim}\"", "batch_size", "=", "input_imgs", ".", "shape", "[", "0", "]", "img_size", "=", "input_imgs", ".", "shape", "[", "1", "]", "assert", "img_size", ">", "output_size", "crop_max", "=", "img_size", "-", "output_size", "topleft_x", "=", "np", ".", "random", ".", "randint", "(", "0", ",", "crop_max", ",", "batch_size", ")", "topleft_y", "=", "np", ".", "random", ".", "randint", "(", "0", ",", "crop_max", ",", "batch_size", ")", "# creates all sliding windows combinations of size (output_size)", "windows", "=", "view_as_windows", "(", "input_imgs", ",", "(", "1", ",", "output_size", ",", "output_size", ",", "1", ")", ")", "[", "...", ",", "0", ",", ":", ",", ":", ",", "0", "]", "# selects a random window for each batch element", "cropped_imgs", "=", "windows", "[", "np", ".", "arange", "(", "batch_size", ")", ",", "topleft_x", ",", "topleft_y", "]", "return", "np", ".", "transpose", "(", "cropped_imgs", ",", "(", "0", ",", "2", ",", "3", ",", "1", ")", ")" ]
https://github.com/keiohta/tf2rl/blob/43523930b3328b28fcf2ce64e6a9a8cf4a403044/tf2rl/tools/img_tools.py#L6-L33
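A quick shape check for the random_crop entry above (with the function and its view_as_windows import in scope); the sizes are hypothetical:

    import numpy as np

    imgs = np.random.rand(8, 84, 84, 3)       # eight 84x84 RGB frames
    crops = random_crop(imgs, 64)             # one random 64x64 window per frame
    print(crops.shape)                        # (8, 64, 64, 3)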
cournape/Bento
37de23d784407a7c98a4a15770ffc570d5f32d70
bento/private/_yaku/yaku/build_context.py
python
BuildContext.__init__
(self)
[]
def __init__(self): self.object_tasks = [] self.cache = {} self.env = {}
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "object_tasks", "=", "[", "]", "self", ".", "cache", "=", "{", "}", "self", ".", "env", "=", "{", "}" ]
https://github.com/cournape/Bento/blob/37de23d784407a7c98a4a15770ffc570d5f32d70/bento/private/_yaku/yaku/build_context.py#L14-L17
unipacker/unipacker
3e6b94013292fbfbc35ff940bd75ca26acbacdb1
unipacker/shell.py
python
Shell.do_r
(self, args)
Start execution
Start execution
[ "Start", "execution" ]
def do_r(self, args): """Start execution""" if self.started: print("Emulation already started. Interpreting as 'c'") self.do_c(args) return self.shell_event.clear() threading.Thread(target=self.engine.emu).start() self.shell_event.wait()
[ "def", "do_r", "(", "self", ",", "args", ")", ":", "if", "self", ".", "started", ":", "print", "(", "\"Emulation already started. Interpreting as 'c'\"", ")", "self", ".", "do_c", "(", "args", ")", "return", "self", ".", "shell_event", ".", "clear", "(", ")", "threading", ".", "Thread", "(", "target", "=", "self", ".", "engine", ".", "emu", ")", ".", "start", "(", ")", "self", ".", "shell_event", ".", "wait", "(", ")" ]
https://github.com/unipacker/unipacker/blob/3e6b94013292fbfbc35ff940bd75ca26acbacdb1/unipacker/shell.py#L613-L621
ipython/ipyparallel
d35d4fb9501da5b3280b11e83ed633a95f17be1d
ipyparallel/util.py
python
_execute
(code)
helper method for implementing `client.execute` via `client.apply`
helper method for implementing `client.execute` via `client.apply`
[ "helper", "method", "for", "implementing", "client", ".", "execute", "via", "client", ".", "apply" ]
def _execute(code): """helper method for implementing `client.execute` via `client.apply`""" user_ns = get_ipython().user_global_ns exec(code, user_ns)
[ "def", "_execute", "(", "code", ")", ":", "user_ns", "=", "get_ipython", "(", ")", ".", "user_global_ns", "exec", "(", "code", ",", "user_ns", ")" ]
https://github.com/ipython/ipyparallel/blob/d35d4fb9501da5b3280b11e83ed633a95f17be1d/ipyparallel/util.py#L330-L333
smartbgp/yabgp
f073633a813899cd9b413bc28ea2f7737deee141
yabgp/message/attribute/community.py
python
Community.parse
(cls, value)
return community
parse BGP community. :param value:
parse BGP community. :param value:
[ "parse", "BGP", "community", ".", ":", "param", "value", ":" ]
def parse(cls, value): """ parse BGP community. :param value: """ community = [] if value: try: length = len(value) / 2 value_list = list(struct.unpack('!%dH' % length, value)) while value_list: value_type = value_list[0] * 16 * 16 * 16 * 16 + value_list[1] if value_type in bgp_cons.WELL_KNOW_COMMUNITY_INT_2_STR: community.append(bgp_cons.WELL_KNOW_COMMUNITY_INT_2_STR[value_type]) else: community.append("%s:%s" % (value_list[0], value_list[1])) value_list = value_list[2:] except Exception: raise excep.UpdateMessageError( sub_error=bgp_cons.ERR_MSG_UPDATE_ATTR_LEN, data=value) return community
[ "def", "parse", "(", "cls", ",", "value", ")", ":", "community", "=", "[", "]", "if", "value", ":", "try", ":", "length", "=", "len", "(", "value", ")", "/", "2", "value_list", "=", "list", "(", "struct", ".", "unpack", "(", "'!%dH'", "%", "length", ",", "value", ")", ")", "while", "value_list", ":", "value_type", "=", "value_list", "[", "0", "]", "*", "16", "*", "16", "*", "16", "*", "16", "+", "value_list", "[", "1", "]", "if", "value_type", "in", "bgp_cons", ".", "WELL_KNOW_COMMUNITY_INT_2_STR", ":", "community", ".", "append", "(", "bgp_cons", ".", "WELL_KNOW_COMMUNITY_INT_2_STR", "[", "value_type", "]", ")", "else", ":", "community", ".", "append", "(", "\"%s:%s\"", "%", "(", "value_list", "[", "0", "]", ",", "value_list", "[", "1", "]", ")", ")", "value_list", "=", "value_list", "[", "2", ":", "]", "except", "Exception", ":", "raise", "excep", ".", "UpdateMessageError", "(", "sub_error", "=", "bgp_cons", ".", "ERR_MSG_UPDATE_ATTR_LEN", ",", "data", "=", "value", ")", "return", "community" ]
https://github.com/smartbgp/yabgp/blob/f073633a813899cd9b413bc28ea2f7737deee141/yabgp/message/attribute/community.py#L40-L61
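A hedged sketch of feeding Community.parse a packed path-attribute value, assuming yabgp is installed and that 0xFFFFFF01 maps to NO_EXPORT in bgp_cons.WELL_KNOW_COMMUNITY_INT_2_STR; the 64512:100 pair is invented:

import struct
from yabgp.message.attribute.community import Community

# Two communities, each packed as two 16-bit halves: the well-known
# NO_EXPORT value (0xFFFF, 0xFF01) followed by an ordinary 64512:100 pair.
raw = struct.pack('!4H', 0xFFFF, 0xFF01, 64512, 100)
print(Community.parse(raw))  # expected ['NO_EXPORT', '64512:100'] if the mapping holds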
pyparallel/pyparallel
11e8c6072d48c8f13641925d17b147bf36ee0ba3
Lib/site-packages/tornado-4.3b2-py3.3-win-amd64.egg/tornado/locks.py
python
Event.is_set
(self)
return self._future.done()
Return ``True`` if the internal flag is true.
Return ``True`` if the internal flag is true.
[ "Return", "True", "if", "the", "internal", "flag", "is", "true", "." ]
def is_set(self): """Return ``True`` if the internal flag is true.""" return self._future.done()
[ "def", "is_set", "(", "self", ")", ":", "return", "self", ".", "_future", ".", "done", "(", ")" ]
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/site-packages/tornado-4.3b2-py3.3-win-amd64.egg/tornado/locks.py#L200-L202
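The future-backed flag above is easiest to see through tornado's public Event API; a small, standard tornado.locks example (not specific to this vendored 4.3b2 copy):

from tornado.locks import Event

event = Event()
print(event.is_set())  # False: the internal future has not completed yet
event.set()            # resolves the future
print(event.is_set())  # True: done() now holds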
rucio/rucio
6d0d358e04f5431f0b9a98ae40f31af0ddff4833
lib/rucio/api/rse.py
python
get_rse_usage
(rse, issuer, source=None, per_account=False, vo='def')
return [api_update_return_dict(u) for u in usages]
get RSE usage information. :param rse: The RSE name. :param issuer: The issuer account. :param source: dictionary of attributes by which the results should be filtered :param vo: The VO to act on. :returns: List of RSE usage data.
get RSE usage information.
[ "get", "RSE", "usage", "information", "." ]
def get_rse_usage(rse, issuer, source=None, per_account=False, vo='def'): """ get RSE usage information. :param rse: The RSE name. :param issuer: The issuer account. :param source: dictionary of attributes by which the results should be filtered :param vo: The VO to act on. :returns: List of RSE usage data. """ rse_id = rse_module.get_rse_id(rse=rse, vo=vo) usages = rse_module.get_rse_usage(rse_id=rse_id, source=source, per_account=per_account) for u in usages: if 'account_usages' in u: for account_usage in u['account_usages']: account_usage['account'] = account_usage['account'].external return [api_update_return_dict(u) for u in usages]
[ "def", "get_rse_usage", "(", "rse", ",", "issuer", ",", "source", "=", "None", ",", "per_account", "=", "False", ",", "vo", "=", "'def'", ")", ":", "rse_id", "=", "rse_module", ".", "get_rse_id", "(", "rse", "=", "rse", ",", "vo", "=", "vo", ")", "usages", "=", "rse_module", ".", "get_rse_usage", "(", "rse_id", "=", "rse_id", ",", "source", "=", "source", ",", "per_account", "=", "per_account", ")", "for", "u", "in", "usages", ":", "if", "'account_usages'", "in", "u", ":", "for", "account_usage", "in", "u", "[", "'account_usages'", "]", ":", "account_usage", "[", "'account'", "]", "=", "account_usage", "[", "'account'", "]", ".", "external", "return", "[", "api_update_return_dict", "(", "u", ")", "for", "u", "in", "usages", "]" ]
https://github.com/rucio/rucio/blob/6d0d358e04f5431f0b9a98ae40f31af0ddff4833/lib/rucio/api/rse.py#L299-L317
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/smartthings/__init__.py
python
async_setup
(hass: HomeAssistant, config: ConfigType)
return True
Initialize the SmartThings platform.
Initialize the SmartThings platform.
[ "Initialize", "the", "SmartThings", "platform", "." ]
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Initialize the SmartThings platform.""" await setup_smartapp_endpoint(hass) return True
[ "async", "def", "async_setup", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "ConfigType", ")", "->", "bool", ":", "await", "setup_smartapp_endpoint", "(", "hass", ")", "return", "True" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/smartthings/__init__.py#L55-L58
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/cloud/clouds/azurearm.py
python
_get_block_blob_service
(kwargs=None)
return BlockBlobService( storage_account, storage_key, sas_token=sas_token, endpoint_suffix=endpoint_suffix, )
Get the block blob storage service.
Get the block blob storage service.
[ "Get", "the", "block", "blob", "storage", "service", "." ]
def _get_block_blob_service(kwargs=None): """ Get the block blob storage service. """ resource_group = kwargs.get("resource_group") or config.get_cloud_config_value( "resource_group", get_configured_provider(), __opts__, search_global=False ) sas_token = kwargs.get("sas_token") or config.get_cloud_config_value( "sas_token", get_configured_provider(), __opts__, search_global=False ) storage_account = kwargs.get("storage_account") or config.get_cloud_config_value( "storage_account", get_configured_provider(), __opts__, search_global=False ) storage_key = kwargs.get("storage_key") or config.get_cloud_config_value( "storage_key", get_configured_provider(), __opts__, search_global=False ) if not resource_group: raise SaltCloudSystemExit("A resource group must be specified") if not storage_account: raise SaltCloudSystemExit("A storage account must be specified") if not storage_key: storconn = get_conn(client_type="storage") storage_keys = storconn.storage_accounts.list_keys( resource_group, storage_account ) storage_keys = {v.key_name: v.value for v in storage_keys.keys} storage_key = next(iter(storage_keys.values())) cloud_env = _get_cloud_environment() endpoint_suffix = cloud_env.suffixes.storage_endpoint return BlockBlobService( storage_account, storage_key, sas_token=sas_token, endpoint_suffix=endpoint_suffix, )
[ "def", "_get_block_blob_service", "(", "kwargs", "=", "None", ")", ":", "resource_group", "=", "kwargs", ".", "get", "(", "\"resource_group\"", ")", "or", "config", ".", "get_cloud_config_value", "(", "\"resource_group\"", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ")", "sas_token", "=", "kwargs", ".", "get", "(", "\"sas_token\"", ")", "or", "config", ".", "get_cloud_config_value", "(", "\"sas_token\"", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ")", "storage_account", "=", "kwargs", ".", "get", "(", "\"storage_account\"", ")", "or", "config", ".", "get_cloud_config_value", "(", "\"storage_account\"", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ")", "storage_key", "=", "kwargs", ".", "get", "(", "\"storage_key\"", ")", "or", "config", ".", "get_cloud_config_value", "(", "\"storage_key\"", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ")", "if", "not", "resource_group", ":", "raise", "SaltCloudSystemExit", "(", "\"A resource group must be specified\"", ")", "if", "not", "storage_account", ":", "raise", "SaltCloudSystemExit", "(", "\"A storage account must be specified\"", ")", "if", "not", "storage_key", ":", "storconn", "=", "get_conn", "(", "client_type", "=", "\"storage\"", ")", "storage_keys", "=", "storconn", ".", "storage_accounts", ".", "list_keys", "(", "resource_group", ",", "storage_account", ")", "storage_keys", "=", "{", "v", ".", "key_name", ":", "v", ".", "value", "for", "v", "in", "storage_keys", ".", "keys", "}", "storage_key", "=", "next", "(", "iter", "(", "storage_keys", ".", "values", "(", ")", ")", ")", "cloud_env", "=", "_get_cloud_environment", "(", ")", "endpoint_suffix", "=", "cloud_env", ".", "suffixes", ".", "storage_endpoint", "return", "BlockBlobService", "(", "storage_account", ",", "storage_key", ",", "sas_token", "=", "sas_token", ",", "endpoint_suffix", "=", "endpoint_suffix", ",", ")" ]
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/cloud/clouds/azurearm.py#L1583-L1623
lightning-power-users/node-launcher
5a2a646b01803fe284fa18bee99c13c7cbf04498
node_launcher/node_set/lnd/lnd_client/rpc_pb2_grpc.py
python
LightningServicer.ListInvoices
(self, request, context)
* lncli: `listinvoices` ListInvoices returns a list of all the invoices currently stored within the database. Any active debug invoices are ignored. It has full support for paginated responses, allowing users to query for specific invoices through their add_index. This can be done by using either the first_index_offset or last_index_offset fields included in the response as the index_offset of the next request. The reversed flag is set by default in order to paginate backwards. If you wish to paginate forwards, you must explicitly set the flag to false. If none of the parameters are specified, then the last 100 invoices will be returned.
* lncli: `listinvoices` ListInvoices returns a list of all the invoices currently stored within the database. Any active debug invoices are ignored. It has full support for paginated responses, allowing users to query for specific invoices through their add_index. This can be done by using either the first_index_offset or last_index_offset fields included in the response as the index_offset of the next request. The reversed flag is set by default in order to paginate backwards. If you wish to paginate forwards, you must explicitly set the flag to false. If none of the parameters are specified, then the last 100 invoices will be returned.
[ "*", "lncli", ":", "listinvoices", "ListInvoices", "returns", "a", "list", "of", "all", "the", "invoices", "currently", "stored", "within", "the", "database", ".", "Any", "active", "debug", "invoices", "are", "ignored", ".", "It", "has", "full", "support", "for", "paginated", "responses", "allowing", "users", "to", "query", "for", "specific", "invoices", "through", "their", "add_index", ".", "This", "can", "be", "done", "by", "using", "either", "the", "first_index_offset", "or", "last_index_offset", "fields", "included", "in", "the", "response", "as", "the", "index_offset", "of", "the", "next", "request", ".", "The", "reversed", "flag", "is", "set", "by", "default", "in", "order", "to", "paginate", "backwards", ".", "If", "you", "wish", "to", "paginate", "forwards", "you", "must", "explicitly", "set", "the", "flag", "to", "false", ".", "If", "none", "of", "the", "parameters", "are", "specified", "then", "the", "last", "100", "invoices", "will", "be", "returned", "." ]
def ListInvoices(self, request, context): """* lncli: `listinvoices` ListInvoices returns a list of all the invoices currently stored within the database. Any active debug invoices are ignored. It has full support for paginated responses, allowing users to query for specific invoices through their add_index. This can be done by using either the first_index_offset or last_index_offset fields included in the response as the index_offset of the next request. The reversed flag is set by default in order to paginate backwards. If you wish to paginate forwards, you must explicitly set the flag to false. If none of the parameters are specified, then the last 100 invoices will be returned. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
[ "def", "ListInvoices", "(", "self", ",", "request", ",", "context", ")", ":", "context", ".", "set_code", "(", "grpc", ".", "StatusCode", ".", "UNIMPLEMENTED", ")", "context", ".", "set_details", "(", "'Method not implemented!'", ")", "raise", "NotImplementedError", "(", "'Method not implemented!'", ")" ]
https://github.com/lightning-power-users/node-launcher/blob/5a2a646b01803fe284fa18bee99c13c7cbf04498/node_launcher/node_set/lnd/lnd_client/rpc_pb2_grpc.py#L610-L624
DataDog/integrations-core
934674b29d94b70ccc008f76ea172d0cdae05e1e
gitlab_runner/datadog_checks/gitlab_runner/config_models/defaults.py
python
instance_metrics
(field, value)
return get_default_field_value(field, value)
[]
def instance_metrics(field, value): return get_default_field_value(field, value)
[ "def", "instance_metrics", "(", "field", ",", "value", ")", ":", "return", "get_default_field_value", "(", "field", ",", "value", ")" ]
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/gitlab_runner/datadog_checks/gitlab_runner/config_models/defaults.py#L149-L150
google/apitools
31cad2d904f356872d2965687e84b2d87ee2cdd3
apitools/base/py/base_api.py
python
BaseApiService.__ConstructQueryParams
(self, query_params, request, global_params)
return query_info
Construct a dictionary of query parameters for this request.
Construct a dictionary of query parameters for this request.
[ "Construct", "a", "dictionary", "of", "query", "parameters", "for", "this", "request", "." ]
def __ConstructQueryParams(self, query_params, request, global_params): """Construct a dictionary of query parameters for this request.""" # First, handle the global params. global_params = self.__CombineGlobalParams( global_params, self.__client.global_params) global_param_names = util.MapParamNames( [x.name for x in self.__client.params_type.all_fields()], self.__client.params_type) global_params_type = type(global_params) query_info = dict( (param, self.__FinalUrlValue(getattr(global_params, param), getattr(global_params_type, param))) for param in global_param_names) # Next, add the query params. query_param_names = util.MapParamNames(query_params, type(request)) request_type = type(request) query_info.update( (param, self.__FinalUrlValue(getattr(request, param, None), getattr(request_type, param))) for param in query_param_names) query_info = dict((k, v) for k, v in query_info.items() if v is not None) query_info = self.__EncodePrettyPrint(query_info) query_info = util.MapRequestParams(query_info, type(request)) return query_info
[ "def", "__ConstructQueryParams", "(", "self", ",", "query_params", ",", "request", ",", "global_params", ")", ":", "# First, handle the global params.", "global_params", "=", "self", ".", "__CombineGlobalParams", "(", "global_params", ",", "self", ".", "__client", ".", "global_params", ")", "global_param_names", "=", "util", ".", "MapParamNames", "(", "[", "x", ".", "name", "for", "x", "in", "self", ".", "__client", ".", "params_type", ".", "all_fields", "(", ")", "]", ",", "self", ".", "__client", ".", "params_type", ")", "global_params_type", "=", "type", "(", "global_params", ")", "query_info", "=", "dict", "(", "(", "param", ",", "self", ".", "__FinalUrlValue", "(", "getattr", "(", "global_params", ",", "param", ")", ",", "getattr", "(", "global_params_type", ",", "param", ")", ")", ")", "for", "param", "in", "global_param_names", ")", "# Next, add the query params.", "query_param_names", "=", "util", ".", "MapParamNames", "(", "query_params", ",", "type", "(", "request", ")", ")", "request_type", "=", "type", "(", "request", ")", "query_info", ".", "update", "(", "(", "param", ",", "self", ".", "__FinalUrlValue", "(", "getattr", "(", "request", ",", "param", ",", "None", ")", ",", "getattr", "(", "request_type", ",", "param", ")", ")", ")", "for", "param", "in", "query_param_names", ")", "query_info", "=", "dict", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "query_info", ".", "items", "(", ")", "if", "v", "is", "not", "None", ")", "query_info", "=", "self", ".", "__EncodePrettyPrint", "(", "query_info", ")", "query_info", "=", "util", ".", "MapRequestParams", "(", "query_info", ",", "type", "(", "request", ")", ")", "return", "query_info" ]
https://github.com/google/apitools/blob/31cad2d904f356872d2965687e84b2d87ee2cdd3/apitools/base/py/base_api.py#L553-L579
andresriancho/w3af
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
w3af/core/ui/gui/confpanel.py
python
OnlyOptions._changedAnyWidget
(self, like_initial)
Adjust the save/revert buttons and alert the tree of the change. :param like_initial: if the widgets are modified or not. It only will be called if any widget changed its state, through a propagation buffer.
Adjust the save/revert buttons and alert the tree of the change.
[ "Adjust", "the", "save", "/", "revert", "buttons", "and", "alert", "the", "tree", "of", "the", "change", "." ]
def _changedAnyWidget(self, like_initial): """Adjust the save/revert buttons and alert the tree of the change. :param like_initial: if the widgets are modified or not. It only will be called if any widget changed its state, through a propagation buffer. """ self.save_btn.set_sensitive(not like_initial) self.rvrt_btn.set_sensitive(not like_initial) self.parentwidg.config_changed(like_initial) self.saved_successfully = False
[ "def", "_changedAnyWidget", "(", "self", ",", "like_initial", ")", ":", "self", ".", "save_btn", ".", "set_sensitive", "(", "not", "like_initial", ")", "self", ".", "rvrt_btn", ".", "set_sensitive", "(", "not", "like_initial", ")", "self", ".", "parentwidg", ".", "config_changed", "(", "like_initial", ")", "self", ".", "saved_successfully", "=", "False" ]
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/core/ui/gui/confpanel.py#L158-L169
gbeced/pyalgotrade
ad2bcc6b25c06c66eee4a8d522ce844504d8ec62
pyalgotrade/tools/quandl.py
python
download_daily_bars
(sourceCode, tableCode, year, csvFile, authToken=None)
Download daily bars from Quandl for a given year. :param sourceCode: The dataset's source code. :type sourceCode: string. :param tableCode: The dataset's table code. :type tableCode: string. :param year: The year. :type year: int. :param csvFile: The path to the CSV file to write. :type csvFile: string. :param authToken: Optional. An authentication token needed if you're doing more than 50 calls per day. :type authToken: string.
Download daily bars from Quandl for a given year.
[ "Download", "daily", "bars", "from", "Quandl", "for", "a", "given", "year", "." ]
def download_daily_bars(sourceCode, tableCode, year, csvFile, authToken=None): """Download daily bars from Quandl for a given year. :param sourceCode: The dataset's source code. :type sourceCode: string. :param tableCode: The dataset's table code. :type tableCode: string. :param year: The year. :type year: int. :param csvFile: The path to the CSV file to write. :type csvFile: string. :param authToken: Optional. An authentication token needed if you're doing more than 50 calls per day. :type authToken: string. """ bars = download_csv(sourceCode, tableCode, datetime.date(year, 1, 1), datetime.date(year, 12, 31), "daily", authToken) f = open(csvFile, "w") f.write(bars) f.close()
[ "def", "download_daily_bars", "(", "sourceCode", ",", "tableCode", ",", "year", ",", "csvFile", ",", "authToken", "=", "None", ")", ":", "bars", "=", "download_csv", "(", "sourceCode", ",", "tableCode", ",", "datetime", ".", "date", "(", "year", ",", "1", ",", "1", ")", ",", "datetime", ".", "date", "(", "year", ",", "12", ",", "31", ")", ",", "\"daily\"", ",", "authToken", ")", "f", "=", "open", "(", "csvFile", ",", "\"w\"", ")", "f", ".", "write", "(", "bars", ")", "f", ".", "close", "(", ")" ]
https://github.com/gbeced/pyalgotrade/blob/ad2bcc6b25c06c66eee4a8d522ce844504d8ec62/pyalgotrade/tools/quandl.py#L49-L67
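A usage sketch for download_daily_bars; WIKI/AAPL is an illustrative (now archived) Quandl source/table pair, and real use may need an authToken:

from pyalgotrade.tools.quandl import download_daily_bars

# Fetch all 2013 daily bars for WIKI/AAPL and write them to a CSV file.
download_daily_bars("WIKI", "AAPL", 2013, "aapl-2013.csv", authToken=None)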
mypaint/mypaint
90b36dbc7b8bd2f323383f7edf608a5e0a3a1a33
gui/cursor.py
python
CustomCursorMaker._get_overlay_cursor
(self, icon_pixbuf, cursor_name=Name.ARROW)
return cursor
Returns an overlay cursor. Not cached. :param icon_pixbuf: a GdkPixbuf.Pixbuf containing a small (~22px) image, or None :param cursor_name: name of a pixmaps/ cursor image to use for the pointer part, minus the .png The overlay icon will be overlaid to the bottom and right of the returned cursor image.
Returns an overlay cursor. Not cached.
[ "Returns", "an", "overlay", "cursor", ".", "Not", "cached", "." ]
def _get_overlay_cursor(self, icon_pixbuf, cursor_name=Name.ARROW): """Returns an overlay cursor. Not cached. :param icon_pixbuf: a GdkPixbuf.Pixbuf containing a small (~22px) image, or None :param cursor_name: name of a pixmaps/ cursor image to use for the pointer part, minus the .png The overlay icon will be overlaid to the bottom and right of the returned cursor image. """ pointer_pixbuf = getattr(self.app.pixmaps, cursor_name) pointer_w = pointer_pixbuf.get_width() pointer_h = pointer_pixbuf.get_height() hot_x, hot_y = self.CURSOR_HOTSPOTS.get(cursor_name, (None, None)) if hot_x is None: hot_x = 1 hot_y = 1 cursor_pixbuf = GdkPixbuf.Pixbuf.new(GdkPixbuf.Colorspace.RGB, True, 8, 32, 32) cursor_pixbuf.fill(0x00000000) pointer_pixbuf.composite( cursor_pixbuf, 0, 0, pointer_w, pointer_h, 0, 0, 1, 1, GdkPixbuf.InterpType.NEAREST, 255 ) if icon_pixbuf is not None: icon_w = icon_pixbuf.get_width() icon_h = icon_pixbuf.get_height() icon_x = 32 - icon_w icon_y = 32 - icon_h icon_pixbuf.composite( cursor_pixbuf, icon_x, icon_y, icon_w, icon_h, icon_x, icon_y, 1, 1, GdkPixbuf.InterpType.NEAREST, 255 ) display = self.app.drawWindow.get_display() cursor = Gdk.Cursor.new_from_pixbuf(display, cursor_pixbuf, hot_x, hot_y) return cursor
[ "def", "_get_overlay_cursor", "(", "self", ",", "icon_pixbuf", ",", "cursor_name", "=", "Name", ".", "ARROW", ")", ":", "pointer_pixbuf", "=", "getattr", "(", "self", ".", "app", ".", "pixmaps", ",", "cursor_name", ")", "pointer_w", "=", "pointer_pixbuf", ".", "get_width", "(", ")", "pointer_h", "=", "pointer_pixbuf", ".", "get_height", "(", ")", "hot_x", ",", "hot_y", "=", "self", ".", "CURSOR_HOTSPOTS", ".", "get", "(", "cursor_name", ",", "(", "None", ",", "None", ")", ")", "if", "hot_x", "is", "None", ":", "hot_x", "=", "1", "hot_y", "=", "1", "cursor_pixbuf", "=", "GdkPixbuf", ".", "Pixbuf", ".", "new", "(", "GdkPixbuf", ".", "Colorspace", ".", "RGB", ",", "True", ",", "8", ",", "32", ",", "32", ")", "cursor_pixbuf", ".", "fill", "(", "0x00000000", ")", "pointer_pixbuf", ".", "composite", "(", "cursor_pixbuf", ",", "0", ",", "0", ",", "pointer_w", ",", "pointer_h", ",", "0", ",", "0", ",", "1", ",", "1", ",", "GdkPixbuf", ".", "InterpType", ".", "NEAREST", ",", "255", ")", "if", "icon_pixbuf", "is", "not", "None", ":", "icon_w", "=", "icon_pixbuf", ".", "get_width", "(", ")", "icon_h", "=", "icon_pixbuf", ".", "get_height", "(", ")", "icon_x", "=", "32", "-", "icon_w", "icon_y", "=", "32", "-", "icon_h", "icon_pixbuf", ".", "composite", "(", "cursor_pixbuf", ",", "icon_x", ",", "icon_y", ",", "icon_w", ",", "icon_h", ",", "icon_x", ",", "icon_y", ",", "1", ",", "1", ",", "GdkPixbuf", ".", "InterpType", ".", "NEAREST", ",", "255", ")", "display", "=", "self", ".", "app", ".", "drawWindow", ".", "get_display", "(", ")", "cursor", "=", "Gdk", ".", "Cursor", ".", "new_from_pixbuf", "(", "display", ",", "cursor_pixbuf", ",", "hot_x", ",", "hot_y", ")", "return", "cursor" ]
https://github.com/mypaint/mypaint/blob/90b36dbc7b8bd2f323383f7edf608a5e0a3a1a33/gui/cursor.py#L313-L355
pyparallel/pyparallel
11e8c6072d48c8f13641925d17b147bf36ee0ba3
Lib/distutils/command/install.py
python
install.handle_extra_path
(self)
Set `path_file` and `extra_dirs` using `extra_path`.
Set `path_file` and `extra_dirs` using `extra_path`.
[ "Set", "path_file", "and", "extra_dirs", "using", "extra_path", "." ]
def handle_extra_path(self): """Set `path_file` and `extra_dirs` using `extra_path`.""" if self.extra_path is None: self.extra_path = self.distribution.extra_path if self.extra_path is not None: if isinstance(self.extra_path, str): self.extra_path = self.extra_path.split(',') if len(self.extra_path) == 1: path_file = extra_dirs = self.extra_path[0] elif len(self.extra_path) == 2: path_file, extra_dirs = self.extra_path else: raise DistutilsOptionError( "'extra_path' option must be a list, tuple, or " "comma-separated string with 1 or 2 elements") # convert to local form in case Unix notation used (as it # should be in setup scripts) extra_dirs = convert_path(extra_dirs) else: path_file = None extra_dirs = '' # XXX should we warn if path_file and not extra_dirs? (in which # case the path file would be harmless but pointless) self.path_file = path_file self.extra_dirs = extra_dirs
[ "def", "handle_extra_path", "(", "self", ")", ":", "if", "self", ".", "extra_path", "is", "None", ":", "self", ".", "extra_path", "=", "self", ".", "distribution", ".", "extra_path", "if", "self", ".", "extra_path", "is", "not", "None", ":", "if", "isinstance", "(", "self", ".", "extra_path", ",", "str", ")", ":", "self", ".", "extra_path", "=", "self", ".", "extra_path", ".", "split", "(", "','", ")", "if", "len", "(", "self", ".", "extra_path", ")", "==", "1", ":", "path_file", "=", "extra_dirs", "=", "self", ".", "extra_path", "[", "0", "]", "elif", "len", "(", "self", ".", "extra_path", ")", "==", "2", ":", "path_file", ",", "extra_dirs", "=", "self", ".", "extra_path", "else", ":", "raise", "DistutilsOptionError", "(", "\"'extra_path' option must be a list, tuple, or \"", "\"comma-separated string with 1 or 2 elements\"", ")", "# convert to local form in case Unix notation used (as it", "# should be in setup scripts)", "extra_dirs", "=", "convert_path", "(", "extra_dirs", ")", "else", ":", "path_file", "=", "None", "extra_dirs", "=", "''", "# XXX should we warn if path_file and not extra_dirs? (in which", "# case the path file would be harmless but pointless)", "self", ".", "path_file", "=", "path_file", "self", ".", "extra_dirs", "=", "extra_dirs" ]
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/distutils/command/install.py#L517-L545
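The comma-splitting rule in handle_extra_path is easy to miss; this hypothetical standalone helper mirrors the same logic for string-valued extra_path, so the two accepted forms can be checked directly:

from distutils.util import convert_path

def split_extra_path(extra_path):
    # Mirrors install.handle_extra_path for a string-valued extra_path.
    parts = extra_path.split(',')
    if len(parts) == 1:
        path_file = extra_dirs = parts[0]
    elif len(parts) == 2:
        path_file, extra_dirs = parts
    else:
        raise ValueError("extra_path must have 1 or 2 comma-separated elements")
    return path_file, convert_path(extra_dirs)

print(split_extra_path("mypkg"))            # ('mypkg', 'mypkg')
print(split_extra_path("mypkg,mypkg/lib"))  # ('mypkg', 'mypkg/lib')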
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/apps/reports/filters/select.py
python
SelectOpenCloseFilter.options
(self)
return [ ('open', _("Only Open")), ('closed', _("Only Closed")), ]
[]
def options(self): return [ ('open', _("Only Open")), ('closed', _("Only Closed")), ]
[ "def", "options", "(", "self", ")", ":", "return", "[", "(", "'open'", ",", "_", "(", "\"Only Open\"", ")", ")", ",", "(", "'closed'", ",", "_", "(", "\"Only Closed\"", ")", ")", ",", "]" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/apps/reports/filters/select.py#L95-L99
aertslab/pySCENIC
c66455cdc0d736d67963b218a8ebce61dc4eef82
src/pyscenic/rss.py
python
regulon_specificity_scores
(auc_mtx, cell_type_series)
return pd.DataFrame(data=rss_values, index=cell_types, columns=regulons)
Calculates the Regulon Specificity Scores (RSS). [doi: 10.1016/j.celrep.2018.10.045] :param auc_mtx: The dataframe with the AUC values for all cells and regulons (n_cells x n_regulons). :param cell_type_series: A pandas Series object with cell identifiers as index and cell type labels as values. :return: A pandas dataframe with the RSS values (cell type x regulon).
Calculates the Regulon Specificity Scores (RSS). [doi: 10.1016/j.celrep.2018.10.045]
[ "Calculates", "the", "Regulon", "Specificity", "Scores", "(", "RSS", ")", ".", "[", "doi", ":", "10", ".", "1016", "/", "j", ".", "celrep", ".", "2018", ".", "10", ".", "045", "]" ]
def regulon_specificity_scores(auc_mtx, cell_type_series): """ Calculates the Regulon Specificity Scores (RSS). [doi: 10.1016/j.celrep.2018.10.045] :param auc_mtx: The dataframe with the AUC values for all cells and regulons (n_cells x n_regulons). :param cell_type_series: A pandas Series object with cell identifiers as index and cell type labels as values. :return: A pandas dataframe with the RSS values (cell type x regulon). """ cell_types = list(cell_type_series.unique()) n_types = len(cell_types) regulons = list(auc_mtx.columns) n_regulons = len(regulons) rss_values = np.empty(shape=(n_types, n_regulons), dtype=np.float) def rss(aucs, labels): # jensenshannon function provides distance which is the sqrt of the JS divergence. return 1.0 - jensenshannon(aucs / aucs.sum(), labels / labels.sum()) for cidx, regulon_name in enumerate(regulons): for ridx, cell_type in enumerate(cell_types): rss_values[ridx, cidx] = rss(auc_mtx[regulon_name], (cell_type_series == cell_type).astype(int)) return pd.DataFrame(data=rss_values, index=cell_types, columns=regulons)
[ "def", "regulon_specificity_scores", "(", "auc_mtx", ",", "cell_type_series", ")", ":", "cell_types", "=", "list", "(", "cell_type_series", ".", "unique", "(", ")", ")", "n_types", "=", "len", "(", "cell_types", ")", "regulons", "=", "list", "(", "auc_mtx", ".", "columns", ")", "n_regulons", "=", "len", "(", "regulons", ")", "rss_values", "=", "np", ".", "empty", "(", "shape", "=", "(", "n_types", ",", "n_regulons", ")", ",", "dtype", "=", "np", ".", "float", ")", "def", "rss", "(", "aucs", ",", "labels", ")", ":", "# jensenshannon function provides distance which is the sqrt of the JS divergence.", "return", "1.0", "-", "jensenshannon", "(", "aucs", "/", "aucs", ".", "sum", "(", ")", ",", "labels", "/", "labels", ".", "sum", "(", ")", ")", "for", "cidx", ",", "regulon_name", "in", "enumerate", "(", "regulons", ")", ":", "for", "ridx", ",", "cell_type", "in", "enumerate", "(", "cell_types", ")", ":", "rss_values", "[", "ridx", ",", "cidx", "]", "=", "rss", "(", "auc_mtx", "[", "regulon_name", "]", ",", "(", "cell_type_series", "==", "cell_type", ")", ".", "astype", "(", "int", ")", ")", "return", "pd", ".", "DataFrame", "(", "data", "=", "rss_values", ",", "index", "=", "cell_types", ",", "columns", "=", "regulons", ")" ]
https://github.com/aertslab/pySCENIC/blob/c66455cdc0d736d67963b218a8ebce61dc4eef82/src/pyscenic/rss.py#L8-L31
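A toy call for the RSS record above, assuming pySCENIC is installed and a NumPy version where the np.float alias used by the source still exists; the regulon and cell-type names are invented:

import numpy as np
import pandas as pd
from pyscenic.rss import regulon_specificity_scores

auc = pd.DataFrame(np.random.rand(100, 2), columns=["Sox2(+)", "Olig2(+)"])  # made-up regulons
cell_types = pd.Series(np.random.choice(["neuron", "glia"], size=100), index=auc.index)
rss = regulon_specificity_scores(auc, cell_types)
print(rss.shape)  # (2, 2): one row per cell type, one column per regulon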
edisonlz/fastor
342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3
base/site-packages/tencentcloud/partners/v20180321/models.py
python
DescribeAgentBillsResponse.__init__
(self)
:param TotalCount: Total number of entries matching the query :type TotalCount: int :param AgentBillSet: List of business detail records :type AgentBillSet: list of AgentBillElem :param RequestId: Unique request ID, returned with every request. Provide the RequestId of the request when locating a problem. :type RequestId: str
:param TotalCount: Total number of entries matching the query :type TotalCount: int :param AgentBillSet: List of business detail records :type AgentBillSet: list of AgentBillElem :param RequestId: Unique request ID, returned with every request. Provide the RequestId of the request when locating a problem. :type RequestId: str
[ ":", "param", "TotalCount", ":", "Total", "number", "of", "entries", "matching", "the", "query", ":", "type", "TotalCount", ":", "int", ":", "param", "AgentBillSet", ":", "List", "of", "business", "detail", "records", ":", "type", "AgentBillSet", ":", "list", "of", "AgentBillElem", ":", "param", "RequestId", ":", "Unique", "request", "ID", ",", "returned", "with", "every", "request", ".", "Provide", "the", "RequestId", "of", "the", "request", "when", "locating", "a", "problem", ".", ":", "type", "RequestId", ":", "str" ]
def __init__(self): """ :param TotalCount: Total number of entries matching the query :type TotalCount: int :param AgentBillSet: List of business detail records :type AgentBillSet: list of AgentBillElem :param RequestId: Unique request ID, returned with every request. Provide the RequestId of the request when locating a problem. :type RequestId: str """ self.TotalCount = None self.AgentBillSet = None self.RequestId = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "TotalCount", "=", "None", "self", ".", "AgentBillSet", "=", "None", "self", ".", "RequestId", "=", "None" ]
https://github.com/edisonlz/fastor/blob/342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3/base/site-packages/tencentcloud/partners/v20180321/models.py#L217-L228
PokemonGoF/PokemonGo-Bot-Desktop
4bfa94f0183406c6a86f93645eff7abd3ad4ced8
build/pywin/Lib/decimal.py
python
Decimal._power_exact
(self, other, p)
return _dec_from_triple(0, str_xc+'0'*zeros, xe-zeros)
Attempt to compute self**other exactly. Given Decimals self and other and an integer p, attempt to compute an exact result for the power self**other, with p digits of precision. Return None if self**other is not exactly representable in p digits. Assumes that elimination of special cases has already been performed: self and other must both be nonspecial; self must be positive and not numerically equal to 1; other must be nonzero. For efficiency, other._exp should not be too large, so that 10**abs(other._exp) is a feasible calculation.
Attempt to compute self**other exactly.
[ "Attempt", "to", "compute", "self", "**", "other", "exactly", "." ]
def _power_exact(self, other, p): """Attempt to compute self**other exactly. Given Decimals self and other and an integer p, attempt to compute an exact result for the power self**other, with p digits of precision. Return None if self**other is not exactly representable in p digits. Assumes that elimination of special cases has already been performed: self and other must both be nonspecial; self must be positive and not numerically equal to 1; other must be nonzero. For efficiency, other._exp should not be too large, so that 10**abs(other._exp) is a feasible calculation.""" # In the comments below, we write x for the value of self and y for the # value of other. Write x = xc*10**xe and abs(y) = yc*10**ye, with xc # and yc positive integers not divisible by 10. # The main purpose of this method is to identify the *failure* # of x**y to be exactly representable with as little effort as # possible. So we look for cheap and easy tests that # eliminate the possibility of x**y being exact. Only if all # these tests are passed do we go on to actually compute x**y. # Here's the main idea. Express y as a rational number m/n, with m and # n relatively prime and n>0. Then for x**y to be exactly # representable (at *any* precision), xc must be the nth power of a # positive integer and xe must be divisible by n. If y is negative # then additionally xc must be a power of either 2 or 5, hence a power # of 2**n or 5**n. # # There's a limit to how small |y| can be: if y=m/n as above # then: # # (1) if xc != 1 then for the result to be representable we # need xc**(1/n) >= 2, and hence also xc**|y| >= 2. So # if |y| <= 1/nbits(xc) then xc < 2**nbits(xc) <= # 2**(1/|y|), hence xc**|y| < 2 and the result is not # representable. # # (2) if xe != 0, |xe|*(1/n) >= 1, so |xe|*|y| >= 1. Hence if # |y| < 1/|xe| then the result is not representable. # # Note that since x is not equal to 1, at least one of (1) and # (2) must apply. Now |y| < 1/nbits(xc) iff |yc|*nbits(xc) < # 10**-ye iff len(str(|yc|*nbits(xc)) <= -ye. # # There's also a limit to how large y can be, at least if it's # positive: the normalized result will have coefficient xc**y, # so if it's representable then xc**y < 10**p, and y < # p/log10(xc). Hence if y*log10(xc) >= p then the result is # not exactly representable. # if len(str(abs(yc*xe)) <= -ye then abs(yc*xe) < 10**-ye, # so |y| < 1/xe and the result is not representable. # Similarly, len(str(abs(yc)*xc_bits)) <= -ye implies |y| # < 1/nbits(xc). x = _WorkRep(self) xc, xe = x.int, x.exp while xc % 10 == 0: xc //= 10 xe += 1 y = _WorkRep(other) yc, ye = y.int, y.exp while yc % 10 == 0: yc //= 10 ye += 1 # case where xc == 1: result is 10**(xe*y), with xe*y # required to be an integer if xc == 1: xe *= yc # result is now 10**(xe * 10**ye); xe * 10**ye must be integral while xe % 10 == 0: xe //= 10 ye += 1 if ye < 0: return None exponent = xe * 10**ye if y.sign == 1: exponent = -exponent # if other is a nonnegative integer, use ideal exponent if other._isinteger() and other._sign == 0: ideal_exponent = self._exp*int(other) zeros = min(exponent-ideal_exponent, p-1) else: zeros = 0 return _dec_from_triple(0, '1' + '0'*zeros, exponent-zeros) # case where y is negative: xc must be either a power # of 2 or a power of 5. if y.sign == 1: last_digit = xc % 10 if last_digit in (2,4,6,8): # quick test for power of 2 if xc & -xc != xc: return None # now xc is a power of 2; e is its exponent e = _nbits(xc)-1 # We now have: # # x = 2**e * 10**xe, e > 0, and y < 0. # # The exact result is: # # x**y = 5**(-e*y) * 10**(e*y + xe*y) # # provided that both e*y and xe*y are integers. Note that if # 5**(-e*y) >= 10**p, then the result can't be expressed # exactly with p digits of precision. # # Using the above, we can guard against large values of ye. # 93/65 is an upper bound for log(10)/log(5), so if # # ye >= len(str(93*p//65)) # # then # # -e*y >= -y >= 10**ye > 93*p/65 > p*log(10)/log(5), # # so 5**(-e*y) >= 10**p, and the coefficient of the result # can't be expressed in p digits. # emax >= largest e such that 5**e < 10**p. emax = p*93//65 if ye >= len(str(emax)): return None # Find -e*y and -xe*y; both must be integers e = _decimal_lshift_exact(e * yc, ye) xe = _decimal_lshift_exact(xe * yc, ye) if e is None or xe is None: return None if e > emax: return None xc = 5**e elif last_digit == 5: # e >= log_5(xc) if xc is a power of 5; we have # equality all the way up to xc=5**2658 e = _nbits(xc)*28//65 xc, remainder = divmod(5**e, xc) if remainder: return None while xc % 5 == 0: xc //= 5 e -= 1 # Guard against large values of ye, using the same logic as in # the 'xc is a power of 2' branch. 10/3 is an upper bound for # log(10)/log(2). emax = p*10//3 if ye >= len(str(emax)): return None e = _decimal_lshift_exact(e * yc, ye) xe = _decimal_lshift_exact(xe * yc, ye) if e is None or xe is None: return None if e > emax: return None xc = 2**e else: return None if xc >= 10**p: return None xe = -e-xe return _dec_from_triple(0, str(xc), xe) # now y is positive; find m and n such that y = m/n if ye >= 0: m, n = yc*10**ye, 1 else: if xe != 0 and len(str(abs(yc*xe))) <= -ye: return None xc_bits = _nbits(xc) if xc != 1 and len(str(abs(yc)*xc_bits)) <= -ye: return None m, n = yc, 10**(-ye) while m % 2 == n % 2 == 0: m //= 2 n //= 2 while m % 5 == n % 5 == 0: m //= 5 n //= 5 # compute nth root of xc*10**xe if n > 1: # if 1 < xc < 2**n then xc isn't an nth power if xc != 1 and xc_bits <= n: return None xe, rem = divmod(xe, n) if rem != 0: return None # compute nth root of xc using Newton's method a = 1L << -(-_nbits(xc)//n) # initial estimate while True: q, r = divmod(xc, a**(n-1)) if a <= q: break else: a = (a*(n-1) + q)//n if not (a == q and r == 0): return None xc = a # now xc*10**xe is the nth root of the original xc*10**xe # compute mth power of xc*10**xe # if m > p*100//_log10_lb(xc) then m > p/log10(xc), hence xc**m > # 10**p and the result is not representable. if xc > 1 and m > p*100//_log10_lb(xc): return None xc = xc**m xe *= m if xc > 10**p: return None # by this point the result *is* exactly representable # adjust the exponent to get as close as possible to the ideal # exponent, if necessary str_xc = str(xc) if other._isinteger() and other._sign == 0: ideal_exponent = self._exp*int(other) zeros = min(xe-ideal_exponent, p-len(str_xc)) else: zeros = 0 return _dec_from_triple(0, str_xc+'0'*zeros, xe-zeros)
[ "def", "_power_exact", "(", "self", ",", "other", ",", "p", ")", ":", "# In the comments below, we write x for the value of self and y for the", "# value of other. Write x = xc*10**xe and abs(y) = yc*10**ye, with xc", "# and yc positive integers not divisible by 10.", "# The main purpose of this method is to identify the *failure*", "# of x**y to be exactly representable with as little effort as", "# possible. So we look for cheap and easy tests that", "# eliminate the possibility of x**y being exact. Only if all", "# these tests are passed do we go on to actually compute x**y.", "# Here's the main idea. Express y as a rational number m/n, with m and", "# n relatively prime and n>0. Then for x**y to be exactly", "# representable (at *any* precision), xc must be the nth power of a", "# positive integer and xe must be divisible by n. If y is negative", "# then additionally xc must be a power of either 2 or 5, hence a power", "# of 2**n or 5**n.", "#", "# There's a limit to how small |y| can be: if y=m/n as above", "# then:", "#", "# (1) if xc != 1 then for the result to be representable we", "# need xc**(1/n) >= 2, and hence also xc**|y| >= 2. So", "# if |y| <= 1/nbits(xc) then xc < 2**nbits(xc) <=", "# 2**(1/|y|), hence xc**|y| < 2 and the result is not", "# representable.", "#", "# (2) if xe != 0, |xe|*(1/n) >= 1, so |xe|*|y| >= 1. Hence if", "# |y| < 1/|xe| then the result is not representable.", "#", "# Note that since x is not equal to 1, at least one of (1) and", "# (2) must apply. Now |y| < 1/nbits(xc) iff |yc|*nbits(xc) <", "# 10**-ye iff len(str(|yc|*nbits(xc)) <= -ye.", "#", "# There's also a limit to how large y can be, at least if it's", "# positive: the normalized result will have coefficient xc**y,", "# so if it's representable then xc**y < 10**p, and y <", "# p/log10(xc). Hence if y*log10(xc) >= p then the result is", "# not exactly representable.", "# if len(str(abs(yc*xe)) <= -ye then abs(yc*xe) < 10**-ye,", "# so |y| < 1/xe and the result is not representable.", "# Similarly, len(str(abs(yc)*xc_bits)) <= -ye implies |y|", "# < 1/nbits(xc).", "x", "=", "_WorkRep", "(", "self", ")", "xc", ",", "xe", "=", "x", ".", "int", ",", "x", ".", "exp", "while", "xc", "%", "10", "==", "0", ":", "xc", "//=", "10", "xe", "+=", "1", "y", "=", "_WorkRep", "(", "other", ")", "yc", ",", "ye", "=", "y", ".", "int", ",", "y", ".", "exp", "while", "yc", "%", "10", "==", "0", ":", "yc", "//=", "10", "ye", "+=", "1", "# case where xc == 1: result is 10**(xe*y), with xe*y", "# required to be an integer", "if", "xc", "==", "1", ":", "xe", "*=", "yc", "# result is now 10**(xe * 10**ye); xe * 10**ye must be integral", "while", "xe", "%", "10", "==", "0", ":", "xe", "//=", "10", "ye", "+=", "1", "if", "ye", "<", "0", ":", "return", "None", "exponent", "=", "xe", "*", "10", "**", "ye", "if", "y", ".", "sign", "==", "1", ":", "exponent", "=", "-", "exponent", "# if other is a nonnegative integer, use ideal exponent", "if", "other", ".", "_isinteger", "(", ")", "and", "other", ".", "_sign", "==", "0", ":", "ideal_exponent", "=", "self", ".", "_exp", "*", "int", "(", "other", ")", "zeros", "=", "min", "(", "exponent", "-", "ideal_exponent", ",", "p", "-", "1", ")", "else", ":", "zeros", "=", "0", "return", "_dec_from_triple", "(", "0", ",", "'1'", "+", "'0'", "*", "zeros", ",", "exponent", "-", "zeros", ")", "# case where y is negative: xc must be either a power", "# of 2 or a power of 5.", "if", "y", ".", "sign", "==", "1", ":", "last_digit", "=", "xc", "%", "10", "if", "last_digit", "in", "(", "2", ",", "4", ",", "6", ",", "8", ")", ":", "# quick test for power of 2", "if", "xc", "&", "-", "xc", "!=", "xc", ":", "return", "None", "# now xc is a power of 2; e is its exponent", "e", "=", "_nbits", "(", "xc", ")", "-", "1", "# We now have:", "#", "# x = 2**e * 10**xe, e > 0, and y < 0.", "#", "# The exact result is:", "#", "# x**y = 5**(-e*y) * 10**(e*y + xe*y)", "#", "# provided that both e*y and xe*y are integers. Note that if", "# 5**(-e*y) >= 10**p, then the result can't be expressed", "# exactly with p digits of precision.", "#", "# Using the above, we can guard against large values of ye.", "# 93/65 is an upper bound for log(10)/log(5), so if", "#", "# ye >= len(str(93*p//65))", "#", "# then", "#", "# -e*y >= -y >= 10**ye > 93*p/65 > p*log(10)/log(5),", "#", "# so 5**(-e*y) >= 10**p, and the coefficient of the result", "# can't be expressed in p digits.", "# emax >= largest e such that 5**e < 10**p.", "emax", "=", "p", "*", "93", "//", "65", "if", "ye", ">=", "len", "(", "str", "(", "emax", ")", ")", ":", "return", "None", "# Find -e*y and -xe*y; both must be integers", "e", "=", "_decimal_lshift_exact", "(", "e", "*", "yc", ",", "ye", ")", "xe", "=", "_decimal_lshift_exact", "(", "xe", "*", "yc", ",", "ye", ")", "if", "e", "is", "None", "or", "xe", "is", "None", ":", "return", "None", "if", "e", ">", "emax", ":", "return", "None", "xc", "=", "5", "**", "e", "elif", "last_digit", "==", "5", ":", "# e >= log_5(xc) if xc is a power of 5; we have", "# equality all the way up to xc=5**2658", "e", "=", "_nbits", "(", "xc", ")", "*", "28", "//", "65", "xc", ",", "remainder", "=", "divmod", "(", "5", "**", "e", ",", "xc", ")", "if", "remainder", ":", "return", "None", "while", "xc", "%", "5", "==", "0", ":", "xc", "//=", "5", "e", "-=", "1", "# Guard against large values of ye, using the same logic as in", "# the 'xc is a power of 2' branch. 10/3 is an upper bound for", "# log(10)/log(2).", "emax", "=", "p", "*", "10", "//", "3", "if", "ye", ">=", "len", "(", "str", "(", "emax", ")", ")", ":", "return", "None", "e", "=", "_decimal_lshift_exact", "(", "e", "*", "yc", ",", "ye", ")", "xe", "=", "_decimal_lshift_exact", "(", "xe", "*", "yc", ",", "ye", ")", "if", "e", "is", "None", "or", "xe", "is", "None", ":", "return", "None", "if", "e", ">", "emax", ":", "return", "None", "xc", "=", "2", "**", "e", "else", ":", "return", "None", "if", "xc", ">=", "10", "**", "p", ":", "return", "None", "xe", "=", "-", "e", "-", "xe", "return", "_dec_from_triple", "(", "0", ",", "str", "(", "xc", ")", ",", "xe", ")", "# now y is positive; find m and n such that y = m/n", "if", "ye", ">=", "0", ":", "m", ",", "n", "=", "yc", "*", "10", "**", "ye", ",", "1", "else", ":", "if", "xe", "!=", "0", "and", "len", "(", "str", "(", "abs", "(", "yc", "*", "xe", ")", ")", ")", "<=", "-", "ye", ":", "return", "None", "xc_bits", "=", "_nbits", "(", "xc", ")", "if", "xc", "!=", "1", "and", "len", "(", "str", "(", "abs", "(", "yc", ")", "*", "xc_bits", ")", ")", "<=", "-", "ye", ":", "return", "None", "m", ",", "n", "=", "yc", ",", "10", "**", "(", "-", "ye", ")", "while", "m", "%", "2", "==", "n", "%", "2", "==", "0", ":", "m", "//=", "2", "n", "//=", "2", "while", "m", "%", "5", "==", "n", "%", "5", "==", "0", ":", "m", "//=", "5", "n", "//=", "5", "# compute nth root of xc*10**xe", "if", "n", ">", "1", ":", "# if 1 < xc < 2**n then xc isn't an nth power", "if", "xc", "!=", "1", "and", "xc_bits", "<=", "n", ":", "return", "None", "xe", ",", "rem", "=", "divmod", "(", "xe", ",", "n", ")", "if", "rem", "!=", "0", ":", "return", "None", "# compute nth root of xc using Newton's method", "a", "=", "1L", "<<", "-", "(", "-", "_nbits", "(", "xc", ")", "//", "n", ")", "# initial estimate", "while", "True", ":", "q", ",", "r", "=", "divmod", "(", "xc", ",", "a", "**", "(", "n", "-", "1", ")", ")", "if", "a", "<=", "q", ":", "break", "else", ":", "a", "=", "(", "a", "*", "(", "n", "-", "1", ")", "+", "q", ")", "//", "n", "if", "not", "(", "a", "==", "q", "and", "r", "==", "0", ")", ":", "return", "None", "xc", "=", "a", "# now xc*10**xe is the nth root of the original xc*10**xe", "# compute mth power of xc*10**xe", "# if m > p*100//_log10_lb(xc) then m > p/log10(xc), hence xc**m >", "# 10**p and the result is not representable.", "if", "xc", ">", "1", "and", "m", ">", "p", "*", "100", "//", "_log10_lb", "(", "xc", ")", ":", "return", "None", "xc", "=", "xc", "**", "m", "xe", "*=", "m", "if", "xc", ">", "10", "**", "p", ":", "return", "None", "# by this point the result *is* exactly representable", "# adjust the exponent to get as close as possible to the ideal", "# exponent, if necessary", "str_xc", "=", "str", "(", "xc", ")", "if", "other", ".", "_isinteger", "(", ")", "and", "other", ".", "_sign", "==", "0", ":", "ideal_exponent", "=", "self", ".", "_exp", "*", "int", "(", "other", ")", "zeros", "=", "min", "(", "xe", "-", "ideal_exponent", ",", "p", "-", "len", "(", "str_xc", ")", ")", "else", ":", "zeros", "=", "0", "return", "_dec_from_triple", "(", "0", ",", "str_xc", "+", "'0'", "*", "zeros", ",", "xe", "-", "zeros", ")" ]
https://github.com/PokemonGoF/PokemonGo-Bot-Desktop/blob/4bfa94f0183406c6a86f93645eff7abd3ad4ced8/build/pywin/Lib/decimal.py#L1936-L2171
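_power_exact is private, but its effect is visible through the stdlib decimal power operator: an exact result is returned only when it fits the context precision, otherwise the correctly-rounded inexact path is taken:

from decimal import Decimal, getcontext

getcontext().prec = 28
print(Decimal(4) ** Decimal("0.5"))  # Decimal('2'): exactly representable, the exact path succeeds
print(Decimal(2) ** Decimal("0.5"))  # Decimal('1.414213562373095048801688724'): inexact fallback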
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/modules/kmod.py
python
_new_mods
(pre_mods, post_mods)
return post - pre
Return a list of the new modules, pass an lsmod dict before running modprobe and one after modprobe has run
Return a list of the new modules, pass an lsmod dict before running modprobe and one after modprobe has run
[ "Return", "a", "list", "of", "the", "new", "modules", "pass", "an", "lsmod", "dict", "before", "running", "modprobe", "and", "one", "after", "modprobe", "has", "run" ]
def _new_mods(pre_mods, post_mods): """ Return a list of the new modules, pass an lsmod dict before running modprobe and one after modprobe has run """ pre = set() post = set() for mod in pre_mods: pre.add(mod["module"]) for mod in post_mods: post.add(mod["module"]) return post - pre
[ "def", "_new_mods", "(", "pre_mods", ",", "post_mods", ")", ":", "pre", "=", "set", "(", ")", "post", "=", "set", "(", ")", "for", "mod", "in", "pre_mods", ":", "pre", ".", "add", "(", "mod", "[", "\"module\"", "]", ")", "for", "mod", "in", "post_mods", ":", "post", ".", "add", "(", "mod", "[", "\"module\"", "]", ")", "return", "post", "-", "pre" ]
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/modules/kmod.py#L22-L33
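A self-contained check of the set difference above, using fake lsmod-style dicts (module names invented); the import path follows the record:

from salt.modules.kmod import _new_mods

pre = [{"module": "ext4"}, {"module": "xfs"}]
post = [{"module": "ext4"}, {"module": "xfs"}, {"module": "kvm"}]
assert _new_mods(pre, post) == {"kvm"}  # only the 'module' keys are compared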
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_objectvalidator.py
python
main
()
ansible oc module for validating OpenShift objects
ansible oc module for validating OpenShift objects
[ "ansible", "oc", "module", "for", "validating", "OpenShift", "objects" ]
def main(): ''' ansible oc module for validating OpenShift objects ''' module = AnsibleModule( argument_spec=dict( kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'), ), supports_check_mode=False, ) rval = OCObjectValidator.run_ansible(module.params) if 'failed' in rval: module.fail_json(**rval) module.exit_json(**rval)
[ "def", "main", "(", ")", ":", "module", "=", "AnsibleModule", "(", "argument_spec", "=", "dict", "(", "kubeconfig", "=", "dict", "(", "default", "=", "'/etc/origin/master/admin.kubeconfig'", ",", "type", "=", "'str'", ")", ",", ")", ",", "supports_check_mode", "=", "False", ",", ")", "rval", "=", "OCObjectValidator", ".", "run_ansible", "(", "module", ".", "params", ")", "if", "'failed'", "in", "rval", ":", "module", ".", "fail_json", "(", "*", "*", "rval", ")", "module", ".", "exit_json", "(", "*", "*", "rval", ")" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_objectvalidator.py#L1506-L1523
qiyuangong/leetcode
790f9ee86dcc7bf85be1bd9358f4c069b4a4c2f5
python/259_3Sum_Smaller.py
python
Solution.threeSumSmaller
(self, nums, target)
return res
:type nums: List[int] :type target: int :rtype: int
:type nums: List[int] :type target: int :rtype: int
[ ":", "type", "nums", ":", "List", "[", "int", "]", ":", "type", "target", ":", "int", ":", "rtype", ":", "int" ]
def threeSumSmaller(self, nums, target): """ :type nums: List[int] :type target: int :rtype: int """ # https://leetcode.com/articles/3sum-smaller/#approach-2-binary-search-accepted nums.sort() ls = len(nums) res = 0 for i in range(ls - 1): res += self.twoSumSmaller(nums, i + 1, target - nums[i]) return res
[ "def", "threeSumSmaller", "(", "self", ",", "nums", ",", "target", ")", ":", "# https://leetcode.com/articles/3sum-smaller/#approach-2-binary-search-accepted", "nums", ".", "sort", "(", ")", "ls", "=", "len", "(", "nums", ")", "res", "=", "0", "for", "i", "in", "range", "(", "ls", "-", "1", ")", ":", "res", "+=", "self", ".", "twoSumSmaller", "(", "nums", ",", "i", "+", "1", ",", "target", "-", "nums", "[", "i", "]", ")", "return", "res" ]
https://github.com/qiyuangong/leetcode/blob/790f9ee86dcc7bf85be1bd9358f4c069b4a4c2f5/python/259_3Sum_Smaller.py#L34-L46
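The classic example for this problem, run against the record's Solution class (which relies on the twoSumSmaller helper defined elsewhere in the same file):

# nums = [-2, 0, 1, 3], target = 2 -> 2, because only (-2, 0, 1) and
# (-2, 0, 3) have sums (-1 and 1) strictly below the target.
print(Solution().threeSumSmaller([-2, 0, 1, 3], 2))  # 2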
oleg-yaroshevskiy/quest_qa_labeling
730a9632314e54584f69f909d5e2ef74d843e02c
packages/fairseq-hacked/fairseq/models/masked_lm.py
python
MaskedLMEncoder.max_positions
(self)
return self.max_positions
Maximum output length supported by the encoder.
Maximum output length supported by the encoder.
[ "Maximum", "output", "length", "supported", "by", "the", "encoder", "." ]
def max_positions(self): """Maximum output length supported by the encoder.""" return self.max_positions
[ "def", "max_positions", "(", "self", ")", ":", "return", "self", ".", "max_positions" ]
https://github.com/oleg-yaroshevskiy/quest_qa_labeling/blob/730a9632314e54584f69f909d5e2ef74d843e02c/packages/fairseq-hacked/fairseq/models/masked_lm.py#L294-L296
tanghaibao/jcvi
5e720870c0928996f8b77a38208106ff0447ccb6
jcvi/graphics/base.py
python
discrete_rainbow
(N=7, cmap=cm.Set1, usepreset=True, shuffle=False, plot=False)
return mpl.colors.LinearSegmentedColormap("colormap", cdict, 1024), palette
Return a discrete colormap and the set of colors. modified from <http://www.scipy.org/Cookbook/Matplotlib/ColormapTransformations> cmap: colormap instance, eg. cm.jet. N: Number of colors. Example >>> x = resize(arange(100), (5,100)) >>> djet = cmap_discretize(cm.jet, 5) >>> imshow(x, cmap=djet) See available matplotlib colormaps at: <http://dept.astro.lsa.umich.edu/~msshin/science/code/matplotlib_cm/> If N>20 the sampled colors might not be very distinctive. If you want to error and try anyway, set usepreset=False
Return a discrete colormap and the set of colors.
[ "Return", "a", "discrete", "colormap", "and", "the", "set", "of", "colors", "." ]
def discrete_rainbow(N=7, cmap=cm.Set1, usepreset=True, shuffle=False, plot=False): """ Return a discrete colormap and the set of colors. modified from <http://www.scipy.org/Cookbook/Matplotlib/ColormapTransformations> cmap: colormap instance, eg. cm.jet. N: Number of colors. Example >>> x = resize(arange(100), (5,100)) >>> djet = cmap_discretize(cm.jet, 5) >>> imshow(x, cmap=djet) See available matplotlib colormaps at: <http://dept.astro.lsa.umich.edu/~msshin/science/code/matplotlib_cm/> If N>20 the sampled colors might not be very distinctive. If you want to error and try anyway, set usepreset=False """ import random from scipy import interpolate if usepreset: if 0 < N <= 5: cmap = cm.gist_rainbow elif N <= 20: cmap = cm.Set1 else: sys.exit(discrete_rainbow.__doc__) cdict = cmap._segmentdata.copy() # N colors colors_i = np.linspace(0, 1.0, N) # N+1 indices indices = np.linspace(0, 1.0, N + 1) rgbs = [] for key in ("red", "green", "blue"): # Find the N colors D = np.array(cdict[key]) I = interpolate.interp1d(D[:, 0], D[:, 1]) colors = I(colors_i) rgbs.append(colors) # Place these colors at the correct indices. A = np.zeros((N + 1, 3), float) A[:, 0] = indices A[1:, 1] = colors A[:-1, 2] = colors # Create a tuple for the dictionary. L = [] for l in A: L.append(tuple(l)) cdict[key] = tuple(L) palette = zip(*rgbs) if shuffle: random.shuffle(palette) if plot: print_colors(palette) # Return (colormap object, RGB tuples) return mpl.colors.LinearSegmentedColormap("colormap", cdict, 1024), palette
[ "def", "discrete_rainbow", "(", "N", "=", "7", ",", "cmap", "=", "cm", ".", "Set1", ",", "usepreset", "=", "True", ",", "shuffle", "=", "False", ",", "plot", "=", "False", ")", ":", "import", "random", "from", "scipy", "import", "interpolate", "if", "usepreset", ":", "if", "0", "<", "N", "<=", "5", ":", "cmap", "=", "cm", ".", "gist_rainbow", "elif", "N", "<=", "20", ":", "cmap", "=", "cm", ".", "Set1", "else", ":", "sys", ".", "exit", "(", "discrete_rainbow", ".", "__doc__", ")", "cdict", "=", "cmap", ".", "_segmentdata", ".", "copy", "(", ")", "# N colors", "colors_i", "=", "np", ".", "linspace", "(", "0", ",", "1.0", ",", "N", ")", "# N+1 indices", "indices", "=", "np", ".", "linspace", "(", "0", ",", "1.0", ",", "N", "+", "1", ")", "rgbs", "=", "[", "]", "for", "key", "in", "(", "\"red\"", ",", "\"green\"", ",", "\"blue\"", ")", ":", "# Find the N colors", "D", "=", "np", ".", "array", "(", "cdict", "[", "key", "]", ")", "I", "=", "interpolate", ".", "interp1d", "(", "D", "[", ":", ",", "0", "]", ",", "D", "[", ":", ",", "1", "]", ")", "colors", "=", "I", "(", "colors_i", ")", "rgbs", ".", "append", "(", "colors", ")", "# Place these colors at the correct indices.", "A", "=", "np", ".", "zeros", "(", "(", "N", "+", "1", ",", "3", ")", ",", "float", ")", "A", "[", ":", ",", "0", "]", "=", "indices", "A", "[", "1", ":", ",", "1", "]", "=", "colors", "A", "[", ":", "-", "1", ",", "2", "]", "=", "colors", "# Create a tuple for the dictionary.", "L", "=", "[", "]", "for", "l", "in", "A", ":", "L", ".", "append", "(", "tuple", "(", "l", ")", ")", "cdict", "[", "key", "]", "=", "tuple", "(", "L", ")", "palette", "=", "zip", "(", "*", "rgbs", ")", "if", "shuffle", ":", "random", ".", "shuffle", "(", "palette", ")", "if", "plot", ":", "print_colors", "(", "palette", ")", "# Return (colormap object, RGB tuples)", "return", "mpl", ".", "colors", ".", "LinearSegmentedColormap", "(", "\"colormap\"", ",", "cdict", ",", "1024", ")", ",", "palette" ]
https://github.com/tanghaibao/jcvi/blob/5e720870c0928996f8b77a38208106ff0447ccb6/jcvi/graphics/base.py#L474-L538
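A usage sketch for the record above, assuming jcvi, numpy, and matplotlib are importable; the variable names are illustrative:

import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from jcvi.graphics.base import discrete_rainbow

# With N=5 and the default usepreset=True, the function samples
# cm.gist_rainbow and ignores the cmap argument.
dcmap, palette = discrete_rainbow(5, cmap=cm.jet)
data = np.resize(np.arange(100), (5, 100))
plt.imshow(data, cmap=dcmap)  # rows fall into the 5 discrete color bands
plt.show()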
NoGameNoLife00/mybolg
afe17ea5bfe405e33766e5682c43a4262232ee12
libs/werkzeug/__init__.py
python
module.__dir__
(self)
return result
Just show what we want to show.
Just show what we want to show.
[ "Just", "show", "what", "we", "want", "to", "show", "." ]
def __dir__(self): """Just show what we want to show.""" result = list(new_module.__all__) result.extend(('__file__', '__path__', '__doc__', '__all__', '__docformat__', '__name__', '__package__', '__version__')) return result
[ "def", "__dir__", "(", "self", ")", ":", "result", "=", "list", "(", "new_module", ".", "__all__", ")", "result", ".", "extend", "(", "(", "'__file__'", ",", "'__path__'", ",", "'__doc__'", ",", "'__all__'", ",", "'__docformat__'", ",", "'__name__'", ",", "'__package__'", ",", "'__version__'", ")", ")", "return", "result" ]
https://github.com/NoGameNoLife00/mybolg/blob/afe17ea5bfe405e33766e5682c43a4262232ee12/libs/werkzeug/__init__.py#L127-L133
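A quick interactive check of what the override exposes, assuming a werkzeug release that still ships this lazy-module pattern (whether 'abort' is exported depends on that release's __all__):

import werkzeug

names = dir(werkzeug)          # invokes the overridden __dir__
print('__version__' in names)  # True: the metadata dunders are always listed
print('abort' in names)        # True only if 'abort' is in new_module.__all__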
nipy/nipype
cd4c34d935a43812d1756482fdc4034844e485b8
nipype/algorithms/misc.py
python
calc_moments
(timeseries_file, moment)
return np.where(zero, 0, m3 / m2 ** (moment / 2.0))
Returns nth moment (3 for skewness, 4 for kurtosis) of timeseries (list of values; one per timeseries). Keyword arguments: timeseries_file -- text file with white space separated timepoints in rows
Returns nth moment (3 for skewness, 4 for kurtosis) of timeseries (list of values; one per timeseries).
[ "Returns", "nth", "moment", "(", "3", "for", "skewness", "4", "for", "kurtosis", ")", "of", "timeseries", "(", "list", "of", "values", ";", "one", "per", "timeseries", ")", "." ]
def calc_moments(timeseries_file, moment): """Returns nth moment (3 for skewness, 4 for kurtosis) of timeseries (list of values; one per timeseries). Keyword arguments: timeseries_file -- text file with white space separated timepoints in rows """ import scipy.stats as stats timeseries = np.genfromtxt(timeseries_file) m2 = stats.moment(timeseries, 2, axis=0) m3 = stats.moment(timeseries, moment, axis=0) zero = m2 == 0 return np.where(zero, 0, m3 / m2 ** (moment / 2.0))
[ "def", "calc_moments", "(", "timeseries_file", ",", "moment", ")", ":", "import", "scipy", ".", "stats", "as", "stats", "timeseries", "=", "np", ".", "genfromtxt", "(", "timeseries_file", ")", "m2", "=", "stats", ".", "moment", "(", "timeseries", ",", "2", ",", "axis", "=", "0", ")", "m3", "=", "stats", ".", "moment", "(", "timeseries", ",", "moment", ",", "axis", "=", "0", ")", "zero", "=", "m2", "==", "0", "return", "np", ".", "where", "(", "zero", ",", "0", ",", "m3", "/", "m2", "**", "(", "moment", "/", "2.0", ")", ")" ]
https://github.com/nipy/nipype/blob/cd4c34d935a43812d1756482fdc4034844e485b8/nipype/algorithms/misc.py#L945-L960
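A usage sketch for calc_moments, assuming nipype is installed; note that moment=4 yields plain (non-excess) kurtosis, since the formula is m4/m2**2:

import numpy as np
from tempfile import NamedTemporaryFile
from nipype.algorithms.misc import calc_moments

# Three timeseries, stored column-wise as whitespace-separated rows.
with NamedTemporaryFile(suffix='.txt', delete=False) as f:
    fname = f.name
np.savetxt(fname, np.random.randn(200, 3))

print(calc_moments(fname, 3))  # skewness, one value per timeseries
print(calc_moments(fname, 4))  # non-excess kurtosis, one value per column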
skylander86/lambda-text-extractor
6da52d077a2fc571e38bfe29c33ae68f6443cd5a
lib-linux_x64/odf/odf2xhtml.py
python
ODF2XHTML.s_text_s
(self, tag, attrs)
Generate a number of spaces. ODF has a dedicated element for a run of spaces; HTML uses &nbsp;. We use &#160; so we can send the output through an XML parser if we desire to
Generate a number of spaces. ODF has a dedicated element for a run of spaces; HTML uses &nbsp;. We use &#160; so we can send the output through an XML parser if we desire to
[ "Generate", "a", "number", "of", "spaces", ".", "ODF", "has", "a", "dedicated", "element", "for", "a", "run", "of", "spaces", ";", "HTML", "uses", "&nbsp", ";", ".", "We", "use", "&#160", ";", "so", "we", "can", "send", "the", "output", "through", "an", "XML", "parser", "if", "we", "desire", "to" ]
def s_text_s(self, tag, attrs): """ Generate a number of spaces. ODF has a dedicated element for a run of spaces; HTML uses &nbsp;. We use &#160; so we can send the output through an XML parser if we desire to """ c = attrs.get( (TEXTNS,'c'),"1") for x in range(int(c)): self.writeout('&#160;')
[ "def", "s_text_s", "(", "self", ",", "tag", ",", "attrs", ")", ":", "c", "=", "attrs", ".", "get", "(", "(", "TEXTNS", ",", "'c'", ")", ",", "\"1\"", ")", "for", "x", "in", "range", "(", "int", "(", "c", ")", ")", ":", "self", ".", "writeout", "(", "'&#160;'", ")" ]
https://github.com/skylander86/lambda-text-extractor/blob/6da52d077a2fc571e38bfe29c33ae68f6443cd5a/lib-linux_x64/odf/odf2xhtml.py#L1381-L1387
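The handler's effect can be restated outside the class; expand_spaces below is a hypothetical helper, not part of odf2xhtml:

def expand_spaces(count="3"):
    # ODF collapses a run of spaces into one element carrying a count;
    # emitting the numeric entity &#160; (rather than &nbsp;) keeps the
    # output parseable by a plain XML parser.
    return '&#160;' * int(count)

print(expand_spaces("3"))  # &#160;&#160;&#160;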
replit-archive/empythoned
977ec10ced29a3541a4973dc2b59910805695752
dist/lib/python2.7/weakref.py
python
WeakKeyDictionary.keyrefs
(self)
return self.data.keys()
Return a list of weak references to the keys. The references are not guaranteed to be 'live' at the time they are used, so the result of calling the references needs to be checked before being used. This can be used to avoid creating references that will cause the garbage collector to keep the keys around longer than needed.
Return a list of weak references to the keys.
[ "Return", "a", "list", "of", "weak", "references", "to", "the", "keys", "." ]
def keyrefs(self): """Return a list of weak references to the keys. The references are not guaranteed to be 'live' at the time they are used, so the result of calling the references needs to be checked before being used. This can be used to avoid creating references that will cause the garbage collector to keep the keys around longer than needed. """ return self.data.keys()
[ "def", "keyrefs", "(", "self", ")", ":", "return", "self", ".", "data", ".", "keys", "(", ")" ]
https://github.com/replit-archive/empythoned/blob/977ec10ced29a3541a4973dc2b59910805695752/dist/lib/python2.7/weakref.py#L338-L348
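A self-contained sketch of the check-before-use pattern the docstring asks for:

import weakref

class Key(object):
    pass

k = Key()
d = weakref.WeakKeyDictionary({k: 'value'})

ref = d.keyrefs()[0]   # a weak reference, not the key itself
key = ref()            # dereference it...
if key is not None:    # ...and check liveness before use
    print(d[key])      # 'value'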
hyperledger/aries-cloudagent-python
2f36776e99f6053ae92eed8123b5b1b2e891c02a
aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py
python
DIFPresExchHandler.apply_constraint_received_cred
( self, constraint: Constraints, cred_dict: dict )
return True
Evaluate constraint from the request against received credential.
Evaluate constraint from the request against received credential.
[ "Evaluate", "constraint", "from", "the", "request", "against", "received", "credential", "." ]
async def apply_constraint_received_cred( self, constraint: Constraints, cred_dict: dict ) -> bool: """Evaluate constraint from the request against received credential.""" fields = constraint._fields field_paths = [] credential = self.create_vcrecord(cred_dict) is_limit_disclosure = constraint.limit_disclosure == "required" for field in fields: if is_limit_disclosure: field = await self.get_updated_field(field, cred_dict) if not await self.filter_by_field(field, credential): return False field_paths = field_paths + ( await self.restrict_field_paths_one_of_filter( field_paths=field.paths, cred_dict=cred_dict ) ) # Selective Disclosure check if is_limit_disclosure: field_paths = set([path.replace("$.", "") for path in field_paths]) mandatory_paths = { "@context", "type", "issuanceDate", "issuer", "proof", "credentialSubject", "id", } to_remove_from_field_paths = set() nested_field_paths = {"credentialSubject": {"id", "type"}} for field_path in field_paths: if field_path.count(".") >= 1: split_field_path = field_path.split(".") key = ".".join(split_field_path[:-1]) value = split_field_path[-1] nested_field_paths = self.build_nested_paths_dict( key, value, nested_field_paths, cred_dict ) to_remove_from_field_paths.add(field_path) for to_remove_path in to_remove_from_field_paths: field_paths.remove(to_remove_path) field_paths = set.union(mandatory_paths, field_paths) for attrs in cred_dict.keys(): if attrs not in field_paths: return False for nested_attr_key in nested_field_paths: nested_attr_values = nested_field_paths[nested_attr_key] extracted = self.nested_get(cred_dict, nested_attr_key) if isinstance(extracted, dict): if not self.check_attr_in_extracted_dict( extracted, nested_attr_values ): return False elif isinstance(extracted, list): for extracted_dict in extracted: if not self.check_attr_in_extracted_dict( extracted_dict, nested_attr_values ): return False return True
[ "async", "def", "apply_constraint_received_cred", "(", "self", ",", "constraint", ":", "Constraints", ",", "cred_dict", ":", "dict", ")", "->", "bool", ":", "fields", "=", "constraint", ".", "_fields", "field_paths", "=", "[", "]", "credential", "=", "self", ".", "create_vcrecord", "(", "cred_dict", ")", "is_limit_disclosure", "=", "constraint", ".", "limit_disclosure", "==", "\"required\"", "for", "field", "in", "fields", ":", "if", "is_limit_disclosure", ":", "field", "=", "await", "self", ".", "get_updated_field", "(", "field", ",", "cred_dict", ")", "if", "not", "await", "self", ".", "filter_by_field", "(", "field", ",", "credential", ")", ":", "return", "False", "field_paths", "=", "field_paths", "+", "(", "await", "self", ".", "restrict_field_paths_one_of_filter", "(", "field_paths", "=", "field", ".", "paths", ",", "cred_dict", "=", "cred_dict", ")", ")", "# Selective Disclosure check", "if", "is_limit_disclosure", ":", "field_paths", "=", "set", "(", "[", "path", ".", "replace", "(", "\"$.\"", ",", "\"\"", ")", "for", "path", "in", "field_paths", "]", ")", "mandatory_paths", "=", "{", "\"@context\"", ",", "\"type\"", ",", "\"issuanceDate\"", ",", "\"issuer\"", ",", "\"proof\"", ",", "\"credentialSubject\"", ",", "\"id\"", ",", "}", "to_remove_from_field_paths", "=", "set", "(", ")", "nested_field_paths", "=", "{", "\"credentialSubject\"", ":", "{", "\"id\"", ",", "\"type\"", "}", "}", "for", "field_path", "in", "field_paths", ":", "if", "field_path", ".", "count", "(", "\".\"", ")", ">=", "1", ":", "split_field_path", "=", "field_path", ".", "split", "(", "\".\"", ")", "key", "=", "\".\"", ".", "join", "(", "split_field_path", "[", ":", "-", "1", "]", ")", "value", "=", "split_field_path", "[", "-", "1", "]", "nested_field_paths", "=", "self", ".", "build_nested_paths_dict", "(", "key", ",", "value", ",", "nested_field_paths", ",", "cred_dict", ")", "to_remove_from_field_paths", ".", "add", "(", "field_path", ")", "for", "to_remove_path", "in", "to_remove_from_field_paths", ":", "field_paths", ".", "remove", "(", "to_remove_path", ")", "field_paths", "=", "set", ".", "union", "(", "mandatory_paths", ",", "field_paths", ")", "for", "attrs", "in", "cred_dict", ".", "keys", "(", ")", ":", "if", "attrs", "not", "in", "field_paths", ":", "return", "False", "for", "nested_attr_key", "in", "nested_field_paths", ":", "nested_attr_values", "=", "nested_field_paths", "[", "nested_attr_key", "]", "extracted", "=", "self", ".", "nested_get", "(", "cred_dict", ",", "nested_attr_key", ")", "if", "isinstance", "(", "extracted", ",", "dict", ")", ":", "if", "not", "self", ".", "check_attr_in_extracted_dict", "(", "extracted", ",", "nested_attr_values", ")", ":", "return", "False", "elif", "isinstance", "(", "extracted", ",", "list", ")", ":", "for", "extracted_dict", "in", "extracted", ":", "if", "not", "self", ".", "check_attr_in_extracted_dict", "(", "extracted_dict", ",", "nested_attr_values", ")", ":", "return", "False", "return", "True" ]
https://github.com/hyperledger/aries-cloudagent-python/blob/2f36776e99f6053ae92eed8123b5b1b2e891c02a/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py#L1410-L1473
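A hedged call-site sketch: handler (an initialized DIFPresExchHandler) and cred_dict (a W3C credential as a plain dict) are assumed to exist, and the constraint payload keys follow the DIF presentation-exchange wire format on trust:

import asyncio
from aries_cloudagent.protocols.present_proof.dif.pres_exch import Constraints

# Illustrative constraint: limit disclosure to one credentialSubject path.
constraint = Constraints.deserialize({
    "limit_disclosure": "required",
    "fields": [{"path": ["$.credentialSubject.degree.type"]}],
})

ok = asyncio.get_event_loop().run_until_complete(
    handler.apply_constraint_received_cred(constraint, cred_dict)
)
# ok is True only if every field filter matches and, under limit_disclosure,
# the credential carries no attributes beyond the requested paths.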
TensorMSA/tensormsa
c36b565159cd934533636429add3c7d7263d622b
cluster/common/common_node.py
python
WorkFlowCommonNode.set_net_id
(self, net_id)
set the net id used for tree search :return:
set the net id used for tree search :return:
[ "set", "the", "net", "id", "used", "for", "tree", "search", ":", "return", ":" ]
def set_net_id(self, net_id): """ set the net id used for tree search :return: """ self.net_id = net_id
[ "def", "set_net_id", "(", "self", ",", "net_id", ")", ":", "self", ".", "net_id", "=", "net_id" ]
https://github.com/TensorMSA/tensormsa/blob/c36b565159cd934533636429add3c7d7263d622b/cluster/common/common_node.py#L66-L71
CedricGuillemet/Imogen
ee417b42747ed5b46cb11b02ef0c3630000085b3
bin/Lib/asyncio/base_events.py
python
BaseEventLoop.run_until_complete
(self, future)
return future.result()
Run until the Future is done. If the argument is a coroutine, it is wrapped in a Task. WARNING: It would be disastrous to call run_until_complete() with the same coroutine twice -- it would wrap it in two different Tasks and that can't be good. Return the Future's result, or raise its exception.
Run until the Future is done.
[ "Run", "until", "the", "Future", "is", "done", "." ]
def run_until_complete(self, future): """Run until the Future is done. If the argument is a coroutine, it is wrapped in a Task. WARNING: It would be disastrous to call run_until_complete() with the same coroutine twice -- it would wrap it in two different Tasks and that can't be good. Return the Future's result, or raise its exception. """ self._check_closed() new_task = not futures.isfuture(future) future = tasks.ensure_future(future, loop=self) if new_task: # An exception is raised if the future didn't complete, so there # is no need to log the "destroy pending task" message future._log_destroy_pending = False future.add_done_callback(_run_until_complete_cb) try: self.run_forever() except: if new_task and future.done() and not future.cancelled(): # The coroutine raised a BaseException. Consume the exception # to not log a warning, the caller doesn't have access to the # local task. future.exception() raise finally: future.remove_done_callback(_run_until_complete_cb) if not future.done(): raise RuntimeError('Event loop stopped before Future completed.') return future.result()
[ "def", "run_until_complete", "(", "self", ",", "future", ")", ":", "self", ".", "_check_closed", "(", ")", "new_task", "=", "not", "futures", ".", "isfuture", "(", "future", ")", "future", "=", "tasks", ".", "ensure_future", "(", "future", ",", "loop", "=", "self", ")", "if", "new_task", ":", "# An exception is raised if the future didn't complete, so there", "# is no need to log the \"destroy pending task\" message", "future", ".", "_log_destroy_pending", "=", "False", "future", ".", "add_done_callback", "(", "_run_until_complete_cb", ")", "try", ":", "self", ".", "run_forever", "(", ")", "except", ":", "if", "new_task", "and", "future", ".", "done", "(", ")", "and", "not", "future", ".", "cancelled", "(", ")", ":", "# The coroutine raised a BaseException. Consume the exception", "# to not log a warning, the caller doesn't have access to the", "# local task.", "future", ".", "exception", "(", ")", "raise", "finally", ":", "future", ".", "remove_done_callback", "(", "_run_until_complete_cb", ")", "if", "not", "future", ".", "done", "(", ")", ":", "raise", "RuntimeError", "(", "'Event loop stopped before Future completed.'", ")", "return", "future", ".", "result", "(", ")" ]
https://github.com/CedricGuillemet/Imogen/blob/ee417b42747ed5b46cb11b02ef0c3630000085b3/bin/Lib/asyncio/base_events.py#L538-L573
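A minimal sketch of the documented contract: a coroutine passed in is wrapped in a Task, and the Future's result is returned:

import asyncio

async def add(a, b):
    await asyncio.sleep(0.1)
    return a + b

loop = asyncio.new_event_loop()
try:
    print(loop.run_until_complete(add(2, 3)))  # prints 5
finally:
    loop.close()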
keiffster/program-y
8c99b56f8c32f01a7b9887b5daae9465619d0385
src/programy/clients/args.py
python
ClientArguments.noloop
(self)
return self._no_loop
[]
def noloop(self): return self._no_loop
[ "def", "noloop", "(", "self", ")", ":", "return", "self", ".", "_no_loop" ]
https://github.com/keiffster/program-y/blob/8c99b56f8c32f01a7b9887b5daae9465619d0385/src/programy/clients/args.py#L58-L59
avocado-framework/avocado
1f9b3192e8ba47d029c33fe21266bd113d17811f
avocado/utils/softwareraid.py
python
SoftwareRaid.is_recovering
(self)
return False
Checks if raid is recovering. :return: True if recovering, False otherwise. :rtype: bool
Checks if raid is recovering.
[ "Checks", "if", "raid", "is", "recovering", "." ]
def is_recovering(self): """ Checks if raid is recovering. :return: True if recovering, False otherwise. :rtype: bool """ LOG.debug("RECOVERY") for line in self.get_detail().splitlines(): if 'State' in line and 'recovering' in line: return True return False
[ "def", "is_recovering", "(", "self", ")", ":", "LOG", ".", "debug", "(", "\"RECOVERY\"", ")", "for", "line", "in", "self", ".", "get_detail", "(", ")", ".", "splitlines", "(", ")", ":", "if", "'State'", "in", "line", "and", "'recovering'", "in", "line", ":", "return", "True", "return", "False" ]
https://github.com/avocado-framework/avocado/blob/1f9b3192e8ba47d029c33fe21266bd113d17811f/avocado/utils/softwareraid.py#L134-L145
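A hedged sketch of how the check slots into a test; creating an array needs root plus scratch disks, and the constructor arguments shown (name, level, disks, metadata) are assumptions taken from avocado's utils:

from avocado.utils.softwareraid import SoftwareRaid

raid = SoftwareRaid('/dev/md/test', '1', ['/dev/vdb', '/dev/vdc'], '1.2')
raid.create()
if raid.is_recovering():
    print('mirror resync still running; hold off on I/O benchmarks')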
linxid/Machine_Learning_Study_Path
558e82d13237114bbb8152483977806fc0c222af
Machine Learning In Action/Chapter8-Regression/venv/Lib/site-packages/pip-9.0.1-py3.6.egg/pip/_vendor/distlib/version.py
python
NormalizedVersion.is_prerelease
(self)
return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
[]
def is_prerelease(self): return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
[ "def", "is_prerelease", "(", "self", ")", ":", "return", "any", "(", "t", "[", "0", "]", "in", "self", ".", "PREREL_TAGS", "for", "t", "in", "self", ".", "_parts", "if", "t", ")" ]
https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter8-Regression/venv/Lib/site-packages/pip-9.0.1-py3.6.egg/pip/_vendor/distlib/version.py#L286-L287
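Since is_prerelease is a property, it is read without parentheses. A small check, assuming this pip's vendored distlib import path:

from pip._vendor.distlib.version import NormalizedVersion

print(NormalizedVersion('1.0.0b1').is_prerelease)  # True: 'b' is a PREREL tag
print(NormalizedVersion('1.0.0').is_prerelease)    # False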
SteveDoyle2/pyNastran
eda651ac2d4883d95a34951f8a002ff94f642a1a
pyNastran/dev/bdf_vectorized/bdf_interface2/add_card.py
python
AddCard._add_bset_object
(self, set_obj)
adds a BSET/BSET1 object
adds a BSET/BSET1 object
[ "adds", "a", "BSET", "/", "BSET1", "object" ]
def _add_bset_object(self, set_obj): """adds a BSET/BSET1 object""" self.bsets.append(set_obj)
[ "def", "_add_bset_object", "(", "self", ",", "set_obj", ")", ":", "self", ".", "bsets", ".", "append", "(", "set_obj", ")" ]
https://github.com/SteveDoyle2/pyNastran/blob/eda651ac2d4883d95a34951f8a002ff94f642a1a/pyNastran/dev/bdf_vectorized/bdf_interface2/add_card.py#L454-L456
pallets/werkzeug
9efe8c00dcb2b6fc086961ba304729db01912652
examples/shorty/utils.py
python
Pagination.pages
(self)
return max(0, self.count - 1) // self.per_page + 1
Return the number of pages.
Return the number of pages.
[ "Return", "the", "number", "of", "pages", "." ]
def pages(self): """Return the number of pages.""" return max(0, self.count - 1) // self.per_page + 1
[ "def", "pages", "(", "self", ")", ":", "return", "max", "(", "0", ",", "self", ".", "count", "-", "1", ")", "//", "self", ".", "per_page", "+", "1" ]
https://github.com/pallets/werkzeug/blob/9efe8c00dcb2b6fc086961ba304729db01912652/examples/shorty/utils.py#L109-L111
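The property is plain ceiling division with a floor of one page; pages below is a hypothetical free-function restatement for checking the edge cases:

def pages(count, per_page):
    # Equivalent to ceil(count / per_page), except an empty result
    # set still renders as one page.
    return max(0, count - 1) // per_page + 1

print(pages(0, 20))   # 1
print(pages(20, 20))  # 1
print(pages(21, 20))  # 2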