Dataset schema (per-row fields, with string length ranges as reported by the dataset viewer):

nwo                string, length 5 to 106   (repository "name with owner", e.g. "deepmind/hanabi-learning-environment")
sha                string, length 40         (git commit hash)
path               string, length 4 to 174   (file path within the repository)
language           string, 1 distinct value  ("python")
identifier         string, length 1 to 140   (function or method name)
parameters         string, length 0 to 87.7k
argument_list      string, 1 distinct value
return_statement   string, length 0 to 426k
docstring          string, length 0 to 64.3k
docstring_summary  string, length 0 to 26.3k
docstring_tokens   list
function           string, length 18 to 4.83M
function_tokens    list
url                string, length 83 to 304  (GitHub blob URL with line range)
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
cb692f527e4e819b6c228187c5702d990a180043
external/Scripting Engine/packages/IronPython.StdLib.2.7.4/content/Lib/nntplib.py
python
NNTP.statcmd
(self, line)
return self.statparse(resp)
Internal: process a STAT, NEXT or LAST command.
Internal: process a STAT, NEXT or LAST command.
[ "Internal", ":", "process", "a", "STAT", "NEXT", "or", "LAST", "command", "." ]
def statcmd(self, line):
    """Internal: process a STAT, NEXT or LAST command."""
    resp = self.shortcmd(line)
    return self.statparse(resp)
[ "def", "statcmd", "(", "self", ",", "line", ")", ":", "resp", "=", "self", ".", "shortcmd", "(", "line", ")", "return", "self", ".", "statparse", "(", "resp", ")" ]
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/external/Scripting Engine/packages/IronPython.StdLib.2.7.4/content/Lib/nntplib.py#L382-L385
deepmind/hanabi-learning-environment
54e79594f4b6fb40ebb3004289c6db0e34a8b5fb
hanabi_learning_environment/pyhanabi.py
python
HanabiGame.num_cards
(self, color, rank)
return lib.NumCards(self._game, color, rank)
Returns number of instances of Card(color, rank) in the initial deck.
Returns number of instances of Card(color, rank) in the initial deck.
[ "Returns", "number", "of", "instances", "of", "Card", "(", "color", "rank", ")", "in", "the", "initial", "deck", "." ]
def num_cards(self, color, rank):
    """Returns number of instances of Card(color, rank) in the initial deck."""
    return lib.NumCards(self._game, color, rank)
[ "def", "num_cards", "(", "self", ",", "color", ",", "rank", ")", ":", "return", "lib", ".", "NumCards", "(", "self", ".", "_game", ",", "color", ",", "rank", ")" ]
https://github.com/deepmind/hanabi-learning-environment/blob/54e79594f4b6fb40ebb3004289c6db0e34a8b5fb/hanabi_learning_environment/pyhanabi.py#L773-L775
OpenMDAO/OpenMDAO
f47eb5485a0bb5ea5d2ae5bd6da4b94dc6b296bd
openmdao/utils/assert_utils.py
python
assert_warning
(category, msg, contains_msg=False)
Context manager asserting that a warning is issued. Parameters ---------- category : class The class of the expected warning. msg : str The text of the expected warning. contains_msg : bool Set to True to check that the warning text contains msg, rather than checking equality. Yields ------ None Raises ------ AssertionError If the expected warning is not raised.
Context manager asserting that a warning is issued.
[ "Context", "manager", "asserting", "that", "a", "warning", "is", "issued", "." ]
def assert_warning(category, msg, contains_msg=False):
    """
    Context manager asserting that a warning is issued.

    Parameters
    ----------
    category : class
        The class of the expected warning.
    msg : str
        The text of the expected warning.
    contains_msg : bool
        Set to True to check that the warning text contains msg, rather than checking equality.

    Yields
    ------
    None

    Raises
    ------
    AssertionError
        If the expected warning is not raised.
    """
    with reset_warning_registry():
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            yield

    for warn in w:
        if contains_msg:
            warn_clause = msg in str(warn.message)
        else:
            warn_clause = str(warn.message) == msg

        if (issubclass(warn.category, category) and warn_clause):
            break
    else:
        msg = f"Did not see expected {category.__name__}:\n{msg}"
        if w:
            ws = '\n'.join([str(ww.message) for ww in w])
            categories = '\n'.join([str(ww.category.__name__) for ww in w])
            msg += f"\nDid see warnings [{categories}]:\n{ws}"
        raise AssertionError(msg)
[ "def", "assert_warning", "(", "category", ",", "msg", ",", "contains_msg", "=", "False", ")", ":", "with", "reset_warning_registry", "(", ")", ":", "with", "warnings", ".", "catch_warnings", "(", "record", "=", "True", ")", "as", "w", ":", "warnings", ".", "simplefilter", "(", "\"always\"", ")", "yield", "for", "warn", "in", "w", ":", "if", "contains_msg", ":", "warn_clause", "=", "msg", "in", "str", "(", "warn", ".", "message", ")", "else", ":", "warn_clause", "=", "str", "(", "warn", ".", "message", ")", "==", "msg", "if", "(", "issubclass", "(", "warn", ".", "category", ",", "category", ")", "and", "warn_clause", ")", ":", "break", "else", ":", "msg", "=", "f\"Did not see expected {category.__name__}:\\n{msg}\"", "if", "w", ":", "ws", "=", "'\\n'", ".", "join", "(", "[", "str", "(", "ww", ".", "message", ")", "for", "ww", "in", "w", "]", ")", "categories", "=", "'\\n'", ".", "join", "(", "[", "str", "(", "ww", ".", "category", ".", "__name__", ")", "for", "ww", "in", "w", "]", ")", "msg", "+=", "f\"\\nDid see warnings [{categories}]:\\n{ws}\"", "raise", "AssertionError", "(", "msg", ")" ]
https://github.com/OpenMDAO/OpenMDAO/blob/f47eb5485a0bb5ea5d2ae5bd6da4b94dc6b296bd/openmdao/utils/assert_utils.py#L21-L62
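A minimal usage sketch for assert_warning, assuming it is importable from openmdao.utils.assert_utils as the record's path suggests:

import warnings
from openmdao.utils.assert_utils import assert_warning  # module path taken from the record above

# Passes: a UserWarning whose text contains "deprecated" is raised inside the context.
with assert_warning(UserWarning, "deprecated", contains_msg=True):
    warnings.warn("this option is deprecated", UserWarning)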
openstack/python-keystoneclient
100253d52e0c62dffffddb6f046ad660a9bce1a9
keystoneclient/base.py
python
Manager._delete
(self, url, **kwargs)
return resp, self._prepare_return_value(resp, body)
Delete an object. :param url: a partial URL, e.g., '/servers/my-server' :param kwargs: Additional arguments will be passed to the request.
Delete an object.
[ "Delete", "an", "object", "." ]
def _delete(self, url, **kwargs):
    """Delete an object.

    :param url: a partial URL, e.g., '/servers/my-server'
    :param kwargs: Additional arguments will be passed to the request.
    """
    resp, body = self.client.delete(url, **kwargs)
    return resp, self._prepare_return_value(resp, body)
[ "def", "_delete", "(", "self", ",", "url", ",", "*", "*", "kwargs", ")", ":", "resp", ",", "body", "=", "self", ".", "client", ".", "delete", "(", "url", ",", "*", "*", "kwargs", ")", "return", "resp", ",", "self", ".", "_prepare_return_value", "(", "resp", ",", "body", ")" ]
https://github.com/openstack/python-keystoneclient/blob/100253d52e0c62dffffddb6f046ad660a9bce1a9/keystoneclient/base.py#L240-L247
SteveDoyle2/pyNastran
eda651ac2d4883d95a34951f8a002ff94f642a1a
pyNastran/op2/op2_interface/op2_scalar.py
python
OP2_Scalar.__init__
(self, debug=False, log=None, debug_file=None)
Initializes the OP2_Scalar object Parameters ---------- debug : bool/None; default=True used to set the logger if no logger is passed in True: logs debug/info/warning/error messages False: logs info/warning/error messages None: logs warning/error messages log : Log() a logging object to write debug messages to (.. seealso:: import logging) debug_file : str; default=None (No debug) sets the filename that will be written to
Initializes the OP2_Scalar object
[ "Initializes", "the", "OP2_Scalar", "object" ]
def __init__(self, debug=False, log=None, debug_file=None):
    """
    Initializes the OP2_Scalar object

    Parameters
    ----------
    debug : bool/None; default=True
        used to set the logger if no logger is passed in
        True:  logs debug/info/warning/error messages
        False: logs info/warning/error messages
        None:  logs warning/error messages
    log : Log()
        a logging object to write debug messages to
        (.. seealso:: import logging)
    debug_file : str; default=None (No debug)
        sets the filename that will be written to
    """
    assert debug is None or isinstance(debug, bool), 'debug=%r' % debug

    self.log = get_logger2(log, debug=debug, encoding='utf-8')
    self._count = 0
    self.op2_filename = None
    self.bdf_filename = None
    self.f06_filename = None
    self.des_filename = None
    self.h5_filename = None
    self._encoding = 'utf8'

    #: should a MATPOOL "symmetric" matrix be stored as symmetric
    #: it takes double the RAM, but is easier to use
    self.apply_symmetry = True

    OP2Common.__init__(self)
    FortranFormat.__init__(self)

    self.is_vectorized = False
    self._close_op2 = True

    self.result_names = set()
    self.grid_point_weight = {}
    self.words = []
    self.debug = debug
    self._last_comment = None
    #self.debug = True
    #self.debug = False
    #debug_file = None
    if debug_file is None:
        self.debug_file = None
    else:
        assert isinstance(debug_file, str), debug_file
        self.debug_file = debug_file
    self.op2_reader = OP2Reader(self)
[ "def", "__init__", "(", "self", ",", "debug", "=", "False", ",", "log", "=", "None", ",", "debug_file", "=", "None", ")", ":", "assert", "debug", "is", "None", "or", "isinstance", "(", "debug", ",", "bool", ")", ",", "'debug=%r'", "%", "debug", "self", ".", "log", "=", "get_logger2", "(", "log", ",", "debug", "=", "debug", ",", "encoding", "=", "'utf-8'", ")", "self", ".", "_count", "=", "0", "self", ".", "op2_filename", "=", "None", "self", ".", "bdf_filename", "=", "None", "self", ".", "f06_filename", "=", "None", "self", ".", "des_filename", "=", "None", "self", ".", "h5_filename", "=", "None", "self", ".", "_encoding", "=", "'utf8'", "#: should a MATPOOL \"symmetric\" matrix be stored as symmetric", "#: it takes double the RAM, but is easier to use", "self", ".", "apply_symmetry", "=", "True", "OP2Common", ".", "__init__", "(", "self", ")", "FortranFormat", ".", "__init__", "(", "self", ")", "self", ".", "is_vectorized", "=", "False", "self", ".", "_close_op2", "=", "True", "self", ".", "result_names", "=", "set", "(", ")", "self", ".", "grid_point_weight", "=", "{", "}", "self", ".", "words", "=", "[", "]", "self", ".", "debug", "=", "debug", "self", ".", "_last_comment", "=", "None", "#self.debug = True", "#self.debug = False", "#debug_file = None", "if", "debug_file", "is", "None", ":", "self", ".", "debug_file", "=", "None", "else", ":", "assert", "isinstance", "(", "debug_file", ",", "str", ")", ",", "debug_file", "self", ".", "debug_file", "=", "debug_file", "self", ".", "op2_reader", "=", "OP2Reader", "(", "self", ")" ]
https://github.com/SteveDoyle2/pyNastran/blob/eda651ac2d4883d95a34951f8a002ff94f642a1a/pyNastran/op2/op2_interface/op2_scalar.py#L535-L589
miyosuda/unreal
31d4886149412fa248f6efa490ab65bd2f425cde
train/rmsprop_applier.py
python
RMSPropApplier._slot_dict
(self, slot_name)
return named_slots
[]
def _slot_dict(self, slot_name):
    named_slots = self._slots.get(slot_name, None)
    if named_slots is None:
        named_slots = {}
        self._slots[slot_name] = named_slots
    return named_slots
[ "def", "_slot_dict", "(", "self", ",", "slot_name", ")", ":", "named_slots", "=", "self", ".", "_slots", ".", "get", "(", "slot_name", ",", "None", ")", "if", "named_slots", "is", "None", ":", "named_slots", "=", "{", "}", "self", ".", "_slots", "[", "slot_name", "]", "=", "named_slots", "return", "named_slots" ]
https://github.com/miyosuda/unreal/blob/31d4886149412fa248f6efa490ab65bd2f425cde/train/rmsprop_applier.py#L54-L59
SymbiFlow/prjxray
5349556bc2c230801d6df0cf11bccb9cfd171639
fuzzers/034-cmt-pll-pips/fixup_and_group.py
python
bit_to_str
(bit)
return "{}{}_{:02d}".format(s, bit[0], bit[1])
Converts a tuple (frame, bit, value) to its string representation.
Converts a tuple (frame, bit, value) to its string representation.
[ "Converts", "a", "tuple", "(", "frame", "bit", "value", ")", "to", "its", "string", "representation", "." ]
def bit_to_str(bit):
    """ Converts a tuple (frame, bit, value) to its string representation. """
    s = "!" if not bit[2] else ""
    return "{}{}_{:02d}".format(s, bit[0], bit[1])
[ "def", "bit_to_str", "(", "bit", ")", ":", "s", "=", "\"!\"", "if", "not", "bit", "[", "2", "]", "else", "\"\"", "return", "\"{}{}_{:02d}\"", ".", "format", "(", "s", ",", "bit", "[", "0", "]", ",", "bit", "[", "1", "]", ")" ]
https://github.com/SymbiFlow/prjxray/blob/5349556bc2c230801d6df0cf11bccb9cfd171639/fuzzers/034-cmt-pll-pips/fixup_and_group.py#L81-L86
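Since bit_to_str is a pure function, it can be checked standalone; a quick sketch re-declaring the function from the record above:

def bit_to_str(bit):
    """Converts a tuple (frame, bit, value) to its string representation."""
    s = "!" if not bit[2] else ""
    return "{}{}_{:02d}".format(s, bit[0], bit[1])

assert bit_to_str((26, 5, True)) == "26_05"    # value True: no "!" prefix
assert bit_to_str((26, 5, False)) == "!26_05"  # value False: "!" prefix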
jieter/django-tables2
ce392ee2ee341d7180345a6113919cf9a3925f16
django_tables2/views.py
python
SingleTableMixin.get_table_class
(self)
Return the class to use for the table.
Return the class to use for the table.
[ "Return", "the", "class", "to", "use", "for", "the", "table", "." ]
def get_table_class(self):
    """
    Return the class to use for the table.
    """
    if self.table_class:
        return self.table_class
    if self.model:
        return tables.table_factory(self.model)
    raise ImproperlyConfigured(
        "You must either specify {0}.table_class or {0}.model".format(type(self).__name__)
    )
[ "def", "get_table_class", "(", "self", ")", ":", "if", "self", ".", "table_class", ":", "return", "self", ".", "table_class", "if", "self", ".", "model", ":", "return", "tables", ".", "table_factory", "(", "self", ".", "model", ")", "raise", "ImproperlyConfigured", "(", "\"You must either specify {0}.table_class or {0}.model\"", ".", "format", "(", "type", "(", "self", ")", ".", "__name__", ")", ")" ]
https://github.com/jieter/django-tables2/blob/ce392ee2ee341d7180345a6113919cf9a3925f16/django_tables2/views.py#L86-L97
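A usage sketch of the fallback logic, assuming django-tables2's documented SingleTableView (which mixes in SingleTableMixin) and a hypothetical Person model:

import django_tables2 as tables
from myapp.models import Person  # hypothetical app and model

class PersonListView(tables.SingleTableView):
    # With no table_class set, get_table_class() falls back to
    # tables.table_factory(Person); setting neither raises ImproperlyConfigured.
    model = Person
    template_name = "people.html"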
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/full/idlelib/pyshell.py
python
PyShell.ResetColorizer
(self)
[]
def ResetColorizer(self):
    super().ResetColorizer()
    theme = idleConf.CurrentTheme()
    tag_colors = {
        "stdin": {'background': None, 'foreground': None},
        "stdout": idleConf.GetHighlight(theme, "stdout"),
        "stderr": idleConf.GetHighlight(theme, "stderr"),
        "console": idleConf.GetHighlight(theme, "normal"),
    }
    for tag, tag_colors_config in tag_colors.items():
        self.text.tag_configure(tag, **tag_colors_config)

    if self.shell_sidebar is not None:
        self.shell_sidebar.update_colors()
[ "def", "ResetColorizer", "(", "self", ")", ":", "super", "(", ")", ".", "ResetColorizer", "(", ")", "theme", "=", "idleConf", ".", "CurrentTheme", "(", ")", "tag_colors", "=", "{", "\"stdin\"", ":", "{", "'background'", ":", "None", ",", "'foreground'", ":", "None", "}", ",", "\"stdout\"", ":", "idleConf", ".", "GetHighlight", "(", "theme", ",", "\"stdout\"", ")", ",", "\"stderr\"", ":", "idleConf", ".", "GetHighlight", "(", "theme", ",", "\"stderr\"", ")", ",", "\"console\"", ":", "idleConf", ".", "GetHighlight", "(", "theme", ",", "\"normal\"", ")", ",", "}", "for", "tag", ",", "tag_colors_config", "in", "tag_colors", ".", "items", "(", ")", ":", "self", ".", "text", ".", "tag_configure", "(", "tag", ",", "*", "*", "tag_colors_config", ")", "if", "self", ".", "shell_sidebar", "is", "not", "None", ":", "self", ".", "shell_sidebar", ".", "update_colors", "(", ")" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/idlelib/pyshell.py#L975-L989
pulp/pulp
a0a28d804f997b6f81c391378aff2e4c90183df9
server/pulp/server/managers/repo/_common.py
python
to_related_repo
(repo_data, configs)
return r
Converts the given database representation of a repository into a plugin's representation of a related repository. The list of configurations for the repository's plugins will be included in the returned type. @param repo_data: database representation of a repository @type repo_data: dict @param configs: list of configurations for all relevant plugins on the repo @type configs: list @return: transfer object used in many plugin API calls @rtype: pulp.plugins.model.RelatedRepository
Converts the given database representation of a repository into a plugin's representation of a related repository. The list of configurations for the repository's plugins will be included in the returned type.
[ "Converts", "the", "given", "database", "representation", "of", "a", "repository", "into", "a", "plugin", "s", "representation", "of", "a", "related", "repository", ".", "The", "list", "of", "configurations", "for", "the", "repository", "s", "plugins", "will", "be", "included", "in", "the", "returned", "type", "." ]
def to_related_repo(repo_data, configs):
    """
    Converts the given database representation of a repository into a plugin's
    representation of a related repository. The list of configurations for the
    repository's plugins will be included in the returned type.

    @param repo_data: database representation of a repository
    @type  repo_data: dict

    @param configs: list of configurations for all relevant plugins on the repo
    @type  configs: list

    @return: transfer object used in many plugin API calls
    @rtype:  pulp.plugins.model.RelatedRepository
    """
    r = RelatedRepository(repo_data['id'], configs, repo_data['display_name'],
                          repo_data['description'], repo_data['notes'])
    return r
[ "def", "to_related_repo", "(", "repo_data", ",", "configs", ")", ":", "r", "=", "RelatedRepository", "(", "repo_data", "[", "'id'", "]", ",", "configs", ",", "repo_data", "[", "'display_name'", "]", ",", "repo_data", "[", "'description'", "]", ",", "repo_data", "[", "'notes'", "]", ")", "return", "r" ]
https://github.com/pulp/pulp/blob/a0a28d804f997b6f81c391378aff2e4c90183df9/server/pulp/server/managers/repo/_common.py#L25-L42
openedx/edx-platform
68dd185a0ab45862a2a61e0f803d7e03d2be71b5
lms/djangoapps/discussion/rest_api/forms.py
python
ThreadListGetForm.clean_following
(self)
Validate following
Validate following
[ "Validate", "following" ]
def clean_following(self):
    """Validate following"""
    value = self.cleaned_data["following"]
    if value is False:  # lint-amnesty, pylint: disable=no-else-raise
        raise ValidationError("The value of the 'following' parameter must be true.")
    else:
        return value
[ "def", "clean_following", "(", "self", ")", ":", "value", "=", "self", ".", "cleaned_data", "[", "\"following\"", "]", "if", "value", "is", "False", ":", "# lint-amnesty, pylint: disable=no-else-raise", "raise", "ValidationError", "(", "\"The value of the 'following' parameter must be true.\"", ")", "else", ":", "return", "value" ]
https://github.com/openedx/edx-platform/blob/68dd185a0ab45862a2a61e0f803d7e03d2be71b5/lms/djangoapps/discussion/rest_api/forms.py#L82-L88
lvzhaoyang/DeeperInverseCompositionalAlgorithm
9a401bf2b03ecfed169c1a655b4fe8be8a4c211d
code/models/LeastSquareTracking.py
python
LeastSquareTracking.__Nto1
(self, x)
return x.sum(dim=1, keepdim=True) / C
Take the average of multi-dimension feature into one dimensional, which boostrap the optimization speed
Take the average of multi-dimension feature into one dimensional, which boostrap the optimization speed
[ "Take", "the", "average", "of", "multi", "-", "dimension", "feature", "into", "one", "dimensional", "which", "boostrap", "the", "optimization", "speed" ]
def __Nto1(self, x):
    """ Take the average of multi-dimension feature into one dimensional,
        which boostrap the optimization speed
    """
    C = x.shape[1]
    return x.sum(dim=1, keepdim=True) / C
[ "def", "__Nto1", "(", "self", ",", "x", ")", ":", "C", "=", "x", ".", "shape", "[", "1", "]", "return", "x", ".", "sum", "(", "dim", "=", "1", ",", "keepdim", "=", "True", ")", "/", "C" ]
https://github.com/lvzhaoyang/DeeperInverseCompositionalAlgorithm/blob/9a401bf2b03ecfed169c1a655b4fe8be8a4c211d/code/models/LeastSquareTracking.py#L223-L228
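The channel averaging in __Nto1 is just a mean over dim 1; a standalone check, assuming PyTorch tensors of shape (N, C, H, W):

import torch

x = torch.randn(2, 4, 8, 8)                     # N=2 samples, C=4 feature channels
avg = x.sum(dim=1, keepdim=True) / x.shape[1]   # same computation as __Nto1
assert avg.shape == (2, 1, 8, 8)
assert torch.allclose(avg, x.mean(dim=1, keepdim=True))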
collinsctk/PyQYT
7af3673955f94ff1b2df2f94220cd2dab2e252af
ExtentionPackages/Crypto/PublicKey/RSA.py
python
_RSAobj.verify
(self, M, signature)
return pubkey.pubkey.verify(self, M, signature)
Verify the validity of an RSA signature. :attention: this function performs the plain, primitive RSA encryption (*textbook*). In real applications, you always need to use proper cryptographic padding, and you should not directly verify data with this method. Failure to do so may lead to security vulnerabilities. It is recommended to use modules `Crypto.Signature.PKCS1_PSS` or `Crypto.Signature.PKCS1_v1_5` instead. :Parameter M: The expected message. :Type M: byte string or long :Parameter signature: The RSA signature to verify. The first item of the tuple is the actual signature (a long not larger than the modulus **n**), whereas the second item is always ignored. :Type signature: A 2-item tuple as return by `sign` :Return: True if the signature is correct, False otherwise.
Verify the validity of an RSA signature.
[ "Verify", "the", "validity", "of", "an", "RSA", "signature", "." ]
def verify(self, M, signature):
    """Verify the validity of an RSA signature.

    :attention: this function performs the plain, primitive RSA encryption
     (*textbook*). In real applications, you always need to use proper
     cryptographic padding, and you should not directly verify data with
     this method. Failure to do so may lead to security vulnerabilities.
     It is recommended to use modules
     `Crypto.Signature.PKCS1_PSS` or `Crypto.Signature.PKCS1_v1_5` instead.

    :Parameter M: The expected message.
    :Type M: byte string or long

    :Parameter signature:
        The RSA signature to verify. The first item of the tuple is the
        actual signature (a long not larger than the modulus **n**), whereas
        the second item is always ignored.
    :Type signature: A 2-item tuple as return by `sign`

    :Return: True if the signature is correct, False otherwise.
    """
    return pubkey.pubkey.verify(self, M, signature)
[ "def", "verify", "(", "self", ",", "M", ",", "signature", ")", ":", "return", "pubkey", ".", "pubkey", ".", "verify", "(", "self", ",", "M", ",", "signature", ")" ]
https://github.com/collinsctk/PyQYT/blob/7af3673955f94ff1b2df2f94220cd2dab2e252af/ExtentionPackages/Crypto/PublicKey/RSA.py#L201-L221
Kismuz/btgym
7fb3316e67f1d7a17c620630fb62fb29428b2cec
btgym/envs/base.py
python
BTgymEnv._comm_with_timeout
( socket, message,)
return response
Exchanges messages via socket, timeout sensitive. Args: socket: zmq connected socket to communicate via; message: message to send; Note: socket zmq.RCVTIMEO and zmq.SNDTIMEO should be set to some finite number of milliseconds. Returns: dictionary: `status`: communication result; `message`: received message if status == `ok` or None; `time`: remote side response time.
Exchanges messages via socket, timeout sensitive.
[ "Exchanges", "messages", "via", "socket", "timeout", "sensitive", "." ]
def _comm_with_timeout(socket, message,):
    """
    Exchanges messages via socket, timeout sensitive.

    Args:
        socket: zmq connected socket to communicate via;
        message: message to send;

    Note:
        socket zmq.RCVTIMEO and zmq.SNDTIMEO should be set to some finite
        number of milliseconds.

    Returns:
        dictionary:
            `status`: communication result;
            `message`: received message if status == `ok` or None;
            `time`: remote side response time.
    """
    response = dict(
        status='ok',
        message=None,
    )
    try:
        socket.send_pyobj(message)

    except zmq.ZMQError as e:
        if e.errno == zmq.EAGAIN:
            response['status'] = 'send_failed_due_to_connect_timeout'
        else:
            response['status'] = 'send_failed_for_unknown_reason'
        return response

    start = time.time()

    try:
        response['message'] = socket.recv_pyobj()
        response['time'] = time.time() - start

    except zmq.ZMQError as e:
        if e.errno == zmq.EAGAIN:
            response['status'] = 'receive_failed_due_to_connect_timeout'
        else:
            response['status'] = 'receive_failed_for_unknown_reason'
        return response

    return response
[ "def", "_comm_with_timeout", "(", "socket", ",", "message", ",", ")", ":", "response", "=", "dict", "(", "status", "=", "'ok'", ",", "message", "=", "None", ",", ")", "try", ":", "socket", ".", "send_pyobj", "(", "message", ")", "except", "zmq", ".", "ZMQError", "as", "e", ":", "if", "e", ".", "errno", "==", "zmq", ".", "EAGAIN", ":", "response", "[", "'status'", "]", "=", "'send_failed_due_to_connect_timeout'", "else", ":", "response", "[", "'status'", "]", "=", "'send_failed_for_unknown_reason'", "return", "response", "start", "=", "time", ".", "time", "(", ")", "try", ":", "response", "[", "'message'", "]", "=", "socket", ".", "recv_pyobj", "(", ")", "response", "[", "'time'", "]", "=", "time", ".", "time", "(", ")", "-", "start", "except", "zmq", ".", "ZMQError", "as", "e", ":", "if", "e", ".", "errno", "==", "zmq", ".", "EAGAIN", ":", "response", "[", "'status'", "]", "=", "'receive_failed_due_to_connect_timeout'", "else", ":", "response", "[", "'status'", "]", "=", "'receive_failed_for_unknown_reason'", "return", "response", "return", "response" ]
https://github.com/Kismuz/btgym/blob/7fb3316e67f1d7a17c620630fb62fb29428b2cec/btgym/envs/base.py#L453-L498
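Per the docstring, the socket must have finite zmq.RCVTIMEO and zmq.SNDTIMEO; a minimal setup sketch with a hypothetical endpoint:

import zmq

ctx = zmq.Context.instance()
socket = ctx.socket(zmq.REQ)
socket.setsockopt(zmq.RCVTIMEO, 5000)   # finite receive timeout in ms, as required
socket.setsockopt(zmq.SNDTIMEO, 5000)   # finite send timeout in ms
socket.connect("tcp://localhost:5555")  # hypothetical server address
# A socket prepared this way can be handed to _comm_with_timeout with a message;
# on timeout, the returned dict's `status` field names the failing direction.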
google/python-fire
ed44d8b801fc24e40729abef11b2dcbf6588d361
fire/console/console_attr.py
python
ConsoleAttr._GetConsoleEncoding
(self)
return None
Gets the encoding as declared by the stdout stream. Returns: str, The encoding name or None if it could not be determined.
Gets the encoding as declared by the stdout stream.
[ "Gets", "the", "encoding", "as", "declared", "by", "the", "stdout", "stream", "." ]
def _GetConsoleEncoding(self):
    """Gets the encoding as declared by the stdout stream.

    Returns:
      str, The encoding name or None if it could not be determined.
    """
    console_encoding = getattr(sys.stdout, 'encoding', None)
    if not console_encoding:
        return None
    console_encoding = console_encoding.lower()
    if 'utf-8' in console_encoding:
        return 'utf8'
    elif 'cp437' in console_encoding:
        return 'cp437'
    return None
[ "def", "_GetConsoleEncoding", "(", "self", ")", ":", "console_encoding", "=", "getattr", "(", "sys", ".", "stdout", ",", "'encoding'", ",", "None", ")", "if", "not", "console_encoding", ":", "return", "None", "console_encoding", "=", "console_encoding", ".", "lower", "(", ")", "if", "'utf-8'", "in", "console_encoding", ":", "return", "'utf8'", "elif", "'cp437'", "in", "console_encoding", ":", "return", "'cp437'", "return", "None" ]
https://github.com/google/python-fire/blob/ed44d8b801fc24e40729abef11b2dcbf6588d361/fire/console/console_attr.py#L307-L321
pypa/pipenv
b21baade71a86ab3ee1429f71fbc14d4f95fb75d
pipenv/vendor/distlib/resources.py
python
Resource.as_stream
(self)
return self.finder.get_stream(self)
Get the resource as a stream. This is not a property to make it obvious that it returns a new stream each time.
Get the resource as a stream.
[ "Get", "the", "resource", "as", "a", "stream", "." ]
def as_stream(self):
    """
    Get the resource as a stream.

    This is not a property to make it obvious that it returns a new stream
    each time.
    """
    return self.finder.get_stream(self)
[ "def", "as_stream", "(", "self", ")", ":", "return", "self", ".", "finder", ".", "get_stream", "(", "self", ")" ]
https://github.com/pypa/pipenv/blob/b21baade71a86ab3ee1429f71fbc14d4f95fb75d/pipenv/vendor/distlib/resources.py#L85-L92
openstack/openstacksdk
58384268487fa854f21c470b101641ab382c9897
openstack/identity/v3/domain.py
python
Domain.assign_role_to_group
(self, session, group, role)
return False
Assign role to group on domain
Assign role to group on domain
[ "Assign", "role", "to", "group", "on", "domain" ]
def assign_role_to_group(self, session, group, role):
    """Assign role to group on domain"""
    url = utils.urljoin(self.base_path, self.id, 'groups', group.id,
                        'roles', role.id)
    resp = session.put(url,)
    if resp.status_code == 204:
        return True
    return False
[ "def", "assign_role_to_group", "(", "self", ",", "session", ",", "group", ",", "role", ")", ":", "url", "=", "utils", ".", "urljoin", "(", "self", ".", "base_path", ",", "self", ".", "id", ",", "'groups'", ",", "group", ".", "id", ",", "'roles'", ",", "role", ".", "id", ")", "resp", "=", "session", ".", "put", "(", "url", ",", ")", "if", "resp", ".", "status_code", "==", "204", ":", "return", "True", "return", "False" ]
https://github.com/openstack/openstacksdk/blob/58384268487fa854f21c470b101641ab382c9897/openstack/identity/v3/domain.py#L78-L85
markj3d/Red9_StudioPack
1d40a8bf84c45ce7eaefdd9ccfa3cdbeb1471919
packages/pydub/pydub/utils.py
python
ratio_to_db
(ratio, val2=None, using_amplitude=True)
Converts the input float to db, which represents the equivalent to the ratio in power represented by the multiplier passed in.
Converts the input float to db, which represents the equivalent to the ratio in power represented by the multiplier passed in.
[ "Converts", "the", "input", "float", "to", "db", "which", "represents", "the", "equivalent", "to", "the", "ratio", "in", "power", "represented", "by", "the", "multiplier", "passed", "in", "." ]
def ratio_to_db(ratio, val2=None, using_amplitude=True):
    """
    Converts the input float to db, which represents the equivalent
    to the ratio in power represented by the multiplier passed in.
    """
    ratio = float(ratio)

    # accept 2 values and use the ratio of val1 to val2
    if val2 is not None:
        ratio = ratio / val2

    # special case for multiply-by-zero (convert to silence)
    if ratio == 0:
        return -float('inf')

    if using_amplitude:
        return 20 * log(ratio, 10)
    else:  # using power
        return 10 * log(ratio, 10)
[ "def", "ratio_to_db", "(", "ratio", ",", "val2", "=", "None", ",", "using_amplitude", "=", "True", ")", ":", "ratio", "=", "float", "(", "ratio", ")", "# accept 2 values and use the ratio of val1 to val2", "if", "val2", "is", "not", "None", ":", "ratio", "=", "ratio", "/", "val2", "# special case for multiply-by-zero (convert to silence)", "if", "ratio", "==", "0", ":", "return", "-", "float", "(", "'inf'", ")", "if", "using_amplitude", ":", "return", "20", "*", "log", "(", "ratio", ",", "10", ")", "else", ":", "# using power", "return", "10", "*", "log", "(", "ratio", ",", "10", ")" ]
https://github.com/markj3d/Red9_StudioPack/blob/1d40a8bf84c45ce7eaefdd9ccfa3cdbeb1471919/packages/pydub/pydub/utils.py#L76-L94
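Quick sanity values for ratio_to_db (20*log10 for amplitude, 10*log10 for power):

from pydub.utils import ratio_to_db  # module path taken from the record above

print(ratio_to_db(2.0))                         # ~6.02 dB: doubled amplitude
print(ratio_to_db(2.0, using_amplitude=False))  # ~3.01 dB: doubled power
print(ratio_to_db(1.0, 2.0))                    # ~-6.02 dB: ratio of val1 to val2
print(ratio_to_db(0))                           # -inf: multiply-by-zero maps to silence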
quantumblacklabs/causalnex
127d9324a3d68c1795299c7522f22cdea880f344
causalnex/structure/pytorch/sklearn/_base.py
python
DAGBase.fit
(self, X: Union[pd.DataFrame, np.ndarray], y: Union[pd.Series, np.ndarray])
return self
Fits the sm model using the concat of X and y.
Fits the sm model using the concat of X and y.
[ "Fits", "the", "sm", "model", "using", "the", "concat", "of", "X", "and", "y", "." ]
def fit(self, X: Union[pd.DataFrame, np.ndarray], y: Union[pd.Series, np.ndarray]):
    """
    Fits the sm model using the concat of X and y.
    """
    # defensive X, y checks
    check_X_y(X, y, y_numeric=True)

    # force X, y to DataFrame, Series for later calculations
    X = pd.DataFrame(X)
    y = pd.Series(y)
    # force name so that name != None (causes errors in notears)
    y.name = y.name or "__target"

    # if self.dist_type_schema is None, assume all columns are continuous
    # NOTE: this is copied due to later insertions
    dist_type_schema = copy.deepcopy(self.dist_type_schema) or {
        col: "cont" for col in X.columns
    }

    if self.standardize:
        # only standardize the continuous dist type columns.
        self.continuous_col_idxs = [
            X.columns.get_loc(col)
            for col, alias in dist_type_schema.items()
            if alias == "cont"
        ]

        # copy X to prevet changes to underlying array data
        X = X.copy()
        self._ss_X = StandardScaler()
        X.iloc[:, self.continuous_col_idxs] = self._ss_X.fit_transform(
            X.iloc[:, self.continuous_col_idxs]
        )

        # if its a continuous target also standardize
        if self.target_dist_type == "cont":
            y = y.copy()
            self._ss_y = StandardScaler()
            y[:] = self._ss_y.fit_transform(y.values.reshape(-1, 1)).reshape(-1)

    # add the target to the dist_type_schema
    # NOTE: this must be done AFTER standardize
    dist_type_schema[y.name] = self.target_dist_type

    # preserve the feature and target colnames
    self._features = tuple(X.columns)
    self._target = y.name

    # concat X and y along column axis
    X = pd.concat([X, y], axis=1)

    # make copy to prevent mutability
    tabu_parent_nodes = copy.deepcopy(self.tabu_parent_nodes)
    if self.dependent_target:
        if tabu_parent_nodes is None:
            tabu_parent_nodes = [self._target]
        elif self._target not in tabu_parent_nodes:
            tabu_parent_nodes.append(self._target)

    # fit the structured model
    self.graph_ = notears.from_pandas(
        X,
        dist_type_schema=dist_type_schema,
        lasso_beta=self.alpha,
        ridge_beta=self.beta,
        hidden_layer_units=self.hidden_layer_units,
        w_threshold=self.threshold,
        tabu_edges=self.tabu_edges,
        tabu_parent_nodes=tabu_parent_nodes,
        tabu_child_nodes=self.tabu_child_nodes,
        use_bias=self.fit_intercept,
        **(self.notears_mlp_kwargs or {}),
    )

    # keep thresholding until the DAG constraint is enforced
    if self.enforce_dag:
        self.graph_.threshold_till_dag()

    return self
[ "def", "fit", "(", "self", ",", "X", ":", "Union", "[", "pd", ".", "DataFrame", ",", "np", ".", "ndarray", "]", ",", "y", ":", "Union", "[", "pd", ".", "Series", ",", "np", ".", "ndarray", "]", ")", ":", "# defensive X, y checks", "check_X_y", "(", "X", ",", "y", ",", "y_numeric", "=", "True", ")", "# force X, y to DataFrame, Series for later calculations", "X", "=", "pd", ".", "DataFrame", "(", "X", ")", "y", "=", "pd", ".", "Series", "(", "y", ")", "# force name so that name != None (causes errors in notears)", "y", ".", "name", "=", "y", ".", "name", "or", "\"__target\"", "# if self.dist_type_schema is None, assume all columns are continuous", "# NOTE: this is copied due to later insertions", "dist_type_schema", "=", "copy", ".", "deepcopy", "(", "self", ".", "dist_type_schema", ")", "or", "{", "col", ":", "\"cont\"", "for", "col", "in", "X", ".", "columns", "}", "if", "self", ".", "standardize", ":", "# only standardize the continuous dist type columns.", "self", ".", "continuous_col_idxs", "=", "[", "X", ".", "columns", ".", "get_loc", "(", "col", ")", "for", "col", ",", "alias", "in", "dist_type_schema", ".", "items", "(", ")", "if", "alias", "==", "\"cont\"", "]", "# copy X to prevet changes to underlying array data", "X", "=", "X", ".", "copy", "(", ")", "self", ".", "_ss_X", "=", "StandardScaler", "(", ")", "X", ".", "iloc", "[", ":", ",", "self", ".", "continuous_col_idxs", "]", "=", "self", ".", "_ss_X", ".", "fit_transform", "(", "X", ".", "iloc", "[", ":", ",", "self", ".", "continuous_col_idxs", "]", ")", "# if its a continuous target also standardize", "if", "self", ".", "target_dist_type", "==", "\"cont\"", ":", "y", "=", "y", ".", "copy", "(", ")", "self", ".", "_ss_y", "=", "StandardScaler", "(", ")", "y", "[", ":", "]", "=", "self", ".", "_ss_y", ".", "fit_transform", "(", "y", ".", "values", ".", "reshape", "(", "-", "1", ",", "1", ")", ")", ".", "reshape", "(", "-", "1", ")", "# add the target to the dist_type_schema", "# NOTE: this must be done AFTER standardize", "dist_type_schema", "[", "y", ".", "name", "]", "=", "self", ".", "target_dist_type", "# preserve the feature and target colnames", "self", ".", "_features", "=", "tuple", "(", "X", ".", "columns", ")", "self", ".", "_target", "=", "y", ".", "name", "# concat X and y along column axis", "X", "=", "pd", ".", "concat", "(", "[", "X", ",", "y", "]", ",", "axis", "=", "1", ")", "# make copy to prevent mutability", "tabu_parent_nodes", "=", "copy", ".", "deepcopy", "(", "self", ".", "tabu_parent_nodes", ")", "if", "self", ".", "dependent_target", ":", "if", "tabu_parent_nodes", "is", "None", ":", "tabu_parent_nodes", "=", "[", "self", ".", "_target", "]", "elif", "self", ".", "_target", "not", "in", "tabu_parent_nodes", ":", "tabu_parent_nodes", ".", "append", "(", "self", ".", "_target", ")", "# fit the structured model", "self", ".", "graph_", "=", "notears", ".", "from_pandas", "(", "X", ",", "dist_type_schema", "=", "dist_type_schema", ",", "lasso_beta", "=", "self", ".", "alpha", ",", "ridge_beta", "=", "self", ".", "beta", ",", "hidden_layer_units", "=", "self", ".", "hidden_layer_units", ",", "w_threshold", "=", "self", ".", "threshold", ",", "tabu_edges", "=", "self", ".", "tabu_edges", ",", "tabu_parent_nodes", "=", "tabu_parent_nodes", ",", "tabu_child_nodes", "=", "self", ".", "tabu_child_nodes", ",", "use_bias", "=", "self", ".", "fit_intercept", ",", "*", "*", "(", "self", ".", "notears_mlp_kwargs", "or", "{", "}", ")", ",", ")", "# keep thresholding until the DAG constraint is enforced", "if", 
"self", ".", "enforce_dag", ":", "self", ".", "graph_", ".", "threshold_till_dag", "(", ")", "return", "self" ]
https://github.com/quantumblacklabs/causalnex/blob/127d9324a3d68c1795299c7522f22cdea880f344/causalnex/structure/pytorch/sklearn/_base.py#L175-L255
microsoft/botbuilder-python
3d410365461dc434df59bdfeaa2f16d28d9df868
libraries/botbuilder-dialogs/botbuilder/dialogs/memory/dialog_state_manager.py
python
DialogStateManager.remove_item
(self, item: Tuple[str, object])
Determines whether the dialog state manager contains a specific value (should use __contains__). :param item: The tuple of the item to locate. :return bool: True if item is found in the dialog state manager otherwise, False
Determines whether the dialog state manager contains a specific value (should use __contains__). :param item: The tuple of the item to locate. :return bool: True if item is found in the dialog state manager otherwise, False
[ "Determines", "whether", "the", "dialog", "state", "manager", "contains", "a", "specific", "value", "(", "should", "use", "__contains__", ")", ".", ":", "param", "item", ":", "The", "tuple", "of", "the", "item", "to", "locate", ".", ":", "return", "bool", ":", "True", "if", "item", "is", "found", "in", "the", "dialog", "state", "manager", "otherwise", "False" ]
def remove_item(self, item: Tuple[str, object]) -> bool:
    """
    Determines whether the dialog state manager contains a specific value
    (should use __contains__).

    :param item: The tuple of the item to locate.
    :return bool: True if item is found in the dialog state manager
     otherwise, False
    """
    raise RuntimeError("Not supported")
[ "def", "remove_item", "(", "self", ",", "item", ":", "Tuple", "[", "str", ",", "object", "]", ")", "->", "bool", ":", "raise", "RuntimeError", "(", "\"Not supported\"", ")" ]
https://github.com/microsoft/botbuilder-python/blob/3d410365461dc434df59bdfeaa2f16d28d9df868/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/dialog_state_manager.py#L534-L540
IJDykeman/wangTiles
7c1ee2095ebdf7f72bce07d94c6484915d5cae8b
experimental_code/tiles_3d/venv_mac_py3/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py
python
TarFile.makeunknown
(self, tarinfo, targetpath)
Make a file from a TarInfo object with an unknown type at targetpath.
Make a file from a TarInfo object with an unknown type at targetpath.
[ "Make", "a", "file", "from", "a", "TarInfo", "object", "with", "an", "unknown", "type", "at", "targetpath", "." ]
def makeunknown(self, tarinfo, targetpath):
    """Make a file from a TarInfo object with an unknown type
       at targetpath.
    """
    self.makefile(tarinfo, targetpath)
    self._dbg(1, "tarfile: Unknown file type %r, " \
                 "extracted as regular file." % tarinfo.type)
[ "def", "makeunknown", "(", "self", ",", "tarinfo", ",", "targetpath", ")", ":", "self", ".", "makefile", "(", "tarinfo", ",", "targetpath", ")", "self", ".", "_dbg", "(", "1", ",", "\"tarfile: Unknown file type %r, \"", "\"extracted as regular file.\"", "%", "tarinfo", ".", "type", ")" ]
https://github.com/IJDykeman/wangTiles/blob/7c1ee2095ebdf7f72bce07d94c6484915d5cae8b/experimental_code/tiles_3d/venv_mac_py3/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py#L2312-L2318
tenzir/threatbus
a26096e7b61b3eddf25c445d40a6cd2ea4420558
apps/stix-shifter/stix_shifter_threatbus/shifter.py
python
query_indicator
( indicator: Indicator, module: str, opts: dict, sightings_queue: asyncio.Queue )
Translates an indicator into a module-specific query and executes it. E.g., if the module is `splunk`, the indicator's pattern is first translated into a valid Splunk query and then executed via the Splunk REST API. @param indicator The indicator to translate and query @param module The module's name, e.g., `splunk` @param opts The module configuration directly taken from the user-defined configuration file `config.yaml` with which this app was started @param sightings_queue The queue to put sightings into
Translates an indicator into a module-specific query and executes it. E.g., if the module is `splunk`, the indicator's pattern is first translated into a valid Splunk query and then executed via the Splunk REST API.
[ "Translates", "an", "indicator", "into", "a", "module", "-", "specific", "query", "and", "executes", "it", ".", "E", ".", "g", ".", "if", "the", "module", "is", "splunk", "the", "indicator", "s", "pattern", "is", "first", "translated", "into", "a", "valid", "Splunk", "query", "and", "then", "executed", "via", "the", "Splunk", "REST", "API", "." ]
async def query_indicator(
    indicator: Indicator, module: str, opts: dict, sightings_queue: asyncio.Queue
):
    """
    Translates an indicator into a module-specific query and executes it.
    E.g., if the module is `splunk`, the indicator's pattern is first
    translated into a valid Splunk query and then executed via the Splunk
    REST API.
    @param indicator The indicator to translate and query
    @param module The module's name, e.g., `splunk`
    @param opts The module configuration directly taken from the user-defined
        configuration file `config.yaml` with which this app was started
    @param sightings_queue The queue to put sightings into
    """
    max_results = opts["max_results"]
    connection_opts = opts["connection"]
    transmission_opts = opts.get("transmission", {})
    translation_opts = opts.get("translation", {})
    data_source = opts["data_source"]

    ## Translate the pattern to a module-specific query.
    translation = stix_translation.StixTranslation()
    dsl = translation.translate(
        module, "query", indicator, indicator.pattern, translation_opts
    )
    if not dsl.get("queries", None):
        logger.error(
            f"Failed to translate STIX-2 indicator with ID '{indicator.id}' to query for module '{module}': {dsl}"
        )
        return
    logger.debug(f"Translated pattern to {module} query: {dsl}")

    ## Run the query against the configured endpoint for this module.
    transmission = stix_transmission.StixTransmission(
        module, connection_opts, transmission_opts
    )
    query_results = []
    for query in dsl["queries"]:
        search_result = transmission.query(query)
        if not search_result["success"]:
            logger.error(str(search_result))
            continue

        search_id = search_result["search_id"]

        if transmission.is_async():
            status = transmission.status(search_id)
            if not status.get("success", None):
                logger.error(f"Fetching query status failed for module '{module}'")
                return
            while status["progress"] < 100 and status["status"] == "RUNNING":
                status = transmission.status(search_id)
                await asyncio.sleep(0.05)
        result = transmission.results(search_id, 0, max_results)
        if result["success"]:
            # Collect all results
            query_results += result["data"]
        else:
            logger.error(f"Fetching results failed for module '{module}': {result}")

    ## Translate query_results to STIX.
    if not query_results:
        return
    stix_results = translation.translate(
        module,
        "results",
        json.dumps(data_source),
        json.dumps(query_results),
        translation_opts,
    )

    ## Parse output and report back sightings to Threat Bus
    ## The stix_results is always a self-made bundle with at least an `objects`
    ## field present. The bundle may be invalid STIX though, so we cannot simply
    ## invoke `parse()`. See this link for details on the bundle stucture:
    ## https://github.com/opencybersecurityalliance/stix-shifter/blob/3.4.5/stix_shifter_utils/stix_translation/src/json_to_stix/json_to_stix_translator.py#L12
    objs = stix_results.get("objects", None)
    if objs is None:
        logger.error(
            f"Received STIX bundle without `objects` field, cannot generate sightings: {stix_results}"
        )
        return
    for sighting in map_bundle_to_sightings(indicator, objs):
        await sightings_queue.put(sighting)
[ "async", "def", "query_indicator", "(", "indicator", ":", "Indicator", ",", "module", ":", "str", ",", "opts", ":", "dict", ",", "sightings_queue", ":", "asyncio", ".", "Queue", ")", ":", "max_results", "=", "opts", "[", "\"max_results\"", "]", "connection_opts", "=", "opts", "[", "\"connection\"", "]", "transmission_opts", "=", "opts", ".", "get", "(", "\"transmission\"", ",", "{", "}", ")", "translation_opts", "=", "opts", ".", "get", "(", "\"translation\"", ",", "{", "}", ")", "data_source", "=", "opts", "[", "\"data_source\"", "]", "## Translate the pattern to a module-specific query.", "translation", "=", "stix_translation", ".", "StixTranslation", "(", ")", "dsl", "=", "translation", ".", "translate", "(", "module", ",", "\"query\"", ",", "indicator", ",", "indicator", ".", "pattern", ",", "translation_opts", ")", "if", "not", "dsl", ".", "get", "(", "\"queries\"", ",", "None", ")", ":", "logger", ".", "error", "(", "f\"Failed to translate STIX-2 indicator with ID '{indicator.id}' to query for module '{module}': {dsl}\"", ")", "return", "logger", ".", "debug", "(", "f\"Translated pattern to {module} query: {dsl}\"", ")", "## Run the query against the configured endpoint for this module.", "transmission", "=", "stix_transmission", ".", "StixTransmission", "(", "module", ",", "connection_opts", ",", "transmission_opts", ")", "query_results", "=", "[", "]", "for", "query", "in", "dsl", "[", "\"queries\"", "]", ":", "search_result", "=", "transmission", ".", "query", "(", "query", ")", "if", "not", "search_result", "[", "\"success\"", "]", ":", "logger", ".", "error", "(", "str", "(", "search_result", ")", ")", "continue", "search_id", "=", "search_result", "[", "\"search_id\"", "]", "if", "transmission", ".", "is_async", "(", ")", ":", "status", "=", "transmission", ".", "status", "(", "search_id", ")", "if", "not", "status", ".", "get", "(", "\"success\"", ",", "None", ")", ":", "logger", ".", "error", "(", "f\"Fetching query status failed for module '{module}'\"", ")", "return", "while", "status", "[", "\"progress\"", "]", "<", "100", "and", "status", "[", "\"status\"", "]", "==", "\"RUNNING\"", ":", "status", "=", "transmission", ".", "status", "(", "search_id", ")", "await", "asyncio", ".", "sleep", "(", "0.05", ")", "result", "=", "transmission", ".", "results", "(", "search_id", ",", "0", ",", "max_results", ")", "if", "result", "[", "\"success\"", "]", ":", "# Collect all results", "query_results", "+=", "result", "[", "\"data\"", "]", "else", ":", "logger", ".", "error", "(", "f\"Fetching results failed for module '{module}': {result}\"", ")", "## Translate query_results to STIX.", "if", "not", "query_results", ":", "return", "stix_results", "=", "translation", ".", "translate", "(", "module", ",", "\"results\"", ",", "json", ".", "dumps", "(", "data_source", ")", ",", "json", ".", "dumps", "(", "query_results", ")", ",", "translation_opts", ",", ")", "## Parse output and report back sightings to Threat Bus", "## The stix_results is always a self-made bundle with at least an `objects`", "## field present. The bundle may be invalid STIX though, so we cannot simply", "## invoke `parse()`. 
See this link for details on the bundle stucture:", "## https://github.com/opencybersecurityalliance/stix-shifter/blob/3.4.5/stix_shifter_utils/stix_translation/src/json_to_stix/json_to_stix_translator.py#L12", "objs", "=", "stix_results", ".", "get", "(", "\"objects\"", ",", "None", ")", "if", "objs", "is", "None", ":", "logger", ".", "error", "(", "f\"Received STIX bundle without `objects` field, cannot generate sightings: {stix_results}\"", ")", "return", "for", "sighting", "in", "map_bundle_to_sightings", "(", "indicator", ",", "objs", ")", ":", "await", "sightings_queue", ".", "put", "(", "sighting", ")" ]
https://github.com/tenzir/threatbus/blob/a26096e7b61b3eddf25c445d40a6cd2ea4420558/apps/stix-shifter/stix_shifter_threatbus/shifter.py#L360-L442
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
custom/up_nrhm/reports/asha_functionality_checklist_report.py
python
ASHAFunctionalityChecklistReport.ashas
(self)
return sorted(list(self.model_data.data.values()), key=lambda x: x['completed_on'])
[]
def ashas(self):
    return sorted(list(self.model_data.data.values()), key=lambda x: x['completed_on'])
[ "def", "ashas", "(", "self", ")", ":", "return", "sorted", "(", "list", "(", "self", ".", "model_data", ".", "data", ".", "values", "(", ")", ")", ",", "key", "=", "lambda", "x", ":", "x", "[", "'completed_on'", "]", ")" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/custom/up_nrhm/reports/asha_functionality_checklist_report.py#L30-L31
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
cb692f527e4e819b6c228187c5702d990a180043
bin/x86/Debug/scripting_engine/Lib/xmlrpclib.py
python
gzip_decode
(data)
return decoded
gzip encoded data -> unencoded data Decode data using the gzip content encoding as described in RFC 1952
gzip encoded data -> unencoded data
[ "gzip", "encoded", "data", "-", ">", "unencoded", "data" ]
def gzip_decode(data):
    """gzip encoded data -> unencoded data

    Decode data using the gzip content encoding as described in RFC 1952
    """
    if not gzip:
        raise NotImplementedError
    f = StringIO.StringIO(data)
    gzf = gzip.GzipFile(mode="rb", fileobj=f)
    try:
        decoded = gzf.read()
    except IOError:
        raise ValueError("invalid data")
    f.close()
    gzf.close()
    return decoded
[ "def", "gzip_decode", "(", "data", ")", ":", "if", "not", "gzip", ":", "raise", "NotImplementedError", "f", "=", "StringIO", ".", "StringIO", "(", "data", ")", "gzf", "=", "gzip", ".", "GzipFile", "(", "mode", "=", "\"rb\"", ",", "fileobj", "=", "f", ")", "try", ":", "decoded", "=", "gzf", ".", "read", "(", ")", "except", "IOError", ":", "raise", "ValueError", "(", "\"invalid data\"", ")", "f", ".", "close", "(", ")", "gzf", ".", "close", "(", ")", "return", "decoded" ]
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/bin/x86/Debug/scripting_engine/Lib/xmlrpclib.py#L1171-L1186
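A round-trip check of the RFC 1952 behavior; the record's code targets Python 2 (StringIO), so this sketch uses the modern stdlib equivalents:

import gzip

payload = b"hello, world"
encoded = gzip.compress(payload)  # gzip content encoding per RFC 1952
assert gzip.decompress(encoded) == payload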
dorneanu/smalisca
1aa7a164ee3060afd51d0524b80aa6e8bbec3515
smalisca/modules/module_graph.py
python
add_nodes
(graph, nodes)
return graph
Add node(s) to graph Args: graph (Graph): Graph to add node(s) to nodes (list): List of nodes Returns: Graph: Return the modified graph
Add node(s) to graph
[ "Add", "node", "(", "s", ")", "to", "graph" ]
def add_nodes(graph, nodes):
    """ Add node(s) to graph

    Args:
        graph (Graph): Graph to add node(s) to
        nodes (list): List of nodes

    Returns:
        Graph: Return the modified graph
    """
    for n in nodes:
        if isinstance(n, tuple):
            graph.node(n[0], **n[1])
        else:
            graph.node(n)

    return graph
[ "def", "add_nodes", "(", "graph", ",", "nodes", ")", ":", "for", "n", "in", "nodes", ":", "if", "isinstance", "(", "n", ",", "tuple", ")", ":", "graph", ".", "node", "(", "n", "[", "0", "]", ",", "*", "*", "n", "[", "1", "]", ")", "else", ":", "graph", ".", "node", "(", "n", ")", "return", "graph" ]
https://github.com/dorneanu/smalisca/blob/1aa7a164ee3060afd51d0524b80aa6e8bbec3515/smalisca/modules/module_graph.py#L42-L58
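A usage sketch for add_nodes (assuming the function from the record above is in scope), using the graphviz package's Digraph, whose node() method accepts a name plus keyword attributes:

from graphviz import Digraph

g = Digraph()
# Plain names and (name, attrs) tuples can be mixed, matching the two branches.
g = add_nodes(g, ["start", ("end", {"shape": "box", "color": "red"})])
print(g.source)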
cw1204772/AIC2018_iamai
9c3720ba5eeb94e02deed303f32acaaa80aa893d
Detection/lib/datasets/json_dataset.py
python
_merge_proposal_boxes_into_roidb
(roidb, box_list)
Add proposal boxes to each roidb entry.
Add proposal boxes to each roidb entry.
[ "Add", "proposal", "boxes", "to", "each", "roidb", "entry", "." ]
def _merge_proposal_boxes_into_roidb(roidb, box_list):
    """Add proposal boxes to each roidb entry."""
    assert len(box_list) == len(roidb)
    for i, entry in enumerate(roidb):
        boxes = box_list[i]
        num_boxes = boxes.shape[0]
        gt_overlaps = np.zeros(
            (num_boxes, entry['gt_overlaps'].shape[1]),
            dtype=entry['gt_overlaps'].dtype
        )
        box_to_gt_ind_map = -np.ones(
            (num_boxes), dtype=entry['box_to_gt_ind_map'].dtype
        )

        # Note: unlike in other places, here we intentionally include all gt
        # rois, even ones marked as crowd. Boxes that overlap with crowds will
        # be filtered out later (see: _filter_crowd_proposals).
        gt_inds = np.where(entry['gt_classes'] > 0)[0]
        if len(gt_inds) > 0:
            gt_boxes = entry['boxes'][gt_inds, :]
            gt_classes = entry['gt_classes'][gt_inds]
            proposal_to_gt_overlaps = box_utils.bbox_overlaps(
                boxes.astype(dtype=np.float32, copy=False),
                gt_boxes.astype(dtype=np.float32, copy=False)
            )
            # Gt box that overlaps each input box the most
            # (ties are broken arbitrarily by class order)
            argmaxes = proposal_to_gt_overlaps.argmax(axis=1)
            # Amount of that overlap
            maxes = proposal_to_gt_overlaps.max(axis=1)
            # Those boxes with non-zero overlap with gt boxes
            I = np.where(maxes > 0)[0]
            # Record max overlaps with the class of the appropriate gt box
            gt_overlaps[I, gt_classes[argmaxes[I]]] = maxes[I]
            box_to_gt_ind_map[I] = gt_inds[argmaxes[I]]
        entry['boxes'] = np.append(
            entry['boxes'],
            boxes.astype(entry['boxes'].dtype, copy=False),
            axis=0
        )
        entry['gt_classes'] = np.append(
            entry['gt_classes'],
            np.zeros((num_boxes), dtype=entry['gt_classes'].dtype)
        )
        entry['seg_areas'] = np.append(
            entry['seg_areas'],
            np.zeros((num_boxes), dtype=entry['seg_areas'].dtype)
        )
        entry['gt_overlaps'] = np.append(
            entry['gt_overlaps'].toarray(), gt_overlaps, axis=0
        )
        entry['gt_overlaps'] = scipy.sparse.csr_matrix(entry['gt_overlaps'])
        entry['is_crowd'] = np.append(
            entry['is_crowd'],
            np.zeros((num_boxes), dtype=entry['is_crowd'].dtype)
        )
        entry['box_to_gt_ind_map'] = np.append(
            entry['box_to_gt_ind_map'],
            box_to_gt_ind_map.astype(
                entry['box_to_gt_ind_map'].dtype, copy=False
            )
        )
[ "def", "_merge_proposal_boxes_into_roidb", "(", "roidb", ",", "box_list", ")", ":", "assert", "len", "(", "box_list", ")", "==", "len", "(", "roidb", ")", "for", "i", ",", "entry", "in", "enumerate", "(", "roidb", ")", ":", "boxes", "=", "box_list", "[", "i", "]", "num_boxes", "=", "boxes", ".", "shape", "[", "0", "]", "gt_overlaps", "=", "np", ".", "zeros", "(", "(", "num_boxes", ",", "entry", "[", "'gt_overlaps'", "]", ".", "shape", "[", "1", "]", ")", ",", "dtype", "=", "entry", "[", "'gt_overlaps'", "]", ".", "dtype", ")", "box_to_gt_ind_map", "=", "-", "np", ".", "ones", "(", "(", "num_boxes", ")", ",", "dtype", "=", "entry", "[", "'box_to_gt_ind_map'", "]", ".", "dtype", ")", "# Note: unlike in other places, here we intentionally include all gt", "# rois, even ones marked as crowd. Boxes that overlap with crowds will", "# be filtered out later (see: _filter_crowd_proposals).", "gt_inds", "=", "np", ".", "where", "(", "entry", "[", "'gt_classes'", "]", ">", "0", ")", "[", "0", "]", "if", "len", "(", "gt_inds", ")", ">", "0", ":", "gt_boxes", "=", "entry", "[", "'boxes'", "]", "[", "gt_inds", ",", ":", "]", "gt_classes", "=", "entry", "[", "'gt_classes'", "]", "[", "gt_inds", "]", "proposal_to_gt_overlaps", "=", "box_utils", ".", "bbox_overlaps", "(", "boxes", ".", "astype", "(", "dtype", "=", "np", ".", "float32", ",", "copy", "=", "False", ")", ",", "gt_boxes", ".", "astype", "(", "dtype", "=", "np", ".", "float32", ",", "copy", "=", "False", ")", ")", "# Gt box that overlaps each input box the most", "# (ties are broken arbitrarily by class order)", "argmaxes", "=", "proposal_to_gt_overlaps", ".", "argmax", "(", "axis", "=", "1", ")", "# Amount of that overlap", "maxes", "=", "proposal_to_gt_overlaps", ".", "max", "(", "axis", "=", "1", ")", "# Those boxes with non-zero overlap with gt boxes", "I", "=", "np", ".", "where", "(", "maxes", ">", "0", ")", "[", "0", "]", "# Record max overlaps with the class of the appropriate gt box", "gt_overlaps", "[", "I", ",", "gt_classes", "[", "argmaxes", "[", "I", "]", "]", "]", "=", "maxes", "[", "I", "]", "box_to_gt_ind_map", "[", "I", "]", "=", "gt_inds", "[", "argmaxes", "[", "I", "]", "]", "entry", "[", "'boxes'", "]", "=", "np", ".", "append", "(", "entry", "[", "'boxes'", "]", ",", "boxes", ".", "astype", "(", "entry", "[", "'boxes'", "]", ".", "dtype", ",", "copy", "=", "False", ")", ",", "axis", "=", "0", ")", "entry", "[", "'gt_classes'", "]", "=", "np", ".", "append", "(", "entry", "[", "'gt_classes'", "]", ",", "np", ".", "zeros", "(", "(", "num_boxes", ")", ",", "dtype", "=", "entry", "[", "'gt_classes'", "]", ".", "dtype", ")", ")", "entry", "[", "'seg_areas'", "]", "=", "np", ".", "append", "(", "entry", "[", "'seg_areas'", "]", ",", "np", ".", "zeros", "(", "(", "num_boxes", ")", ",", "dtype", "=", "entry", "[", "'seg_areas'", "]", ".", "dtype", ")", ")", "entry", "[", "'gt_overlaps'", "]", "=", "np", ".", "append", "(", "entry", "[", "'gt_overlaps'", "]", ".", "toarray", "(", ")", ",", "gt_overlaps", ",", "axis", "=", "0", ")", "entry", "[", "'gt_overlaps'", "]", "=", "scipy", ".", "sparse", ".", "csr_matrix", "(", "entry", "[", "'gt_overlaps'", "]", ")", "entry", "[", "'is_crowd'", "]", "=", "np", ".", "append", "(", "entry", "[", "'is_crowd'", "]", ",", "np", ".", "zeros", "(", "(", "num_boxes", ")", ",", "dtype", "=", "entry", "[", "'is_crowd'", "]", ".", "dtype", ")", ")", "entry", "[", "'box_to_gt_ind_map'", "]", "=", "np", ".", "append", "(", "entry", "[", "'box_to_gt_ind_map'", "]", ",", "box_to_gt_ind_map", ".", 
"astype", "(", "entry", "[", "'box_to_gt_ind_map'", "]", ".", "dtype", ",", "copy", "=", "False", ")", ")" ]
https://github.com/cw1204772/AIC2018_iamai/blob/9c3720ba5eeb94e02deed303f32acaaa80aa893d/Detection/lib/datasets/json_dataset.py#L349-L410
dbcli/mssql-cli
6509aa2fc226dde8ce6bab7af9cbb5f03717b936
build.py
python
build
()
Builds mssql-cli package.
Builds mssql-cli package.
[ "Builds", "mssql", "-", "cli", "package", "." ]
def build():
    """
    Builds mssql-cli package.
    """
    print_heading('Cleanup')

    # clean
    utility.clean_up(utility.MSSQLCLI_DIST_DIRECTORY)
    utility.clean_up_egg_info_sub_directories(utility.ROOT_DIR)

    print_heading('Running setup')

    # install general requirements.
    utility.exec_command(
        '{0} -m pip install -r requirements-dev.txt'.format(PYTHON),
        utility.ROOT_DIR)

    # convert windows line endings to unix for mssql-cli bash script
    utility.exec_command(
        '{0} dos2unix.py mssql-cli mssql-cli'.format(PYTHON),
        utility.ROOT_DIR)

    # run flake8
    code_analysis()

    if utility.get_current_platform().startswith('win'):
        platforms_to_build = ['win32', 'win_amd64']
    else:
        platforms_to_build = [utility.get_current_platform()]

    for plat in platforms_to_build:
        # For the current platform, populate the appropriate binaries and
        # generate the wheel.
        clean_and_copy_sqltoolsservice(plat)
        utility.clean_up(utility.MSSQLCLI_BUILD_DIRECTORY)

        print_heading('Building mssql-cli pip package')
        utility.exec_command('%s --version' % PYTHON, utility.ROOT_DIR)
        utility.exec_command('%s setup.py bdist_wheel --plat-name %s' % (PYTHON, plat),
                             utility.ROOT_DIR,
                             continue_on_error=False)

    try:
        # checks if long description will render correctly--does not work on some systems
        utility.exec_command('twine check {}'
                             .format(os.path.join(utility.MSSQLCLI_DIST_DIRECTORY, '*')),
                             utility.ROOT_DIR,
                             continue_on_error=False)
    except IOError as err:
        print(err)
        print("Unable to run 'twine check'.")

    # Copy back the SqlToolsService binaries for this platform.
    clean_and_copy_sqltoolsservice(utility.get_current_platform())
    copy_and_rename_wheels()
[ "def", "build", "(", ")", ":", "print_heading", "(", "'Cleanup'", ")", "# clean", "utility", ".", "clean_up", "(", "utility", ".", "MSSQLCLI_DIST_DIRECTORY", ")", "utility", ".", "clean_up_egg_info_sub_directories", "(", "utility", ".", "ROOT_DIR", ")", "print_heading", "(", "'Running setup'", ")", "# install general requirements.", "utility", ".", "exec_command", "(", "'{0} -m pip install -r requirements-dev.txt'", ".", "format", "(", "PYTHON", ")", ",", "utility", ".", "ROOT_DIR", ")", "# convert windows line endings to unix for mssql-cli bash script", "utility", ".", "exec_command", "(", "'{0} dos2unix.py mssql-cli mssql-cli'", ".", "format", "(", "PYTHON", ")", ",", "utility", ".", "ROOT_DIR", ")", "# run flake8", "code_analysis", "(", ")", "if", "utility", ".", "get_current_platform", "(", ")", ".", "startswith", "(", "'win'", ")", ":", "platforms_to_build", "=", "[", "'win32'", ",", "'win_amd64'", "]", "else", ":", "platforms_to_build", "=", "[", "utility", ".", "get_current_platform", "(", ")", "]", "for", "plat", "in", "platforms_to_build", ":", "# For the current platform, populate the appropriate binaries and", "# generate the wheel.", "clean_and_copy_sqltoolsservice", "(", "plat", ")", "utility", ".", "clean_up", "(", "utility", ".", "MSSQLCLI_BUILD_DIRECTORY", ")", "print_heading", "(", "'Building mssql-cli pip package'", ")", "utility", ".", "exec_command", "(", "'%s --version'", "%", "PYTHON", ",", "utility", ".", "ROOT_DIR", ")", "utility", ".", "exec_command", "(", "'%s setup.py bdist_wheel --plat-name %s'", "%", "(", "PYTHON", ",", "plat", ")", ",", "utility", ".", "ROOT_DIR", ",", "continue_on_error", "=", "False", ")", "try", ":", "# checks if long description will render correctly--does not work on some systems", "utility", ".", "exec_command", "(", "'twine check {}'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "utility", ".", "MSSQLCLI_DIST_DIRECTORY", ",", "'*'", ")", ")", ",", "utility", ".", "ROOT_DIR", ",", "continue_on_error", "=", "False", ")", "except", "IOError", "as", "err", ":", "print", "(", "err", ")", "print", "(", "\"Unable to run 'twine check'.\"", ")", "# Copy back the SqlToolsService binaries for this platform.", "clean_and_copy_sqltoolsservice", "(", "utility", ".", "get_current_platform", "(", ")", ")", "copy_and_rename_wheels", "(", ")" ]
https://github.com/dbcli/mssql-cli/blob/6509aa2fc226dde8ce6bab7af9cbb5f03717b936/build.py#L41-L94
pwnieexpress/pwn_plug_sources
1a23324f5dc2c3de20f9c810269b6a29b2758cad
src/voiper/sulley/impacket/smb.py
python
set_key_odd_parity
(key)
return key
[]
def set_key_odd_parity(key): "" for i in range(len(key)): for k in range(7): bit = 0 t = key[i] >> k bit = (t ^ bit) & 0x1 key[i] = (key[i] & 0xFE) | bit return key
[ "def", "set_key_odd_parity", "(", "key", ")", ":", "for", "i", "in", "range", "(", "len", "(", "key", ")", ")", ":", "for", "k", "in", "range", "(", "7", ")", ":", "bit", "=", "0", "t", "=", "key", "[", "i", "]", ">>", "k", "bit", "=", "(", "t", "^", "bit", ")", "&", "0x1", "key", "[", "i", "]", "=", "(", "key", "[", "i", "]", "&", "0xFE", ")", "|", "bit", "return", "key" ]
https://github.com/pwnieexpress/pwn_plug_sources/blob/1a23324f5dc2c3de20f9c810269b6a29b2758cad/src/voiper/sulley/impacket/smb.py#L135-L144
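A caution about the function above: bit is re-initialized to 0 on every pass of the inner loop, so only bit 6 of each byte ever reaches the parity position. The sketch below shows what DES odd-parity fixing conventionally looks like (bit 0 of each key byte is the parity bit, chosen so the byte carries an odd number of 1 bits); it reflects my reading of the intent, not the code at the linked sha.

def set_key_odd_parity_fixed(key):
    # key: mutable sequence of ints, one DES key byte per element.
    for i in range(len(key)):
        ones = bin(key[i] >> 1).count('1')           # 1-bits among the 7 key bits
        key[i] = (key[i] & 0xFE) | ((ones + 1) & 1)  # LSB makes the total count odd
    return key

assert set_key_odd_parity_fixed([0x00]) == [0x01]  # zero key bits -> parity 1
assert set_key_odd_parity_fixed([0x03]) == [0x02]  # one key bit   -> parity 0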
ahmetcemturan/SFACT
7576e29ba72b33e5058049b77b7b558875542747
fabmetheus_utilities/xml_simple_reader.py
python
DocumentTypeMonad.getNextMonad
(self, character)
return self
Get the next monad.
Get the next monad.
[ "Get", "the", "next", "monad", "." ]
def getNextMonad(self, character): 'Get the next monad.' self.input.write(character) if character == '>': inputString = self.input.getvalue() if inputString.endswith('?>'): textContent = '%s\n' % inputString self.parentNode.childNodes.append(DocumentTypeNode(self.parentNode, textContent)) return OpenMonad(self.parentNode) return self
[ "def", "getNextMonad", "(", "self", ",", "character", ")", ":", "self", ".", "input", ".", "write", "(", "character", ")", "if", "character", "==", "'>'", ":", "inputString", "=", "self", ".", "input", ".", "getvalue", "(", ")", "if", "inputString", ".", "endswith", "(", "'?>'", ")", ":", "textContent", "=", "'%s\\n'", "%", "inputString", "self", ".", "parentNode", ".", "childNodes", ".", "append", "(", "DocumentTypeNode", "(", "self", ".", "parentNode", ",", "textContent", ")", ")", "return", "OpenMonad", "(", "self", ".", "parentNode", ")", "return", "self" ]
https://github.com/ahmetcemturan/SFACT/blob/7576e29ba72b33e5058049b77b7b558875542747/fabmetheus_utilities/xml_simple_reader.py#L312-L321
GoogleCloudPlatform/datastore-ndb-python
cf4cab3f1f69cd04e1a9229871be466b53729f3f
ndb/eventloop.py
python
get_event_loop
()
return ev
Return an EventLoop instance. A new instance is created for each new HTTP request. We determine that we're in a new request by inspecting os.environ, which is reset at the start of each request. Also, each thread gets its own loop.
Return an EventLoop instance.
[ "Return", "an", "EventLoop", "instance", "." ]
def get_event_loop(): """Return an EventLoop instance. A new instance is created for each new HTTP request. We determine that we're in a new request by inspecting os.environ, which is reset at the start of each request. Also, each thread gets its own loop. """ ev = _state.event_loop if not os.getenv(_EVENT_LOOP_KEY) and ev is not None: ev.clear() _state.event_loop = None ev = None if ev is None: ev = EventLoop() _state.event_loop = ev os.environ[_EVENT_LOOP_KEY] = '1' return ev
[ "def", "get_event_loop", "(", ")", ":", "ev", "=", "_state", ".", "event_loop", "if", "not", "os", ".", "getenv", "(", "_EVENT_LOOP_KEY", ")", "and", "ev", "is", "not", "None", ":", "ev", ".", "clear", "(", ")", "_state", ".", "event_loop", "=", "None", "ev", "=", "None", "if", "ev", "is", "None", ":", "ev", "=", "EventLoop", "(", ")", "_state", ".", "event_loop", "=", "ev", "os", ".", "environ", "[", "_EVENT_LOOP_KEY", "]", "=", "'1'", "return", "ev" ]
https://github.com/GoogleCloudPlatform/datastore-ndb-python/blob/cf4cab3f1f69cd04e1a9229871be466b53729f3f/ndb/eventloop.py#L293-L309
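The caching trick in get_event_loop, a thread-local store invalidated by an os.environ sentinel that the platform wipes at the start of each request, generalizes beyond ndb. A small sketch with hypothetical names (_LOOP_KEY and make_loop are mine, not ndb's), omitting ndb's ev.clear() step:

import os
import threading

_state = threading.local()
_LOOP_KEY = "__APP_HAS_LOOP__"  # hypothetical sentinel, lives in the process env

def get_cached(make_loop):
    loop = getattr(_state, "loop", None)
    # A missing sentinel means the environment was reset (a new request),
    # so any loop cached by this thread belongs to the previous request.
    if not os.getenv(_LOOP_KEY) and loop is not None:
        _state.loop = loop = None
    if loop is None:
        _state.loop = loop = make_loop()
        os.environ[_LOOP_KEY] = "1"
    return loop

first = get_cached(object)
assert get_cached(object) is first  # same thread, same env -> same loop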
GNS3/gns3-gui
da8adbaa18ab60e053af2a619efd468f4c8950f3
gns3/progress.py
python
Progress.instance
(parent=None)
return Progress._instance
Singleton to return only one instance of Progress. :returns: instance of Progress
Singleton to return only one instance of Progress.
[ "Singleton", "to", "return", "only", "one", "instance", "of", "Progress", "." ]
def instance(parent=None): """ Singleton to return only one instance of Progress. :returns: instance of Progress """ if not hasattr(Progress, "_instance") or Progress._instance is None: Progress._instance = Progress(parent) return Progress._instance
[ "def", "instance", "(", "parent", "=", "None", ")", ":", "if", "not", "hasattr", "(", "Progress", ",", "\"_instance\"", ")", "or", "Progress", ".", "_instance", "is", "None", ":", "Progress", ".", "_instance", "=", "Progress", "(", "parent", ")", "return", "Progress", ".", "_instance" ]
https://github.com/GNS3/gns3-gui/blob/da8adbaa18ab60e053af2a619efd468f4c8950f3/gns3/progress.py#L258-L267
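One subtlety of this lazy singleton is that parent only matters on the very first call; every later call returns the stored instance and silently ignores its arguments. A generic, runnable restatement of the same shape (the class name is made up):

class Tracker:  # hypothetical stand-in with the same lazy-singleton shape
    _instance = None

    def __init__(self, parent=None):
        self.parent = parent

    @staticmethod
    def instance(parent=None):
        if Tracker._instance is None:
            Tracker._instance = Tracker(parent)
        return Tracker._instance

a = Tracker.instance(parent="main-window")
b = Tracker.instance(parent="ignored")   # too late; 'a' already exists
assert a is b and a.parent == "main-window"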
BigBrotherBot/big-brother-bot
848823c71413c86e7f1ff9584f43e08d40a7f2c0
b3/plugins/poweradminurt/iourt41.py
python
Poweradminurt41Plugin.cmd_pagear
(self, data, client=None, cmd=None)
[<all/none/reset/[+-](nade|snipe|spas|pistol|auto|negev)>] - Set allowed weapons.
[<all/none/reset/[+-](nade|snipe|spas|pistol|auto|negev)>] - Set allowed weapons.
[ "[", "<all", "/", "none", "/", "reset", "/", "[", "+", "-", "]", "(", "nade|snipe|spas|pistol|auto|negev", ")", ">", "]", "-", "Set", "allowed", "weapons", "." ]
def cmd_pagear(self, data, client=None, cmd=None): """ [<all/none/reset/[+-](nade|snipe|spas|pistol|auto|negev)>] - Set allowed weapons. """ cur_gear = self.console.getCvar('g_gear').getInt() if not data: if client: nade = (cur_gear & 1) != 1 snipe = (cur_gear & 2) != 2 spas = (cur_gear & 4) != 4 pist = (cur_gear & 8) != 8 auto = (cur_gear & 16) != 16 nege = (cur_gear & 32) != 32 self.console.write('^7current gear: %s (Nade:%d, Sniper:%d, Spas:%d, Pistol:%d, Auto:%d, Negev:%d)' % (cur_gear, nade, snipe, spas, pist, auto, nege)) return else: if not data[:5] in ('all', 'none', 'reset', '+nade', '+snip', '+spas', '+pist', '+auto', '+nege', '-nade', '-snip', '-spas', '-pist', '-auto', '-nege'): if client: client.message('^7Invalid data, try !help pagear') else: self.debug('invalid data sent to cmd_pagear') return if data[:5] == 'all': self.console.setCvar('g_gear', '0') elif data[:5] == 'none': self.console.setCvar('g_gear', '63') elif data[:5] == 'reset': self.console.setCvar('g_gear', '%s' % self._origgear) else: if data[1:5] == 'nade': bit = 1 elif data[1:5] == 'snip': bit = 2 elif data[1:5] == 'spas': bit = 4 elif data[1:5] == 'pist': bit = 8 elif data[1:5] == 'auto': bit = 16 elif data[1:5] == 'nege': bit = 32 else: return if data[:1] == '+': self.console.setCvar('g_gear', '%s' % (cur_gear & (63 - bit))) elif data[:1] == '-': self.console.setCvar('g_gear', '%s' % (cur_gear | bit))
[ "def", "cmd_pagear", "(", "self", ",", "data", ",", "client", "=", "None", ",", "cmd", "=", "None", ")", ":", "cur_gear", "=", "self", ".", "console", ".", "getCvar", "(", "'g_gear'", ")", ".", "getInt", "(", ")", "if", "not", "data", ":", "if", "client", ":", "nade", "=", "(", "cur_gear", "&", "1", ")", "!=", "1", "snipe", "=", "(", "cur_gear", "&", "2", ")", "!=", "2", "spas", "=", "(", "cur_gear", "&", "4", ")", "!=", "4", "pist", "=", "(", "cur_gear", "&", "8", ")", "!=", "8", "auto", "=", "(", "cur_gear", "&", "16", ")", "!=", "16", "nege", "=", "(", "cur_gear", "&", "32", ")", "!=", "32", "self", ".", "console", ".", "write", "(", "'^7current gear: %s (Nade:%d, Sniper:%d, Spas:%d, Pistol:%d, Auto:%d, Negev:%d)'", "%", "(", "cur_gear", ",", "nade", ",", "snipe", ",", "spas", ",", "pist", ",", "auto", ",", "nege", ")", ")", "return", "else", ":", "if", "not", "data", "[", ":", "5", "]", "in", "(", "'all'", ",", "'none'", ",", "'reset'", ",", "'+nade'", ",", "'+snip'", ",", "'+spas'", ",", "'+pist'", ",", "'+auto'", ",", "'+nege'", ",", "'-nade'", ",", "'-snip'", ",", "'-spas'", ",", "'-pist'", ",", "'-auto'", ",", "'-nege'", ")", ":", "if", "client", ":", "client", ".", "message", "(", "'^7Invalid data, try !help pagear'", ")", "else", ":", "self", ".", "debug", "(", "'invalid data sent to cmd_pagear'", ")", "return", "if", "data", "[", ":", "5", "]", "==", "'all'", ":", "self", ".", "console", ".", "setCvar", "(", "'g_gear'", ",", "'0'", ")", "elif", "data", "[", ":", "5", "]", "==", "'none'", ":", "self", ".", "console", ".", "setCvar", "(", "'g_gear'", ",", "'63'", ")", "elif", "data", "[", ":", "5", "]", "==", "'reset'", ":", "self", ".", "console", ".", "setCvar", "(", "'g_gear'", ",", "'%s'", "%", "self", ".", "_origgear", ")", "else", ":", "if", "data", "[", "1", ":", "5", "]", "==", "'nade'", ":", "bit", "=", "1", "elif", "data", "[", "1", ":", "5", "]", "==", "'snip'", ":", "bit", "=", "2", "elif", "data", "[", "1", ":", "5", "]", "==", "'spas'", ":", "bit", "=", "4", "elif", "data", "[", "1", ":", "5", "]", "==", "'pist'", ":", "bit", "=", "8", "elif", "data", "[", "1", ":", "5", "]", "==", "'auto'", ":", "bit", "=", "16", "elif", "data", "[", "1", ":", "5", "]", "==", "'nege'", ":", "bit", "=", "32", "else", ":", "return", "if", "data", "[", ":", "1", "]", "==", "'+'", ":", "self", ".", "console", ".", "setCvar", "(", "'g_gear'", ",", "'%s'", "%", "(", "cur_gear", "&", "(", "63", "-", "bit", ")", ")", ")", "elif", "data", "[", ":", "1", "]", "==", "'-'", ":", "self", ".", "console", ".", "setCvar", "(", "'g_gear'", ",", "'%s'", "%", "(", "cur_gear", "|", "bit", ")", ")" ]
https://github.com/BigBrotherBot/big-brother-bot/blob/848823c71413c86e7f1ff9584f43e08d40a7f2c0/b3/plugins/poweradminurt/iourt41.py#L1795-L1852
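The cvar arithmetic above is easier to follow in isolation: each weapon owns one bit of g_gear, a set bit means the weapon is disabled, so +weapon clears its bit and -weapon sets it. A standalone sketch of the same math:

NADE, SNIPE, SPAS, PISTOL, AUTO, NEGEV = 1, 2, 4, 8, 16, 32
ALL_DISABLED = 63  # the 'none' preset: every weapon bit set

def enable(gear, bit):    # '+weapon' in cmd_pagear
    return gear & (ALL_DISABLED - bit)

def disable(gear, bit):   # '-weapon'
    return gear | bit

gear = ALL_DISABLED
gear = enable(gear, SNIPE)
assert gear == 61 and gear & SNIPE == 0      # sniper rifles back on
assert disable(gear, SNIPE) == ALL_DISABLED  # and off again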
MrH0wl/Cloudmare
65e5bc9888f9d362ab2abfb103ea6c1e869d67aa
thirdparty/urllib3/poolmanager.py
python
PoolManager.connection_from_host
(self, host, port=None, scheme="http", pool_kwargs=None)
return self.connection_from_context(request_context)
Get a :class:`ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed.
Get a :class:`ConnectionPool` based on the host, port, and scheme.
[ "Get", "a", ":", "class", ":", "ConnectionPool", "based", "on", "the", "host", "port", "and", "scheme", "." ]
def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): """ Get a :class:`ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed. """ if not host: raise LocationValueError("No host specified.") request_context = self._merge_pool_kwargs(pool_kwargs) request_context["scheme"] = scheme or "http" if not port: port = port_by_scheme.get(request_context["scheme"].lower(), 80) request_context["port"] = port request_context["host"] = host return self.connection_from_context(request_context)
[ "def", "connection_from_host", "(", "self", ",", "host", ",", "port", "=", "None", ",", "scheme", "=", "\"http\"", ",", "pool_kwargs", "=", "None", ")", ":", "if", "not", "host", ":", "raise", "LocationValueError", "(", "\"No host specified.\"", ")", "request_context", "=", "self", ".", "_merge_pool_kwargs", "(", "pool_kwargs", ")", "request_context", "[", "\"scheme\"", "]", "=", "scheme", "or", "\"http\"", "if", "not", "port", ":", "port", "=", "port_by_scheme", ".", "get", "(", "request_context", "[", "\"scheme\"", "]", ".", "lower", "(", ")", ",", "80", ")", "request_context", "[", "\"port\"", "]", "=", "port", "request_context", "[", "\"host\"", "]", "=", "host", "return", "self", ".", "connection_from_context", "(", "request_context", ")" ]
https://github.com/MrH0wl/Cloudmare/blob/65e5bc9888f9d362ab2abfb103ea6c1e869d67aa/thirdparty/urllib3/poolmanager.py#L219-L240
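A short usage example against the public urllib3 API; building the pool is purely local, so no request is made:

import urllib3

pm = urllib3.PoolManager()
pool = pm.connection_from_host("example.com", scheme="https")
# Port 443 was filled in from the scheme via port_by_scheme.
print(type(pool).__name__, pool.host, pool.port)  # HTTPSConnectionPool example.com 443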
Fantomas42/django-blog-zinnia
881101a9d1d455b2fc581d6f4ae0947cdd8126c6
zinnia/feeds.py
python
SearchEntries.description
(self, obj)
return _("The last entries containing the pattern '%(pattern)s'") % { 'pattern': obj}
Description of the feed.
Description of the feed.
[ "Description", "of", "the", "feed", "." ]
def description(self, obj): """ Description of the feed. """ return _("The last entries containing the pattern '%(pattern)s'") % { 'pattern': obj}
[ "def", "description", "(", "self", ",", "obj", ")", ":", "return", "_", "(", "\"The last entries containing the pattern '%(pattern)s'\"", ")", "%", "{", "'pattern'", ":", "obj", "}" ]
https://github.com/Fantomas42/django-blog-zinnia/blob/881101a9d1d455b2fc581d6f4ae0947cdd8126c6/zinnia/feeds.py#L343-L348
achaiah/pywick
9d663faf0c1660a9b8359a6472c164f658dfc8cb
pywick/models/segmentation/refinenet/refinenet.py
python
BaseRefineNet4Cascade.__init__
(self, input_shape, refinenet_block, num_classes=1, features=256, resnet_factory=models.resnet101, pretrained=True, freeze_resnet=False, **kwargs)
Multi-path 4-Cascaded RefineNet for image segmentation Args: input_shape ((int, int)): (channel, size) assumes input has equal height and width refinenet_block (block): RefineNet Block num_classes (int, optional): number of classes features (int, optional): number of features in refinenet resnet_factory (func, optional): A Resnet model from torchvision. Default: models.resnet101 pretrained (bool, optional): Use pretrained version of resnet Default: True freeze_resnet (bool, optional): Freeze resnet model Default: True Raises: ValueError: size of input_shape not divisible by 32
Multi-path 4-Cascaded RefineNet for image segmentation
[ "Multi", "-", "path", "4", "-", "Cascaded", "RefineNet", "for", "image", "segmentation" ]
def __init__(self, input_shape, refinenet_block, num_classes=1, features=256, resnet_factory=models.resnet101, pretrained=True, freeze_resnet=False, **kwargs): """Multi-path 4-Cascaded RefineNet for image segmentation Args: input_shape ((int, int)): (channel, size) assumes input has equal height and width refinenet_block (block): RefineNet Block num_classes (int, optional): number of classes features (int, optional): number of features in refinenet resnet_factory (func, optional): A Resnet model from torchvision. Default: models.resnet101 pretrained (bool, optional): Use pretrained version of resnet Default: True freeze_resnet (bool, optional): Freeze resnet model Default: True Raises: ValueError: size of input_shape not divisible by 32 """ super().__init__() input_channel, input_size = input_shape if input_size % 32 != 0: raise ValueError("{} not divisble by 32".format(input_shape)) resnet = resnet_factory(pretrained=pretrained) self.layer1 = nn.Sequential(resnet.conv1, resnet.bn1, resnet.relu, resnet.maxpool, resnet.layer1) self.layer2 = resnet.layer2 self.layer3 = resnet.layer3 self.layer4 = resnet.layer4 if freeze_resnet: layers = [self.layer1, self.layer2, self.layer3, self.layer4] for layer in layers: for param in layer.parameters(): param.requires_grad = False self.layer1_rn = nn.Conv2d( 256, features, kernel_size=3, stride=1, padding=1, bias=False) self.layer2_rn = nn.Conv2d( 512, features, kernel_size=3, stride=1, padding=1, bias=False) self.layer3_rn = nn.Conv2d( 1024, features, kernel_size=3, stride=1, padding=1, bias=False) self.layer4_rn = nn.Conv2d( 2048, 2 * features, kernel_size=3, stride=1, padding=1, bias=False) self.refinenet4 = RefineNetBlock(2 * features, (2 * features, input_size // 32)) self.refinenet3 = RefineNetBlock(features, (2 * features, input_size // 32), (features, input_size // 16)) self.refinenet2 = RefineNetBlock(features, (features, input_size // 16), (features, input_size // 8)) self.refinenet1 = RefineNetBlock(features, (features, input_size // 8), (features, input_size // 4)) self.output_conv = nn.Sequential( ResidualConvUnit(features), ResidualConvUnit(features), nn.Conv2d( features, num_classes, kernel_size=1, stride=1, padding=0, bias=True))
[ "def", "__init__", "(", "self", ",", "input_shape", ",", "refinenet_block", ",", "num_classes", "=", "1", ",", "features", "=", "256", ",", "resnet_factory", "=", "models", ".", "resnet101", ",", "pretrained", "=", "True", ",", "freeze_resnet", "=", "False", ",", "*", "*", "kwargs", ")", ":", "super", "(", ")", ".", "__init__", "(", ")", "input_channel", ",", "input_size", "=", "input_shape", "if", "input_size", "%", "32", "!=", "0", ":", "raise", "ValueError", "(", "\"{} not divisble by 32\"", ".", "format", "(", "input_shape", ")", ")", "resnet", "=", "resnet_factory", "(", "pretrained", "=", "pretrained", ")", "self", ".", "layer1", "=", "nn", ".", "Sequential", "(", "resnet", ".", "conv1", ",", "resnet", ".", "bn1", ",", "resnet", ".", "relu", ",", "resnet", ".", "maxpool", ",", "resnet", ".", "layer1", ")", "self", ".", "layer2", "=", "resnet", ".", "layer2", "self", ".", "layer3", "=", "resnet", ".", "layer3", "self", ".", "layer4", "=", "resnet", ".", "layer4", "if", "freeze_resnet", ":", "layers", "=", "[", "self", ".", "layer1", ",", "self", ".", "layer2", ",", "self", ".", "layer3", ",", "self", ".", "layer4", "]", "for", "layer", "in", "layers", ":", "for", "param", "in", "layer", ".", "parameters", "(", ")", ":", "param", ".", "requires_grad", "=", "False", "self", ".", "layer1_rn", "=", "nn", ".", "Conv2d", "(", "256", ",", "features", ",", "kernel_size", "=", "3", ",", "stride", "=", "1", ",", "padding", "=", "1", ",", "bias", "=", "False", ")", "self", ".", "layer2_rn", "=", "nn", ".", "Conv2d", "(", "512", ",", "features", ",", "kernel_size", "=", "3", ",", "stride", "=", "1", ",", "padding", "=", "1", ",", "bias", "=", "False", ")", "self", ".", "layer3_rn", "=", "nn", ".", "Conv2d", "(", "1024", ",", "features", ",", "kernel_size", "=", "3", ",", "stride", "=", "1", ",", "padding", "=", "1", ",", "bias", "=", "False", ")", "self", ".", "layer4_rn", "=", "nn", ".", "Conv2d", "(", "2048", ",", "2", "*", "features", ",", "kernel_size", "=", "3", ",", "stride", "=", "1", ",", "padding", "=", "1", ",", "bias", "=", "False", ")", "self", ".", "refinenet4", "=", "RefineNetBlock", "(", "2", "*", "features", ",", "(", "2", "*", "features", ",", "input_size", "//", "32", ")", ")", "self", ".", "refinenet3", "=", "RefineNetBlock", "(", "features", ",", "(", "2", "*", "features", ",", "input_size", "//", "32", ")", ",", "(", "features", ",", "input_size", "//", "16", ")", ")", "self", ".", "refinenet2", "=", "RefineNetBlock", "(", "features", ",", "(", "features", ",", "input_size", "//", "16", ")", ",", "(", "features", ",", "input_size", "//", "8", ")", ")", "self", ".", "refinenet1", "=", "RefineNetBlock", "(", "features", ",", "(", "features", ",", "input_size", "//", "8", ")", ",", "(", "features", ",", "input_size", "//", "4", ")", ")", "self", ".", "output_conv", "=", "nn", ".", "Sequential", "(", "ResidualConvUnit", "(", "features", ")", ",", "ResidualConvUnit", "(", "features", ")", ",", "nn", ".", "Conv2d", "(", "features", ",", "num_classes", ",", "kernel_size", "=", "1", ",", "stride", "=", "1", ",", "padding", "=", "0", ",", "bias", "=", "True", ")", ")" ]
https://github.com/achaiah/pywick/blob/9d663faf0c1660a9b8359a6472c164f658dfc8cb/pywick/models/segmentation/refinenet/refinenet.py#L17-L94
runawayhorse001/LearningApacheSpark
67f3879dce17553195f094f5728b94a01badcf24
pyspark/mllib/linalg/distributed.py
python
BlockMatrix.add
(self, other)
return BlockMatrix(java_block_matrix, self.rowsPerBlock, self.colsPerBlock)
Adds two block matrices together. The matrices must have the same size and matching `rowsPerBlock` and `colsPerBlock` values. If one of the sub matrix blocks that are being added is a SparseMatrix, the resulting sub matrix block will also be a SparseMatrix, even if it is being added to a DenseMatrix. If two dense sub matrix blocks are added, the output block will also be a DenseMatrix. >>> dm1 = Matrices.dense(3, 2, [1, 2, 3, 4, 5, 6]) >>> dm2 = Matrices.dense(3, 2, [7, 8, 9, 10, 11, 12]) >>> sm = Matrices.sparse(3, 2, [0, 1, 3], [0, 1, 2], [7, 11, 12]) >>> blocks1 = sc.parallelize([((0, 0), dm1), ((1, 0), dm2)]) >>> blocks2 = sc.parallelize([((0, 0), dm1), ((1, 0), dm2)]) >>> blocks3 = sc.parallelize([((0, 0), sm), ((1, 0), dm2)]) >>> mat1 = BlockMatrix(blocks1, 3, 2) >>> mat2 = BlockMatrix(blocks2, 3, 2) >>> mat3 = BlockMatrix(blocks3, 3, 2) >>> mat1.add(mat2).toLocalMatrix() DenseMatrix(6, 2, [2.0, 4.0, 6.0, 14.0, 16.0, 18.0, 8.0, 10.0, 12.0, 20.0, 22.0, 24.0], 0) >>> mat1.add(mat3).toLocalMatrix() DenseMatrix(6, 2, [8.0, 2.0, 3.0, 14.0, 16.0, 18.0, 4.0, 16.0, 18.0, 20.0, 22.0, 24.0], 0)
Adds two block matrices together. The matrices must have the same size and matching `rowsPerBlock` and `colsPerBlock` values. If one of the sub matrix blocks that are being added is a SparseMatrix, the resulting sub matrix block will also be a SparseMatrix, even if it is being added to a DenseMatrix. If two dense sub matrix blocks are added, the output block will also be a DenseMatrix.
[ "Adds", "two", "block", "matrices", "together", ".", "The", "matrices", "must", "have", "the", "same", "size", "and", "matching", "rowsPerBlock", "and", "colsPerBlock", "values", ".", "If", "one", "of", "the", "sub", "matrix", "blocks", "that", "are", "being", "added", "is", "a", "SparseMatrix", "the", "resulting", "sub", "matrix", "block", "will", "also", "be", "a", "SparseMatrix", "even", "if", "it", "is", "being", "added", "to", "a", "DenseMatrix", ".", "If", "two", "dense", "sub", "matrix", "blocks", "are", "added", "the", "output", "block", "will", "also", "be", "a", "DenseMatrix", "." ]
def add(self, other): """ Adds two block matrices together. The matrices must have the same size and matching `rowsPerBlock` and `colsPerBlock` values. If one of the sub matrix blocks that are being added is a SparseMatrix, the resulting sub matrix block will also be a SparseMatrix, even if it is being added to a DenseMatrix. If two dense sub matrix blocks are added, the output block will also be a DenseMatrix. >>> dm1 = Matrices.dense(3, 2, [1, 2, 3, 4, 5, 6]) >>> dm2 = Matrices.dense(3, 2, [7, 8, 9, 10, 11, 12]) >>> sm = Matrices.sparse(3, 2, [0, 1, 3], [0, 1, 2], [7, 11, 12]) >>> blocks1 = sc.parallelize([((0, 0), dm1), ((1, 0), dm2)]) >>> blocks2 = sc.parallelize([((0, 0), dm1), ((1, 0), dm2)]) >>> blocks3 = sc.parallelize([((0, 0), sm), ((1, 0), dm2)]) >>> mat1 = BlockMatrix(blocks1, 3, 2) >>> mat2 = BlockMatrix(blocks2, 3, 2) >>> mat3 = BlockMatrix(blocks3, 3, 2) >>> mat1.add(mat2).toLocalMatrix() DenseMatrix(6, 2, [2.0, 4.0, 6.0, 14.0, 16.0, 18.0, 8.0, 10.0, 12.0, 20.0, 22.0, 24.0], 0) >>> mat1.add(mat3).toLocalMatrix() DenseMatrix(6, 2, [8.0, 2.0, 3.0, 14.0, 16.0, 18.0, 4.0, 16.0, 18.0, 20.0, 22.0, 24.0], 0) """ if not isinstance(other, BlockMatrix): raise TypeError("Other should be a BlockMatrix, got %s" % type(other)) other_java_block_matrix = other._java_matrix_wrapper._java_model java_block_matrix = self._java_matrix_wrapper.call("add", other_java_block_matrix) return BlockMatrix(java_block_matrix, self.rowsPerBlock, self.colsPerBlock)
[ "def", "add", "(", "self", ",", "other", ")", ":", "if", "not", "isinstance", "(", "other", ",", "BlockMatrix", ")", ":", "raise", "TypeError", "(", "\"Other should be a BlockMatrix, got %s\"", "%", "type", "(", "other", ")", ")", "other_java_block_matrix", "=", "other", ".", "_java_matrix_wrapper", ".", "_java_model", "java_block_matrix", "=", "self", ".", "_java_matrix_wrapper", ".", "call", "(", "\"add\"", ",", "other_java_block_matrix", ")", "return", "BlockMatrix", "(", "java_block_matrix", ",", "self", ".", "rowsPerBlock", ",", "self", ".", "colsPerBlock", ")" ]
https://github.com/runawayhorse001/LearningApacheSpark/blob/67f3879dce17553195f094f5728b94a01badcf24/pyspark/mllib/linalg/distributed.py#L1186-L1217
xuanyuzhou98/SqueezeSegV2
9f02049466fd369398a94de091ca8d7e4fb6ae81
src/nn_skeleton.py
python
_variable_on_device
(name, shape, initializer, trainable=True)
return var
Helper to create a Variable. Args: name: name of the variable shape: list of ints initializer: initializer for Variable Returns: Variable Tensor
Helper to create a Variable.
[ "Helper", "to", "create", "a", "Variable", "." ]
def _variable_on_device(name, shape, initializer, trainable=True): """Helper to create a Variable. Args: name: name of the variable shape: list of ints initializer: initializer for Variable Returns: Variable Tensor """ # TODO(bichen): fix the hard-coded data type below dtype = tf.float32 if not callable(initializer): var = tf.get_variable(name, initializer=initializer, trainable=trainable) else: var = tf.get_variable( name, shape, initializer=initializer, dtype=dtype, trainable=trainable) return var
[ "def", "_variable_on_device", "(", "name", ",", "shape", ",", "initializer", ",", "trainable", "=", "True", ")", ":", "# TODO(bichen): fix the hard-coded data type below", "dtype", "=", "tf", ".", "float32", "if", "not", "callable", "(", "initializer", ")", ":", "var", "=", "tf", ".", "get_variable", "(", "name", ",", "initializer", "=", "initializer", ",", "trainable", "=", "trainable", ")", "else", ":", "var", "=", "tf", ".", "get_variable", "(", "name", ",", "shape", ",", "initializer", "=", "initializer", ",", "dtype", "=", "dtype", ",", "trainable", "=", "trainable", ")", "return", "var" ]
https://github.com/xuanyuzhou98/SqueezeSegV2/blob/9f02049466fd369398a94de091ca8d7e4fb6ae81/src/nn_skeleton.py#L16-L34
Chaffelson/nipyapi
d3b186fd701ce308c2812746d98af9120955e810
nipyapi/nifi/models/cluster_summary_dto.py
python
ClusterSummaryDTO.connected_to_cluster
(self)
return self._connected_to_cluster
Gets the connected_to_cluster of this ClusterSummaryDTO. Whether this NiFi instance is connected to a cluster. :return: The connected_to_cluster of this ClusterSummaryDTO. :rtype: bool
Gets the connected_to_cluster of this ClusterSummaryDTO. Whether this NiFi instance is connected to a cluster.
[ "Gets", "the", "connected_to_cluster", "of", "this", "ClusterSummaryDTO", ".", "Whether", "this", "NiFi", "instance", "is", "connected", "to", "a", "cluster", "." ]
def connected_to_cluster(self): """ Gets the connected_to_cluster of this ClusterSummaryDTO. Whether this NiFi instance is connected to a cluster. :return: The connected_to_cluster of this ClusterSummaryDTO. :rtype: bool """ return self._connected_to_cluster
[ "def", "connected_to_cluster", "(", "self", ")", ":", "return", "self", ".", "_connected_to_cluster" ]
https://github.com/Chaffelson/nipyapi/blob/d3b186fd701ce308c2812746d98af9120955e810/nipyapi/nifi/models/cluster_summary_dto.py#L141-L149
google/clusterfuzz
f358af24f414daa17a3649b143e71ea71871ef59
src/clusterfuzz/_internal/datastore/data_types.py
python
FuzzTarget.fully_qualified_name
(self)
return fuzz_target_fully_qualified_name(self.engine, self.project, self.binary)
Get the fully qualified name for this fuzz target.
Get the fully qualified name for this fuzz target.
[ "Get", "the", "fully", "qualified", "name", "for", "this", "fuzz", "target", "." ]
def fully_qualified_name(self): """Get the fully qualified name for this fuzz target.""" return fuzz_target_fully_qualified_name(self.engine, self.project, self.binary)
[ "def", "fully_qualified_name", "(", "self", ")", ":", "return", "fuzz_target_fully_qualified_name", "(", "self", ".", "engine", ",", "self", ".", "project", ",", "self", ".", "binary", ")" ]
https://github.com/google/clusterfuzz/blob/f358af24f414daa17a3649b143e71ea71871ef59/src/clusterfuzz/_internal/datastore/data_types.py#L1192-L1195
Instagram/LibCST
13370227703fe3171e94c57bdd7977f3af696b73
libcst/_parser/py_whitespace_parser.py
python
_parse_indent
( config: BaseWhitespaceParserConfig, state: State, *, override_absolute_indent: Optional[str] = None, )
return False
Returns True if indentation was found, otherwise False.
Returns True if indentation was found, otherwise False.
[ "Returns", "True", "if", "indentation", "was", "found", "otherwise", "False", "." ]
def _parse_indent( config: BaseWhitespaceParserConfig, state: State, *, override_absolute_indent: Optional[str] = None, ) -> bool: """ Returns True if indentation was found, otherwise False. """ absolute_indent = ( override_absolute_indent if override_absolute_indent is not None else state.absolute_indent ) line_str = config.lines[state.line - 1] if state.column != 0: if state.column == len(line_str) and state.line == len(config.lines): # We're at EOF, treat this as a failed speculative parse return False raise Exception("Internal Error: Column should be 0 when parsing an indent.") if line_str.startswith(absolute_indent, state.column): state.column += len(absolute_indent) return True return False
[ "def", "_parse_indent", "(", "config", ":", "BaseWhitespaceParserConfig", ",", "state", ":", "State", ",", "*", ",", "override_absolute_indent", ":", "Optional", "[", "str", "]", "=", "None", ",", ")", "->", "bool", ":", "absolute_indent", "=", "(", "override_absolute_indent", "if", "override_absolute_indent", "is", "not", "None", "else", "state", ".", "absolute_indent", ")", "line_str", "=", "config", ".", "lines", "[", "state", ".", "line", "-", "1", "]", "if", "state", ".", "column", "!=", "0", ":", "if", "state", ".", "column", "==", "len", "(", "line_str", ")", "and", "state", ".", "line", "==", "len", "(", "config", ".", "lines", ")", ":", "# We're at EOF, treat this as a failed speculative parse", "return", "False", "raise", "Exception", "(", "\"Internal Error: Column should be 0 when parsing an indent.\"", ")", "if", "line_str", ".", "startswith", "(", "absolute_indent", ",", "state", ".", "column", ")", ":", "state", ".", "column", "+=", "len", "(", "absolute_indent", ")", "return", "True", "return", "False" ]
https://github.com/Instagram/LibCST/blob/13370227703fe3171e94c57bdd7977f3af696b73/libcst/_parser/py_whitespace_parser.py#L161-L184
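The load-bearing trick in _parse_indent is str.startswith(prefix, start): the optional start offset lets the parser test for the indent in place, without slicing a new string per line. The same mechanism in isolation:

line = "        return x"
absolute_indent = "    "
column = 0
# Consume one indent level if it is present at the current column.
if line.startswith(absolute_indent, column):
    column += len(absolute_indent)
assert column == 4 and line.startswith(absolute_indent, column)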
fjarri/reikna
e32e507d74337c13c508ae9ff5f0716a99798a61
reikna/cluda/api.py
python
Kernel.prepare
(self, global_size, local_size=None, local_mem=0)
Prepare the kernel for execution with given parameters. :param global_size: an integer or a tuple of integers, specifying total number of work items to run. :param local_size: an integer or a tuple of integers, specifying the size of a single work group. Should have the same number of dimensions as ``global_size``. If ``None`` is passed, some ``local_size`` will be picked internally. :param local_mem: (**CUDA API only**) amount of dynamic local memory (in bytes)
Prepare the kernel for execution with given parameters.
[ "Prepare", "the", "kernel", "for", "execution", "with", "given", "parameters", "." ]
def prepare(self, global_size, local_size=None, local_mem=0): """ Prepare the kernel for execution with given parameters. :param global_size: an integer or a tuple of integers, specifying total number of work items to run. :param local_size: an integer or a tuple of integers, specifying the size of a single work group. Should have the same number of dimensions as ``global_size``. If ``None`` is passed, some ``local_size`` will be picked internally. :param local_mem: (**CUDA API only**) amount of dynamic local memory (in bytes) """ raise NotImplementedError()
[ "def", "prepare", "(", "self", ",", "global_size", ",", "local_size", "=", "None", ",", "local_mem", "=", "0", ")", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/fjarri/reikna/blob/e32e507d74337c13c508ae9ff5f0716a99798a61/reikna/cluda/api.py#L695-L707
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/bokeh-1.4.0-py3.7.egg/bokeh/models/plots.py
python
Plot.column
(self, col, gridplot)
return self in gridplot.column(col)
Return whether this plot is in a given column of a GridPlot. Args: col (int) : index of the column to test gridplot (GridPlot) : the GridPlot to check Returns: bool
Return whether this plot is in a given column of a GridPlot.
[ "Return", "whether", "this", "plot", "is", "in", "a", "given", "column", "of", "a", "GridPlot", "." ]
def column(self, col, gridplot): ''' Return whether this plot is in a given column of a GridPlot. Args: col (int) : index of the column to test gridplot (GridPlot) : the GridPlot to check Returns: bool ''' return self in gridplot.column(col)
[ "def", "column", "(", "self", ",", "col", ",", "gridplot", ")", ":", "return", "self", "in", "gridplot", ".", "column", "(", "col", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/bokeh-1.4.0-py3.7.egg/bokeh/models/plots.py#L139-L150
nodesign/weio
1d67d705a5c36a2e825ad13feab910b0aca9a2e8
updateMaker/prepareForWeIOServer/prepareUpdate.py
python
saveConfiguration
(path, conf)
[]
def saveConfiguration(path, conf): inputFile = open(path+"/updateWeio.json", 'w') print(inputFile) ret = inputFile.write(json.dumps(conf, indent=4, sort_keys=True)) inputFile.close()
[ "def", "saveConfiguration", "(", "path", ",", "conf", ")", ":", "inputFile", "=", "open", "(", "path", "+", "\"/updateWeio.json\"", ",", "'w'", ")", "print", "(", "inputFile", ")", "ret", "=", "inputFile", ".", "write", "(", "json", ".", "dumps", "(", "conf", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ")", ")", "inputFile", ".", "close", "(", ")" ]
https://github.com/nodesign/weio/blob/1d67d705a5c36a2e825ad13feab910b0aca9a2e8/updateMaker/prepareForWeIOServer/prepareUpdate.py#L17-L21
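A tidier equivalent under the same file-layout assumption: the with block closes the handle even if the write raises, and json.dump streams straight to the file instead of building the string first.

import json
import os

def save_configuration(path, conf):
    # Same output as the record: pretty-printed, key-sorted JSON.
    with open(os.path.join(path, "updateWeio.json"), "w") as fh:
        json.dump(conf, fh, indent=4, sort_keys=True)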
nortikin/sverchok
7b460f01317c15f2681bfa3e337c5e7346f3711b
nodes/number/curve_mapper.py
python
SvCurveMapperNode.save_to_json
(self, node_data: dict)
function to set data for exporting json
function to set data for exporting json
[ "function", "to", "set", "data", "for", "exporting", "json" ]
def save_to_json(self, node_data: dict): '''function to set data for exporting json''' curve_node_name = self._get_curve_node_name() data = get_rgb_curve(node_group_name, curve_node_name) data_json_str = json.dumps(data) node_data['curve_data'] = data_json_str
[ "def", "save_to_json", "(", "self", ",", "node_data", ":", "dict", ")", ":", "curve_node_name", "=", "self", ".", "_get_curve_node_name", "(", ")", "data", "=", "get_rgb_curve", "(", "node_group_name", ",", "curve_node_name", ")", "data_json_str", "=", "json", ".", "dumps", "(", "data", ")", "node_data", "[", "'curve_data'", "]", "=", "data_json_str" ]
https://github.com/nortikin/sverchok/blob/7b460f01317c15f2681bfa3e337c5e7346f3711b/nodes/number/curve_mapper.py#L138-L143
cobbler/cobbler
eed8cdca3e970c8aa1d199e80b8c8f19b3f940cc
cobbler/api.py
python
CobblerAPI.mkloaders
(self)
Create the GRUB installer images via this API call. It utilizes ``grub2-mkimage`` behind the curtain.
Create the GRUB installer images via this API call. It utilizes ``grub2-mkimage`` behind the curtain.
[ "Create", "the", "GRUB", "installer", "images", "via", "this", "API", "call", ".", "It", "utilizes", "grub2", "-", "mkimage", "behind", "the", "curtain", "." ]
def mkloaders(self): """ Create the GRUB installer images via this API call. It utilizes ``grub2-mkimage`` behind the curtain. """ action = mkloaders.MkLoaders(self) action.run()
[ "def", "mkloaders", "(", "self", ")", ":", "action", "=", "mkloaders", ".", "MkLoaders", "(", "self", ")", "action", ".", "run", "(", ")" ]
https://github.com/cobbler/cobbler/blob/eed8cdca3e970c8aa1d199e80b8c8f19b3f940cc/cobbler/api.py#L1873-L1878
tensorflow/tensor2tensor
2a33b152d7835af66a6d20afe7961751047e28dd
tensor2tensor/models/research/attention_lm_moe.py
python
attention_lm_no_moe_small
()
return hparams
Without the mixture of experts (for comparison). on lm1b_32k: ~45M params 2 steps/sec on [GeForce GTX TITAN X] After 50K steps on 8 GPUs (synchronous): eval_log_ppl_per_token = 3.51 Returns: an hparams object.
Without the mixture of experts (for comparison).
[ "Without", "the", "mixture", "of", "experts", "(", "for", "comparison", ")", "." ]
def attention_lm_no_moe_small(): """Without the mixture of experts (for comparison). on lm1b_32k: ~45M params 2 steps/sec on [GeForce GTX TITAN X] After 50K steps on 8 GPUs (synchronous): eval_log_ppl_per_token = 3.51 Returns: an hparams object. """ hparams = attention_lm_moe_small() hparams.moe_layers = "" return hparams
[ "def", "attention_lm_no_moe_small", "(", ")", ":", "hparams", "=", "attention_lm_moe_small", "(", ")", "hparams", ".", "moe_layers", "=", "\"\"", "return", "hparams" ]
https://github.com/tensorflow/tensor2tensor/blob/2a33b152d7835af66a6d20afe7961751047e28dd/tensor2tensor/models/research/attention_lm_moe.py#L681-L695
marcosfede/algorithms
1ee7c815f9d556c9cef4d4b0d21ee3a409d21629
adventofcode/2019/d19/d19.py
python
Point.__mul__
(self, num)
return Point(self.x*num, self.y*num)
[]
def __mul__(self, num): return Point(self.x*num, self.y*num)
[ "def", "__mul__", "(", "self", ",", "num", ")", ":", "return", "Point", "(", "self", ".", "x", "*", "num", ",", "self", ".", "y", "*", "num", ")" ]
https://github.com/marcosfede/algorithms/blob/1ee7c815f9d556c9cef4d4b0d21ee3a409d21629/adventofcode/2019/d19/d19.py#L91-L92
radlab/sparrow
afb8efadeb88524f1394d1abe4ea66c6fd2ac744
src/main/python/parse_logs.py
python
Task.service_time
(self)
return (self.completion_time - self.node_monitor_launch_time)
Returns the service time (time executing on backend).
Returns the service time (time executing on backend).
[ "Returns", "the", "service", "time", "(", "time", "executing", "on", "backend", ")", "." ]
def service_time(self): """ Returns the service time (time executing on backend).""" #print self.node_monitor_address, self.completion_time - self.node_monitor_launch_time return (self.completion_time - self.node_monitor_launch_time)
[ "def", "service_time", "(", "self", ")", ":", "#print self.node_monitor_address, self.completion_time - self.node_monitor_launch_time", "return", "(", "self", ".", "completion_time", "-", "self", ".", "node_monitor_launch_time", ")" ]
https://github.com/radlab/sparrow/blob/afb8efadeb88524f1394d1abe4ea66c6fd2ac744/src/main/python/parse_logs.py#L128-L131
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/ssh.py
python
_ssh_read_next_mpint
(data)
return ( utils.int_from_bytes(mpint_data, byteorder='big', signed=False), rest )
Reads the next mpint from the data. Currently, all mpints are interpreted as unsigned.
Reads the next mpint from the data.
[ "Reads", "the", "next", "mpint", "from", "the", "data", "." ]
def _ssh_read_next_mpint(data): """ Reads the next mpint from the data. Currently, all mpints are interpreted as unsigned. """ mpint_data, rest = _ssh_read_next_string(data) return ( utils.int_from_bytes(mpint_data, byteorder='big', signed=False), rest )
[ "def", "_ssh_read_next_mpint", "(", "data", ")", ":", "mpint_data", ",", "rest", "=", "_ssh_read_next_string", "(", "data", ")", "return", "(", "utils", ".", "int_from_bytes", "(", "mpint_data", ",", "byteorder", "=", "'big'", ",", "signed", "=", "False", ")", ",", "rest", ")" ]
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-windows/x86/cryptography/hazmat/primitives/serialization/ssh.py#L132-L142
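Both helpers ride on the RFC 4251 string framing: a 4-byte big-endian length prefix followed by that many payload bytes. A self-contained sketch, with read_ssh_string standing in for the private _ssh_read_next_string and Python 3's int.from_bytes in place of cryptography's utils.int_from_bytes:

import struct

def read_ssh_string(data):
    # 'string' wire type: uint32 length, then the raw payload bytes.
    (length,) = struct.unpack(">I", data[:4])
    return data[4:4 + length], data[4 + length:]

def read_ssh_mpint_unsigned(data):
    # As in the record, the payload is decoded as one unsigned
    # big-endian integer; the unread tail is handed back to the caller.
    raw, rest = read_ssh_string(data)
    return int.from_bytes(raw, "big", signed=False), rest

value, rest = read_ssh_mpint_unsigned(b"\x00\x00\x00\x02\x01\x00tail")
assert value == 256 and rest == b"tail"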
hahnyuan/nn_tools
e04903d2946a75b62128b64ada7eec8b9fbd841f
analysis/MxnetA.py
python
Monitor.install
(self, exe)
[]
def install(self, exe): exe.set_monitor_callback(self.stat_helper) self.exes.append(exe)
[ "def", "install", "(", "self", ",", "exe", ")", ":", "exe", ".", "set_monitor_callback", "(", "self", ".", "stat_helper", ")", "self", ".", "exes", ".", "append", "(", "exe", ")" ]
https://github.com/hahnyuan/nn_tools/blob/e04903d2946a75b62128b64ada7eec8b9fbd841f/analysis/MxnetA.py#L109-L111
chainer/chainercv
7159616642e0be7c5b3ef380b848e16b7e99355b
chainercv/chainer_experimental/datasets/sliceable/sliceable_dataset.py
python
SliceableDataset.get_example
(self, index)
[]
def get_example(self, index): if isinstance(self.keys, tuple): return self.get_example_by_keys( index, tuple(range(len(self.keys)))) else: return self.get_example_by_keys(index, (0,))[0]
[ "def", "get_example", "(", "self", ",", "index", ")", ":", "if", "isinstance", "(", "self", ".", "keys", ",", "tuple", ")", ":", "return", "self", ".", "get_example_by_keys", "(", "index", ",", "tuple", "(", "range", "(", "len", "(", "self", ".", "keys", ")", ")", ")", ")", "else", ":", "return", "self", ".", "get_example_by_keys", "(", "index", ",", "(", "0", ",", ")", ")", "[", "0", "]" ]
https://github.com/chainer/chainercv/blob/7159616642e0be7c5b3ef380b848e16b7e99355b/chainercv/chainer_experimental/datasets/sliceable/sliceable_dataset.py#L95-L100
danielzak/sl-quant
460a55c966192b2bde5497f8502ac24f81a51ddf
ex3-self_learning_quant.py
python
get_reward
(new_state, time_step, action, xdata, signal, terminal_state, eval=False, epoch=0)
return reward
[]
def get_reward(new_state, time_step, action, xdata, signal, terminal_state, eval=False, epoch=0): reward = 0 signal.fillna(value=0, inplace=True) if eval == False: bt = twp.Backtest(pd.Series(data=[x for x in xdata[time_step-2:time_step]], index=signal[time_step-2:time_step].index.values), signal[time_step-2:time_step], signalType='shares') reward = ((bt.data['price'].iloc[-1] - bt.data['price'].iloc[-2])*bt.data['shares'].iloc[-1]) if terminal_state == 1 and eval == True: #save a figure of the test set bt = twp.Backtest(pd.Series(data=[x for x in xdata], index=signal.index.values), signal, signalType='shares') reward = bt.pnl.iloc[-1] plt.figure(figsize=(3,4)) bt.plotTrades() plt.axvline(x=400, color='black', linestyle='--') plt.text(250, 400, 'training data') plt.text(450, 400, 'test data') plt.suptitle(str(epoch)) plt.savefig('plt/'+str(epoch)+'.png', bbox_inches='tight', pad_inches=1, dpi=72) plt.close('all') #print(time_step, terminal_state, eval, reward) return reward
[ "def", "get_reward", "(", "new_state", ",", "time_step", ",", "action", ",", "xdata", ",", "signal", ",", "terminal_state", ",", "eval", "=", "False", ",", "epoch", "=", "0", ")", ":", "reward", "=", "0", "signal", ".", "fillna", "(", "value", "=", "0", ",", "inplace", "=", "True", ")", "if", "eval", "==", "False", ":", "bt", "=", "twp", ".", "Backtest", "(", "pd", ".", "Series", "(", "data", "=", "[", "x", "for", "x", "in", "xdata", "[", "time_step", "-", "2", ":", "time_step", "]", "]", ",", "index", "=", "signal", "[", "time_step", "-", "2", ":", "time_step", "]", ".", "index", ".", "values", ")", ",", "signal", "[", "time_step", "-", "2", ":", "time_step", "]", ",", "signalType", "=", "'shares'", ")", "reward", "=", "(", "(", "bt", ".", "data", "[", "'price'", "]", ".", "iloc", "[", "-", "1", "]", "-", "bt", ".", "data", "[", "'price'", "]", ".", "iloc", "[", "-", "2", "]", ")", "*", "bt", ".", "data", "[", "'shares'", "]", ".", "iloc", "[", "-", "1", "]", ")", "if", "terminal_state", "==", "1", "and", "eval", "==", "True", ":", "#save a figure of the test set", "bt", "=", "twp", ".", "Backtest", "(", "pd", ".", "Series", "(", "data", "=", "[", "x", "for", "x", "in", "xdata", "]", ",", "index", "=", "signal", ".", "index", ".", "values", ")", ",", "signal", ",", "signalType", "=", "'shares'", ")", "reward", "=", "bt", ".", "pnl", ".", "iloc", "[", "-", "1", "]", "plt", ".", "figure", "(", "figsize", "=", "(", "3", ",", "4", ")", ")", "bt", ".", "plotTrades", "(", ")", "plt", ".", "axvline", "(", "x", "=", "400", ",", "color", "=", "'black'", ",", "linestyle", "=", "'--'", ")", "plt", ".", "text", "(", "250", ",", "400", ",", "'training data'", ")", "plt", ".", "text", "(", "450", ",", "400", ",", "'test data'", ")", "plt", ".", "suptitle", "(", "str", "(", "epoch", ")", ")", "plt", ".", "savefig", "(", "'plt/'", "+", "str", "(", "epoch", ")", "+", "'.png'", ",", "bbox_inches", "=", "'tight'", ",", "pad_inches", "=", "1", ",", "dpi", "=", "72", ")", "plt", ".", "close", "(", "'all'", ")", "#print(time_step, terminal_state, eval, reward)", "return", "reward" ]
https://github.com/danielzak/sl-quant/blob/460a55c966192b2bde5497f8502ac24f81a51ddf/ex3-self_learning_quant.py#L123-L145
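Stripped of the backtest plumbing, the non-terminal reward above appears to be the last bar's price move times the position held into it. A pandas-free restatement of that reading (it assumes twp.Backtest does not shift the signal internally):

def step_reward(prices, shares, t):
    # Mirrors the [t-2:t] window: profit of the most recent bar.
    return (prices[t - 1] - prices[t - 2]) * shares[t - 1]

assert step_reward([10.0, 11.0, 12.5], [0, 2, 2], t=3) == 3.0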
graphcore/examples
46d2b7687b829778369fc6328170a7b14761e5c6
code_examples/tensorflow2/abc_covid_19/ABC_IPU.py
python
main
()
Warmup, timing, and stats output handling.
Warmup, timing, and stats output handling.
[ "Warmup", "timing", "and", "stats", "output", "handling", "." ]
def main(): """Warmup, timing, and stats output handling.""" with strategy.scope(): # Warm-up if not args.sparse_output: print("Warming up...") strategy.run( loop_collect_samples, [args.n_samples_target, tf.constant(1, dtype=tf.int32), args.tolerance]) if not args.no_outfeed_ops: outfeed_data.dequeue() # Time the compute if not args.sparse_output: print("Running...") start_time = time.time() num_accepted_samples, num_runs = strategy.run( loop_collect_samples, [args.n_samples_target, args.max_n_runs, args.tolerance]) end_time = time.time() samples_collected = np.int(num_accepted_samples) num_runs = np.int(num_runs) run_duration = end_time - start_time # Dequeue the data if args.no_outfeed_ops: start_time = end_time = time.time() else: start_time = time.time() param_vector, reduced_distances, eval_param_vector = \ dequeue_and_postproc(time_it=True) end_time = time.time() deq_proc_duration = end_time - start_time duration = run_duration + deq_proc_duration if args.sparse_output: print(f"{duration:.3f} \t {1e3*duration/num_runs:.3f} \t " f"{run_duration:.3f} \t {1e3*run_duration/num_runs:.3f}") else: print(f"Running ABC inference for {args.country}\n" f"\tBatch size: {args.n_samples_per_batch}\n" f"\tTolerance: {args.tolerance}" f"\tTarget number of samples: {args.n_samples_target}" f"\tEnqueue chunk size: {args.enqueue_chunk_size}") print("=========================================") print("IPU runs completed in {0:.3f} seconds\n".format( run_duration)) print(f"Samples collected: {samples_collected:.0f}") print(f"Number of runs: {num_runs:.0f} " f"with {args.replication_factor} replica(s)") print("Time per run: {0:.3f} milliseconds\n".format( 1e3*run_duration/num_runs)) print("Debug: Time for dequeue and processing: " "{0:.3f} second\n".format(deq_proc_duration)) print("Debug: Total Time (inc dequeue): {0:.3f} second\n".format( duration)) print("Debug: Time per run (inc dequeue): " "{0:.3f} milliseconds\n".format(1e3*duration/num_runs)) if not args.no_outfeed_ops: print(f"param_vector.shape = {param_vector.shape}") print(f"reduced_distances.shape = {reduced_distances.shape}") print(f"eval_param_vector.shape = {eval_param_vector.shape}") if samples_collected < args.n_samples_target: raise NotImplementedError( "Too few iterations. Increase max_num_runs parameter.") if args.samples_filepath: # Save the accepted samples if filepath given np.savetxt(args.samples_filepath, eval_param_vector.numpy(), delimiter=",")
[ "def", "main", "(", ")", ":", "with", "strategy", ".", "scope", "(", ")", ":", "# Warm-up", "if", "not", "args", ".", "sparse_output", ":", "print", "(", "\"Warming up...\"", ")", "strategy", ".", "run", "(", "loop_collect_samples", ",", "[", "args", ".", "n_samples_target", ",", "tf", ".", "constant", "(", "1", ",", "dtype", "=", "tf", ".", "int32", ")", ",", "args", ".", "tolerance", "]", ")", "if", "not", "args", ".", "no_outfeed_ops", ":", "outfeed_data", ".", "dequeue", "(", ")", "# Time the compute", "if", "not", "args", ".", "sparse_output", ":", "print", "(", "\"Running...\"", ")", "start_time", "=", "time", ".", "time", "(", ")", "num_accepted_samples", ",", "num_runs", "=", "strategy", ".", "run", "(", "loop_collect_samples", ",", "[", "args", ".", "n_samples_target", ",", "args", ".", "max_n_runs", ",", "args", ".", "tolerance", "]", ")", "end_time", "=", "time", ".", "time", "(", ")", "samples_collected", "=", "np", ".", "int", "(", "num_accepted_samples", ")", "num_runs", "=", "np", ".", "int", "(", "num_runs", ")", "run_duration", "=", "end_time", "-", "start_time", "# Dequeue the data", "if", "args", ".", "no_outfeed_ops", ":", "start_time", "=", "end_time", "=", "time", ".", "time", "(", ")", "else", ":", "start_time", "=", "time", ".", "time", "(", ")", "param_vector", ",", "reduced_distances", ",", "eval_param_vector", "=", "dequeue_and_postproc", "(", "time_it", "=", "True", ")", "end_time", "=", "time", ".", "time", "(", ")", "deq_proc_duration", "=", "end_time", "-", "start_time", "duration", "=", "run_duration", "+", "deq_proc_duration", "if", "args", ".", "sparse_output", ":", "print", "(", "f\"{duration:.3f} \\t {1e3*duration/num_runs:.3f} \\t \"", "f\"{run_duration:.3f} \\t {1e3*run_duration/num_runs:.3f}\"", ")", "else", ":", "print", "(", "f\"Running ABC inference for {args.country}\\n\"", "f\"\\tBatch size: {args.n_samples_per_batch}\\n\"", "f\"\\tTolerance: {args.tolerance}\"", "f\"\\tTarget number of samples: {args.n_samples_target}\"", "f\"\\tEnqueue chunk size: {args.enqueue_chunk_size}\"", ")", "print", "(", "\"=========================================\"", ")", "print", "(", "\"IPU runs completed in {0:.3f} seconds\\n\"", ".", "format", "(", "run_duration", ")", ")", "print", "(", "f\"Samples collected: {samples_collected:.0f}\"", ")", "print", "(", "f\"Number of runs: {num_runs:.0f} \"", "f\"with {args.replication_factor} replica(s)\"", ")", "print", "(", "\"Time per run: {0:.3f} milliseconds\\n\"", ".", "format", "(", "1e3", "*", "run_duration", "/", "num_runs", ")", ")", "print", "(", "\"Debug: Time for dequeue and processing: \"", "\"{0:.3f} second\\n\"", ".", "format", "(", "deq_proc_duration", ")", ")", "print", "(", "\"Debug: Total Time (inc dequeue): {0:.3f} second\\n\"", ".", "format", "(", "duration", ")", ")", "print", "(", "\"Debug: Time per run (inc dequeue): \"", "\"{0:.3f} milliseconds\\n\"", ".", "format", "(", "1e3", "*", "duration", "/", "num_runs", ")", ")", "if", "not", "args", ".", "no_outfeed_ops", ":", "print", "(", "f\"param_vector.shape = {param_vector.shape}\"", ")", "print", "(", "f\"reduced_distances.shape = {reduced_distances.shape}\"", ")", "print", "(", "f\"eval_param_vector.shape = {eval_param_vector.shape}\"", ")", "if", "samples_collected", "<", "args", ".", "n_samples_target", ":", "raise", "NotImplementedError", "(", "\"Too few iterations. 
Increase max_num_runs parameter.\"", ")", "if", "args", ".", "samples_filepath", ":", "# Save the accepted samples if filepath given", "np", ".", "savetxt", "(", "args", ".", "samples_filepath", ",", "eval_param_vector", ".", "numpy", "(", ")", ",", "delimiter", "=", "\",\"", ")" ]
https://github.com/graphcore/examples/blob/46d2b7687b829778369fc6328170a7b14761e5c6/code_examples/tensorflow2/abc_covid_19/ABC_IPU.py#L305-L380
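The warm-up-then-measure shape of main() is worth keeping on its own: run the workload once so one-time compilation and caching stay off the clock, then time a second run. A generic sketch:

import time

def warmup_and_time(fn, *args):
    fn(*args)                   # warm-up: triggers compilation/caching
    start = time.time()
    result = fn(*args)          # the measured run
    return result, time.time() - start

result, seconds = warmup_and_time(sum, range(1_000_000))
print(f"{seconds * 1e3:.3f} ms")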
edisonlz/fastor
342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3
base/site-packages/androguard/core/bytecodes/dvm.py
python
AnnotationElement.get_value
(self)
return self.value
Return the element value (EncodedValue) :rtype: a :class:`EncodedValue` object
Return the element value (EncodedValue)
[ "Return", "the", "element", "value", "(", "EncodedValue", ")" ]
def get_value(self) : """ Return the element value (EncodedValue) :rtype: a :class:`EncodedValue` object """ return self.value
[ "def", "get_value", "(", "self", ")", ":", "return", "self", ".", "value" ]
https://github.com/edisonlz/fastor/blob/342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3/base/site-packages/androguard/core/bytecodes/dvm.py#L1545-L1551
modin-project/modin
0d9d14e6669be3dd6bb3b72222dbe6a6dffe1bee
modin/core/dataframe/pandas/dataframe/dataframe.py
python
PandasDataframe._axes_lengths
(self)
return [self._row_lengths, self._column_widths]
Get a pair of row partitions lengths and column partitions widths. Returns ------- list The pair of row partitions lengths and column partitions widths.
Get a pair of row partitions lengths and column partitions widths.
[ "Get", "a", "pair", "of", "row", "partitions", "lengths", "and", "column", "partitions", "widths", "." ]
def _axes_lengths(self): """ Get a pair of row partitions lengths and column partitions widths. Returns ------- list The pair of row partitions lengths and column partitions widths. """ return [self._row_lengths, self._column_widths]
[ "def", "_axes_lengths", "(", "self", ")", ":", "return", "[", "self", ".", "_row_lengths", ",", "self", ".", "_column_widths", "]" ]
https://github.com/modin-project/modin/blob/0d9d14e6669be3dd6bb3b72222dbe6a6dffe1bee/modin/core/dataframe/pandas/dataframe/dataframe.py#L142-L151
Azure/azure-linux-extensions
a42ef718c746abab2b3c6a21da87b29e76364558
OmsAgent/omsagent.py
python
was_curl_found
(exit_code, output)
return True
Returns false if exit_code indicates that curl was not installed; this can occur when package lists need to be updated, or when some archives are out-of-date
Returns false if exit_code indicates that curl was not installed; this can occur when package lists need to be updated, or when some archives are out-of-date
[ "Returns", "false", "if", "exit_code", "indicates", "that", "curl", "was", "not", "installed", ";", "this", "can", "occur", "when", "package", "lists", "need", "to", "be", "updated", "or", "when", "some", "archives", "are", "out", "-", "of", "-", "date" ]
def was_curl_found(exit_code, output): """ Returns false if exit_code indicates that curl was not installed; this can occur when package lists need to be updated, or when some archives are out-of-date """ if exit_code is InstallErrorCurlNotInstalled: return False return True
[ "def", "was_curl_found", "(", "exit_code", ",", "output", ")", ":", "if", "exit_code", "is", "InstallErrorCurlNotInstalled", ":", "return", "False", "return", "True" ]
https://github.com/Azure/azure-linux-extensions/blob/a42ef718c746abab2b3c6a21da87b29e76364558/OmsAgent/omsagent.py#L1452-L1460
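One caveat about the record: exit_code is InstallErrorCurlNotInstalled compares object identity, which coincides with value equality only for the small ints CPython happens to intern. A sketch of the equality-based version; the constant's value here is hypothetical, since the record does not show it:

INSTALL_ERROR_CURL_NOT_INSTALLED = 5  # hypothetical; defined elsewhere in omsagent.py

def was_curl_found(exit_code, output):
    # '==' / '!=' compare values, so they are robust for exit codes of any size.
    return exit_code != INSTALL_ERROR_CURL_NOT_INSTALLED

assert was_curl_found(0, "") is True
assert was_curl_found(INSTALL_ERROR_CURL_NOT_INSTALLED, "") is False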
VITA-Group/FasterSeg
478b0265eb9ab626cfbe503ad16d2452878b38cc
latency/operations.py
python
FactorizedReduce.__init__
(self, C_in, C_out, stride=1, slimmable=True, width_mult_list=[1.])
[]
def __init__(self, C_in, C_out, stride=1, slimmable=True, width_mult_list=[1.]): super(FactorizedReduce, self).__init__() assert stride in [1, 2] assert C_out % 2 == 0 self.C_in = C_in self.C_out = C_out self.stride = stride self.slimmable = slimmable self.width_mult_list = width_mult_list self.ratio = (1., 1.) if stride == 1 and slimmable: self.conv1 = USConv2d(C_in, C_out, 1, stride=1, padding=0, bias=False, width_mult_list=width_mult_list) self.bn = USBatchNorm2d(C_out, width_mult_list) self.relu = nn.ReLU(inplace=True) elif stride == 2: self.relu = nn.ReLU(inplace=True) if slimmable: self.conv1 = USConv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False, width_mult_list=width_mult_list) self.conv2 = USConv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False, width_mult_list=width_mult_list) self.bn = USBatchNorm2d(C_out, width_mult_list) else: # TensorRT: [Slice]: slice is out of input range self.conv1 = nn.Conv2d(C_in, C_out, 1, stride=2, padding=0, bias=False) self.bn = nn.BatchNorm2d(C_out)
[ "def", "__init__", "(", "self", ",", "C_in", ",", "C_out", ",", "stride", "=", "1", ",", "slimmable", "=", "True", ",", "width_mult_list", "=", "[", "1.", "]", ")", ":", "super", "(", "FactorizedReduce", ",", "self", ")", ".", "__init__", "(", ")", "assert", "stride", "in", "[", "1", ",", "2", "]", "assert", "C_out", "%", "2", "==", "0", "self", ".", "C_in", "=", "C_in", "self", ".", "C_out", "=", "C_out", "self", ".", "stride", "=", "stride", "self", ".", "slimmable", "=", "slimmable", "self", ".", "width_mult_list", "=", "width_mult_list", "self", ".", "ratio", "=", "(", "1.", ",", "1.", ")", "if", "stride", "==", "1", "and", "slimmable", ":", "self", ".", "conv1", "=", "USConv2d", "(", "C_in", ",", "C_out", ",", "1", ",", "stride", "=", "1", ",", "padding", "=", "0", ",", "bias", "=", "False", ",", "width_mult_list", "=", "width_mult_list", ")", "self", ".", "bn", "=", "USBatchNorm2d", "(", "C_out", ",", "width_mult_list", ")", "self", ".", "relu", "=", "nn", ".", "ReLU", "(", "inplace", "=", "True", ")", "elif", "stride", "==", "2", ":", "self", ".", "relu", "=", "nn", ".", "ReLU", "(", "inplace", "=", "True", ")", "if", "slimmable", ":", "self", ".", "conv1", "=", "USConv2d", "(", "C_in", ",", "C_out", "//", "2", ",", "1", ",", "stride", "=", "2", ",", "padding", "=", "0", ",", "bias", "=", "False", ",", "width_mult_list", "=", "width_mult_list", ")", "self", ".", "conv2", "=", "USConv2d", "(", "C_in", ",", "C_out", "//", "2", ",", "1", ",", "stride", "=", "2", ",", "padding", "=", "0", ",", "bias", "=", "False", ",", "width_mult_list", "=", "width_mult_list", ")", "self", ".", "bn", "=", "USBatchNorm2d", "(", "C_out", ",", "width_mult_list", ")", "else", ":", "# TensorRT: [Slice]: slice is out of input range", "self", ".", "conv1", "=", "nn", ".", "Conv2d", "(", "C_in", ",", "C_out", ",", "1", ",", "stride", "=", "2", ",", "padding", "=", "0", ",", "bias", "=", "False", ")", "self", ".", "bn", "=", "nn", ".", "BatchNorm2d", "(", "C_out", ")" ]
https://github.com/VITA-Group/FasterSeg/blob/478b0265eb9ab626cfbe503ad16d2452878b38cc/latency/operations.py#L440-L463
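The stride-2, non-slimmable branch of FactorizedReduce above reduces to a single 1x1 convolution with stride 2 (see the TensorRT comment in the record). A minimal sketch of that branch with plain torch.nn, assuming torch is installed (USConv2d/USBatchNorm2d are this repo's slimmable variants and are not used here; the channel sizes are made up):

import torch
import torch.nn as nn

# A 1x1 conv with stride 2 halves each spatial dimension and remaps channels.
conv1 = nn.Conv2d(16, 32, 1, stride=2, padding=0, bias=False)
bn = nn.BatchNorm2d(32)
y = bn(conv1(torch.randn(1, 16, 8, 8)))
assert y.shape == (1, 32, 4, 4)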
grow/grow
97fc21730b6a674d5d33948d94968e79447ce433
grow/common/rc_config.py
python
RCConfig.reset_update_check
(self)
Reset the timestamp of the last_checked.
Reset the timestamp of the last_checked.
[ "Reset", "the", "timestamp", "of", "the", "last_checked", "." ]
def reset_update_check(self): """Reset the timestamp of the last_checked.""" self.last_checked = self._time()
[ "def", "reset_update_check", "(", "self", ")", ":", "self", ".", "last_checked", "=", "self", ".", "_time", "(", ")" ]
https://github.com/grow/grow/blob/97fc21730b6a674d5d33948d94968e79447ce433/grow/common/rc_config.py#L76-L78
ArchiveBox/ArchiveBox
663918a37298d6b7617d1f36d346f95947c5bab2
archivebox/cli/__init__.py
python
list_subcommands
()
return dict(sorted(COMMANDS, key=display_order))
find and import all valid archivebox_<subcommand>.py files in CLI_DIR
find and import all valid archivebox_<subcommand>.py files in CLI_DIR
[ "find", "and", "import", "all", "valid", "archivebox_<subcommand", ">", ".", "py", "files", "in", "CLI_DIR" ]
def list_subcommands() -> Dict[str, str]: """find and import all valid archivebox_<subcommand>.py files in CLI_DIR""" COMMANDS = [] for filename in os.listdir(CLI_DIR): if is_cli_module(filename): subcommand = filename.replace('archivebox_', '').replace('.py', '') module = import_module('.archivebox_{}'.format(subcommand), __package__) assert is_valid_cli_module(module, subcommand) COMMANDS.append((subcommand, module.main.__doc__)) globals()[subcommand] = module.main display_order = lambda cmd: ( display_first.index(cmd[0]) if cmd[0] in display_first else 100 + len(cmd[0]) ) return dict(sorted(COMMANDS, key=display_order))
[ "def", "list_subcommands", "(", ")", "->", "Dict", "[", "str", ",", "str", "]", ":", "COMMANDS", "=", "[", "]", "for", "filename", "in", "os", ".", "listdir", "(", "CLI_DIR", ")", ":", "if", "is_cli_module", "(", "filename", ")", ":", "subcommand", "=", "filename", ".", "replace", "(", "'archivebox_'", ",", "''", ")", ".", "replace", "(", "'.py'", ",", "''", ")", "module", "=", "import_module", "(", "'.archivebox_{}'", ".", "format", "(", "subcommand", ")", ",", "__package__", ")", "assert", "is_valid_cli_module", "(", "module", ",", "subcommand", ")", "COMMANDS", ".", "append", "(", "(", "subcommand", ",", "module", ".", "main", ".", "__doc__", ")", ")", "globals", "(", ")", "[", "subcommand", "]", "=", "module", ".", "main", "display_order", "=", "lambda", "cmd", ":", "(", "display_first", ".", "index", "(", "cmd", "[", "0", "]", ")", "if", "cmd", "[", "0", "]", "in", "display_first", "else", "100", "+", "len", "(", "cmd", "[", "0", "]", ")", ")", "return", "dict", "(", "sorted", "(", "COMMANDS", ",", "key", "=", "display_order", ")", ")" ]
https://github.com/ArchiveBox/ArchiveBox/blob/663918a37298d6b7617d1f36d346f95947c5bab2/archivebox/cli/__init__.py#L36-L54
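list_subcommands discovers subcommands by filename convention and imports them dynamically. A self-contained sketch of the same pattern, simplified to a plain name sort instead of the record's display_order (cli_dir, package, and the prefix are whatever the caller supplies):

import os
from importlib import import_module

def discover_subcommands(cli_dir, package, prefix='archivebox_'):
    # Map subcommand name -> docstring of its main(), mirroring the loop above.
    commands = {}
    for filename in sorted(os.listdir(cli_dir)):
        if filename.startswith(prefix) and filename.endswith('.py'):
            module_name = filename[:-len('.py')]
            module = import_module('.' + module_name, package)
            commands[module_name[len(prefix):]] = module.main.__doc__
    return commands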
Qirky/FoxDot
76318f9630bede48ff3994146ed644affa27bfa4
FoxDot/lib/TimeVar.py
python
TimeVar.__radd__
(self, other)
return new
[]
def __radd__(self, other): new = self.math_op(other, "__radd__") if not isinstance(other, (TimeVar, int, float)): return new new = self.new(other) new.evaluate = fetch(rAdd) return new
[ "def", "__radd__", "(", "self", ",", "other", ")", ":", "new", "=", "self", ".", "math_op", "(", "other", ",", "\"__radd__\"", ")", "if", "not", "isinstance", "(", "other", ",", "(", "TimeVar", ",", "int", ",", "float", ")", ")", ":", "return", "new", "new", "=", "self", ".", "new", "(", "other", ")", "new", ".", "evaluate", "=", "fetch", "(", "rAdd", ")", "return", "new" ]
https://github.com/Qirky/FoxDot/blob/76318f9630bede48ff3994146ed644affa27bfa4/FoxDot/lib/TimeVar.py#L312-L318
out0fmemory/GoAgent-Always-Available
c4254984fea633ce3d1893fe5901debd9f22c2a9
server/lib/google/appengine/tools/appcfg.py
python
AppCfgApp.DeleteVersion
(self)
Deletes the specified version for an app.
Deletes the specified version for an app.
[ "Deletes", "the", "specified", "version", "for", "an", "app", "." ]
def DeleteVersion(self): """Deletes the specified version for an app.""" if not (self.options.app_id and self.options.version): self.parser.error('Expected an <app_id> argument, a <version> argument ' 'and an optional <module> argument.') if self.options.module: module = self.options.module else: module = '' rpcserver = self._GetRpcServer() response = rpcserver.Send('/api/versions/delete', app_id=self.options.app_id, version_match=self.options.version, module=module) print >> self.out_fh, response
[ "def", "DeleteVersion", "(", "self", ")", ":", "if", "not", "(", "self", ".", "options", ".", "app_id", "and", "self", ".", "options", ".", "version", ")", ":", "self", ".", "parser", ".", "error", "(", "'Expected an <app_id> argument, a <version> argument '", "'and an optional <module> argument.'", ")", "if", "self", ".", "options", ".", "module", ":", "module", "=", "self", ".", "options", ".", "module", "else", ":", "module", "=", "''", "rpcserver", "=", "self", ".", "_GetRpcServer", "(", ")", "response", "=", "rpcserver", ".", "Send", "(", "'/api/versions/delete'", ",", "app_id", "=", "self", ".", "options", ".", "app_id", ",", "version_match", "=", "self", ".", "options", ".", "version", ",", "module", "=", "module", ")", "print", ">>", "self", ".", "out_fh", ",", "response" ]
https://github.com/out0fmemory/GoAgent-Always-Available/blob/c4254984fea633ce3d1893fe5901debd9f22c2a9/server/lib/google/appengine/tools/appcfg.py#L4211-L4227
dmlc/dgl
8d14a739bc9e446d6c92ef83eafe5782398118de
python/dgl/_deprecate/graph.py
python
DGLGraph.clear_cache
(self)
Clear all cached graph structures such as adjmat. By default, all graph structure related sparse matrices (e.g. adjmat, incmat) are cached so they could be reused with the cost of extra memory consumption. This function can be used to clear the cached matrices if memory is an issue.
Clear all cached graph structures such as adjmat.
[ "Clear", "all", "cached", "graph", "structures", "such", "as", "adjmat", "." ]
def clear_cache(self): """Clear all cached graph structures such as adjmat. By default, all graph structure related sparse matrices (e.g. adjmat, incmat) are cached so they could be reused with the cost of extra memory consumption. This function can be used to clear the cached matrices if memory is an issue. """ self._graph.clear_cache()
[ "def", "clear_cache", "(", "self", ")", ":", "self", ".", "_graph", ".", "clear_cache", "(", ")" ]
https://github.com/dmlc/dgl/blob/8d14a739bc9e446d6c92ef83eafe5782398118de/python/dgl/_deprecate/graph.py#L1743-L1750
enthought/chaco
0907d1dedd07a499202efbaf2fe2a4e51b4c8e5f
chaco/tools/dataprinter.py
python
DataPrinter._build_text_from_event
(self, event)
return self.format % (x, y)
Build the text to display from the mouse event.
Build the text to display from the mouse event.
[ "Build", "the", "text", "to", "display", "from", "the", "mouse", "event", "." ]
def _build_text_from_event(self, event): """Build the text to display from the mouse event.""" plot = self.component ndx = plot.map_index((event.x, event.y), index_only=True) x = plot.index.get_data()[ndx] y = plot.value.get_data()[ndx] return self.format % (x, y)
[ "def", "_build_text_from_event", "(", "self", ",", "event", ")", ":", "plot", "=", "self", ".", "component", "ndx", "=", "plot", ".", "map_index", "(", "(", "event", ".", "x", ",", "event", ".", "y", ")", ",", "index_only", "=", "True", ")", "x", "=", "plot", ".", "index", ".", "get_data", "(", ")", "[", "ndx", "]", "y", "=", "plot", ".", "value", ".", "get_data", "(", ")", "[", "ndx", "]", "return", "self", ".", "format", "%", "(", "x", ",", "y", ")" ]
https://github.com/enthought/chaco/blob/0907d1dedd07a499202efbaf2fe2a4e51b4c8e5f/chaco/tools/dataprinter.py#L51-L57
ahmetcemturan/SFACT
7576e29ba72b33e5058049b77b7b558875542747
skeinforge_application/skeinforge_plugins/craft_plugins/fill.py
python
YIntersectionPath.__repr__
(self)
return '%s, %s, %s' % ( self.pathIndex, self.pointIndex, self.y )
Get the string representation of this y intersection.
Get the string representation of this y intersection.
[ "Get", "the", "string", "representation", "of", "this", "y", "intersection", "." ]
def __repr__(self): 'Get the string representation of this y intersection.' return '%s, %s, %s' % ( self.pathIndex, self.pointIndex, self.y )
[ "def", "__repr__", "(", "self", ")", ":", "return", "'%s, %s, %s'", "%", "(", "self", ".", "pathIndex", ",", "self", ".", "pointIndex", ",", "self", ".", "y", ")" ]
https://github.com/ahmetcemturan/SFACT/blob/7576e29ba72b33e5058049b77b7b558875542747/skeinforge_application/skeinforge_plugins/craft_plugins/fill.py#L1384-L1386
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/IronPython/27/Doc/jinja2/environment.py
python
Template.debug_info
(self)
return [tuple(map(int, x.split('='))) for x in self._debug_info.split('&')]
The debug info mapping.
The debug info mapping.
[ "The", "debug", "info", "mapping", "." ]
def debug_info(self): """The debug info mapping.""" return [tuple(map(int, x.split('='))) for x in self._debug_info.split('&')]
[ "def", "debug_info", "(", "self", ")", ":", "return", "[", "tuple", "(", "map", "(", "int", ",", "x", ".", "split", "(", "'='", ")", ")", ")", "for", "x", "in", "self", ".", "_debug_info", ".", "split", "(", "'&'", ")", "]" ]
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/IronPython/27/Doc/jinja2/environment.py#L749-L752
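The debug_info property decodes jinja2's compact debug-info string: '&'-separated 'codeline=sourceline' pairs. The same parse in isolation (the sample string is made up):

info = '1=5&4=12&9=30'
pairs = [tuple(map(int, x.split('='))) for x in info.split('&')]
assert pairs == [(1, 5), (4, 12), (9, 30)]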
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/bokeh-1.4.0-py3.7.egg/bokeh/core/property/descriptors.py
python
PropertyDescriptor.__str__
(self)
return "PropertyDescriptor(%s)" % (self.name)
Basic string representation of ``PropertyDescriptor``. **Subclasses must implement this to serve their specific needs.**
Basic string representation of ``PropertyDescriptor``.
[ "Basic", "string", "representation", "of", "PropertyDescriptor", "." ]
def __str__(self): ''' Basic string representation of ``PropertyDescriptor``. **Subclasses must implement this to serve their specific needs.** ''' return "PropertyDescriptor(%s)" % (self.name)
[ "def", "__str__", "(", "self", ")", ":", "return", "\"PropertyDescriptor(%s)\"", "%", "(", "self", ".", "name", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/bokeh-1.4.0-py3.7.egg/bokeh/core/property/descriptors.py#L137-L143
pyscf/pyscf
0adfb464333f5ceee07b664f291d4084801bae64
pyscf/lib/linalg_helper.py
python
krylov
(aop, b, x0=None, tol=1e-10, max_cycle=30, dot=numpy.dot, lindep=DSOLVE_LINDEP, callback=None, hermi=False, max_memory=MAX_MEMORY, verbose=logger.WARN)
return x
r'''Krylov subspace method to solve (1+a) x = b. Ref: J. A. Pople et al, Int. J. Quantum. Chem. Symp. 13, 225 (1979). Args: aop : function(x) => array_like_x aop(x) to mimic the matrix vector multiplication :math:`\sum_{j}a_{ij} x_j`. The argument is a 1D array. The returned value is a 1D array. b : a vector or a list of vectors Kwargs: x0 : 1D array Initial guess tol : float Tolerance to terminate the operation aop(x). max_cycle : int max number of iterations. lindep : float Linear dependency threshold. The function is terminated when the smallest eigenvalue of the metric of the trial vectors is lower than this threshold. dot : function(x, y) => scalar Inner product callback : function(envs_dict) => None callback function takes one dict as the argument which is generated by the builtin function :func:`locals`, so that the callback function can access all local variables in the current envrionment. Returns: x : ndarray like b Examples: >>> from pyscf import lib >>> a = numpy.random.random((10,10)) * 1e-2 >>> b = numpy.random.random(10) >>> aop = lambda x: numpy.dot(a,x) >>> x = lib.krylov(aop, b) >>> numpy.allclose(numpy.dot(a,x)+x, b) True
r'''Krylov subspace method to solve (1+a) x = b. Ref: J. A. Pople et al, Int. J. Quantum. Chem. Symp. 13, 225 (1979).
[ "r", "Krylov", "subspace", "method", "to", "solve", "(", "1", "+", "a", ")", "x", "=", "b", ".", "Ref", ":", "J", ".", "A", ".", "Pople", "et", "al", "Int", ".", "J", ".", "Quantum", ".", "Chem", ".", "Symp", ".", "13", "225", "(", "1979", ")", "." ]
def krylov(aop, b, x0=None, tol=1e-10, max_cycle=30, dot=numpy.dot, lindep=DSOLVE_LINDEP, callback=None, hermi=False, max_memory=MAX_MEMORY, verbose=logger.WARN): r'''Krylov subspace method to solve (1+a) x = b. Ref: J. A. Pople et al, Int. J. Quantum. Chem. Symp. 13, 225 (1979). Args: aop : function(x) => array_like_x aop(x) to mimic the matrix vector multiplication :math:`\sum_{j}a_{ij} x_j`. The argument is a 1D array. The returned value is a 1D array. b : a vector or a list of vectors Kwargs: x0 : 1D array Initial guess tol : float Tolerance to terminate the operation aop(x). max_cycle : int max number of iterations. lindep : float Linear dependency threshold. The function is terminated when the smallest eigenvalue of the metric of the trial vectors is lower than this threshold. dot : function(x, y) => scalar Inner product callback : function(envs_dict) => None callback function takes one dict as the argument which is generated by the builtin function :func:`locals`, so that the callback function can access all local variables in the current envrionment. Returns: x : ndarray like b Examples: >>> from pyscf import lib >>> a = numpy.random.random((10,10)) * 1e-2 >>> b = numpy.random.random(10) >>> aop = lambda x: numpy.dot(a,x) >>> x = lib.krylov(aop, b) >>> numpy.allclose(numpy.dot(a,x)+x, b) True ''' if isinstance(aop, numpy.ndarray) and aop.ndim == 2: return numpy.linalg.solve(aop+numpy.eye(aop.shape[0]), b) if isinstance(verbose, logger.Logger): log = verbose else: log = logger.Logger(sys.stdout, verbose) if not (isinstance(b, numpy.ndarray) and b.ndim == 1): b = numpy.asarray(b) if x0 is None: x1 = b else: b = b - (x0 + aop(x0)) x1 = b if x1.ndim == 1: x1 = x1.reshape(1, x1.size) nroots, ndim = x1.shape # Not exactly QR, vectors are orthogonal but not normalized x1, rmat = _qr(x1, dot, lindep) for i in range(len(x1)): x1[i] *= rmat[i,i] innerprod = [dot(xi.conj(), xi).real for xi in x1] if innerprod: max_innerprod = max(innerprod) else: max_innerprod = 0 if max_innerprod < lindep or max_innerprod < tol**2: if x0 is None: return numpy.zeros_like(b) else: return x0 _incore = max_memory*1e6/b.nbytes > 14 log.debug1('max_memory %d incore %s', max_memory, _incore) if _incore: xs = [] ax = [] else: xs = _Xlist() ax = _Xlist() max_cycle = min(max_cycle, ndim) for cycle in range(max_cycle): axt = aop(x1) if axt.ndim == 1: axt = axt.reshape(1,ndim) xs.extend(x1) ax.extend(axt) if callable(callback): callback(cycle, xs, ax) x1 = axt.copy() for i in range(len(xs)): xsi = numpy.asarray(xs[i]) for j, axj in enumerate(axt): x1[j] -= xsi * (dot(xsi.conj(), axj) / innerprod[i]) axt = None max_innerprod = 0 idx = [] for i, xi in enumerate(x1): innerprod1 = dot(xi.conj(), xi).real max_innerprod = max(max_innerprod, innerprod1) if innerprod1 > lindep and innerprod1 > tol**2: idx.append(i) innerprod.append(innerprod1) log.debug('krylov cycle %d r = %g', cycle, max_innerprod**.5) if max_innerprod < lindep or max_innerprod < tol**2: break x1 = x1[idx] nd = cycle + 1 h = numpy.empty((nd,nd), dtype=x1.dtype) for i in range(nd): xi = numpy.asarray(xs[i]) if hermi: for j in range(i+1): h[i,j] = dot(xi.conj(), ax[j]) h[j,i] = h[i,j].conj() else: for j in range(nd): h[i,j] = dot(xi.conj(), ax[j]) xi = None # Add the contribution of I in (1+a) for i in range(nd): h[i,i] += innerprod[i] g = numpy.zeros((nd,nroots), dtype=x1.dtype) if b.ndim == 1: g[0] = innerprod[0] else: # Restore the first nroots vectors, which are array b or b-(1+a)x0 for i in range(min(nd, nroots)): xsi = numpy.asarray(xs[i]) for j in range(nroots): g[i,j] = dot(xsi.conj(), b[j]) c = numpy.linalg.solve(h, g) x = _gen_x0(c, xs) if b.ndim == 1: x = x[0] if x0 is not None: x += x0 return x
[ "def", "krylov", "(", "aop", ",", "b", ",", "x0", "=", "None", ",", "tol", "=", "1e-10", ",", "max_cycle", "=", "30", ",", "dot", "=", "numpy", ".", "dot", ",", "lindep", "=", "DSOLVE_LINDEP", ",", "callback", "=", "None", ",", "hermi", "=", "False", ",", "max_memory", "=", "MAX_MEMORY", ",", "verbose", "=", "logger", ".", "WARN", ")", ":", "if", "isinstance", "(", "aop", ",", "numpy", ".", "ndarray", ")", "and", "aop", ".", "ndim", "==", "2", ":", "return", "numpy", ".", "linalg", ".", "solve", "(", "aop", "+", "numpy", ".", "eye", "(", "aop", ".", "shape", "[", "0", "]", ")", ",", "b", ")", "if", "isinstance", "(", "verbose", ",", "logger", ".", "Logger", ")", ":", "log", "=", "verbose", "else", ":", "log", "=", "logger", ".", "Logger", "(", "sys", ".", "stdout", ",", "verbose", ")", "if", "not", "(", "isinstance", "(", "b", ",", "numpy", ".", "ndarray", ")", "and", "b", ".", "ndim", "==", "1", ")", ":", "b", "=", "numpy", ".", "asarray", "(", "b", ")", "if", "x0", "is", "None", ":", "x1", "=", "b", "else", ":", "b", "=", "b", "-", "(", "x0", "+", "aop", "(", "x0", ")", ")", "x1", "=", "b", "if", "x1", ".", "ndim", "==", "1", ":", "x1", "=", "x1", ".", "reshape", "(", "1", ",", "x1", ".", "size", ")", "nroots", ",", "ndim", "=", "x1", ".", "shape", "# Not exactly QR, vectors are orthogonal but not normalized", "x1", ",", "rmat", "=", "_qr", "(", "x1", ",", "dot", ",", "lindep", ")", "for", "i", "in", "range", "(", "len", "(", "x1", ")", ")", ":", "x1", "[", "i", "]", "*=", "rmat", "[", "i", ",", "i", "]", "innerprod", "=", "[", "dot", "(", "xi", ".", "conj", "(", ")", ",", "xi", ")", ".", "real", "for", "xi", "in", "x1", "]", "if", "innerprod", ":", "max_innerprod", "=", "max", "(", "innerprod", ")", "else", ":", "max_innerprod", "=", "0", "if", "max_innerprod", "<", "lindep", "or", "max_innerprod", "<", "tol", "**", "2", ":", "if", "x0", "is", "None", ":", "return", "numpy", ".", "zeros_like", "(", "b", ")", "else", ":", "return", "x0", "_incore", "=", "max_memory", "*", "1e6", "/", "b", ".", "nbytes", ">", "14", "log", ".", "debug1", "(", "'max_memory %d incore %s'", ",", "max_memory", ",", "_incore", ")", "if", "_incore", ":", "xs", "=", "[", "]", "ax", "=", "[", "]", "else", ":", "xs", "=", "_Xlist", "(", ")", "ax", "=", "_Xlist", "(", ")", "max_cycle", "=", "min", "(", "max_cycle", ",", "ndim", ")", "for", "cycle", "in", "range", "(", "max_cycle", ")", ":", "axt", "=", "aop", "(", "x1", ")", "if", "axt", ".", "ndim", "==", "1", ":", "axt", "=", "axt", ".", "reshape", "(", "1", ",", "ndim", ")", "xs", ".", "extend", "(", "x1", ")", "ax", ".", "extend", "(", "axt", ")", "if", "callable", "(", "callback", ")", ":", "callback", "(", "cycle", ",", "xs", ",", "ax", ")", "x1", "=", "axt", ".", "copy", "(", ")", "for", "i", "in", "range", "(", "len", "(", "xs", ")", ")", ":", "xsi", "=", "numpy", ".", "asarray", "(", "xs", "[", "i", "]", ")", "for", "j", ",", "axj", "in", "enumerate", "(", "axt", ")", ":", "x1", "[", "j", "]", "-=", "xsi", "*", "(", "dot", "(", "xsi", ".", "conj", "(", ")", ",", "axj", ")", "/", "innerprod", "[", "i", "]", ")", "axt", "=", "None", "max_innerprod", "=", "0", "idx", "=", "[", "]", "for", "i", ",", "xi", "in", "enumerate", "(", "x1", ")", ":", "innerprod1", "=", "dot", "(", "xi", ".", "conj", "(", ")", ",", "xi", ")", ".", "real", "max_innerprod", "=", "max", "(", "max_innerprod", ",", "innerprod1", ")", "if", "innerprod1", ">", "lindep", "and", "innerprod1", ">", "tol", "**", "2", ":", "idx", ".", "append", "(", "i", ")", "innerprod", ".", "append", "(", "innerprod1", ")", "log", ".", "debug", "(", "'krylov cycle %d r = %g'", ",", "cycle", ",", "max_innerprod", "**", ".5", ")", "if", "max_innerprod", "<", "lindep", "or", "max_innerprod", "<", "tol", "**", "2", ":", "break", "x1", "=", "x1", "[", "idx", "]", "nd", "=", "cycle", "+", "1", "h", "=", "numpy", ".", "empty", "(", "(", "nd", ",", "nd", ")", ",", "dtype", "=", "x1", ".", "dtype", ")", "for", "i", "in", "range", "(", "nd", ")", ":", "xi", "=", "numpy", ".", "asarray", "(", "xs", "[", "i", "]", ")", "if", "hermi", ":", "for", "j", "in", "range", "(", "i", "+", "1", ")", ":", "h", "[", "i", ",", "j", "]", "=", "dot", "(", "xi", ".", "conj", "(", ")", ",", "ax", "[", "j", "]", ")", "h", "[", "j", ",", "i", "]", "=", "h", "[", "i", ",", "j", "]", ".", "conj", "(", ")", "else", ":", "for", "j", "in", "range", "(", "nd", ")", ":", "h", "[", "i", ",", "j", "]", "=", "dot", "(", "xi", ".", "conj", "(", ")", ",", "ax", "[", "j", "]", ")", "xi", "=", "None", "# Add the contribution of I in (1+a)", "for", "i", "in", "range", "(", "nd", ")", ":", "h", "[", "i", ",", "i", "]", "+=", "innerprod", "[", "i", "]", "g", "=", "numpy", ".", "zeros", "(", "(", "nd", ",", "nroots", ")", ",", "dtype", "=", "x1", ".", "dtype", ")", "if", "b", ".", "ndim", "==", "1", ":", "g", "[", "0", "]", "=", "innerprod", "[", "0", "]", "else", ":", "# Restore the first nroots vectors, which are array b or b-(1+a)x0", "for", "i", "in", "range", "(", "min", "(", "nd", ",", "nroots", ")", ")", ":", "xsi", "=", "numpy", ".", "asarray", "(", "xs", "[", "i", "]", ")", "for", "j", "in", "range", "(", "nroots", ")", ":", "g", "[", "i", ",", "j", "]", "=", "dot", "(", "xsi", ".", "conj", "(", ")", ",", "b", "[", "j", "]", ")", "c", "=", "numpy", ".", "linalg", ".", "solve", "(", "h", ",", "g", ")", "x", "=", "_gen_x0", "(", "c", ",", "xs", ")", "if", "b", ".", "ndim", "==", "1", ":", "x", "=", "x", "[", "0", "]", "if", "x0", "is", "not", "None", ":", "x", "+=", "x0", "return", "x" ]
https://github.com/pyscf/pyscf/blob/0adfb464333f5ceee07b664f291d4084801bae64/pyscf/lib/linalg_helper.py#L1273-L1428
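The dense shortcut at the top of krylov (and the doctest in its docstring) can be checked directly with numpy; lib.krylov(lambda x: a.dot(x), b) should agree with the dense solve for a small, well-conditioned system:

import numpy

numpy.random.seed(0)
a = numpy.random.random((10, 10)) * 1e-2
b = numpy.random.random(10)
x = numpy.linalg.solve(a + numpy.eye(10), b)  # solve (1 + a) x = b densely
assert numpy.allclose(a.dot(x) + x, b)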
CvvT/dumpDex
92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1
python/idaapi.py
python
loader_input_t_from_fp
(*args)
return _idaapi.loader_input_t_from_fp(*args)
loader_input_t_from_fp(fp) -> loader_input_t
loader_input_t_from_fp(fp) -> loader_input_t
[ "loader_input_t_from_fp", "(", "fp", ")", "-", ">", "loader_input_t" ]
def loader_input_t_from_fp(*args): """ loader_input_t_from_fp(fp) -> loader_input_t """ return _idaapi.loader_input_t_from_fp(*args)
[ "def", "loader_input_t_from_fp", "(", "*", "args", ")", ":", "return", "_idaapi", ".", "loader_input_t_from_fp", "(", "*", "args", ")" ]
https://github.com/CvvT/dumpDex/blob/92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1/python/idaapi.py#L25773-L25777
wxWidgets/Phoenix
b2199e299a6ca6d866aa6f3d0888499136ead9d6
wx/lib/agw/flatnotebook.py
python
FlatNotebook.SetRightClickMenu
(self, menu)
Sets the popup menu associated to a right click on a tab. :param `menu`: an instance of :class:`wx.Menu`.
Sets the popup menu associated to a right click on a tab.
[ "Sets", "the", "popup", "menu", "associated", "to", "a", "right", "click", "on", "a", "tab", "." ]
def SetRightClickMenu(self, menu): """ Sets the popup menu associated to a right click on a tab. :param `menu`: an instance of :class:`wx.Menu`. """ self._pages._pRightClickMenu = menu
[ "def", "SetRightClickMenu", "(", "self", ",", "menu", ")", ":", "self", ".", "_pages", ".", "_pRightClickMenu", "=", "menu" ]
https://github.com/wxWidgets/Phoenix/blob/b2199e299a6ca6d866aa6f3d0888499136ead9d6/wx/lib/agw/flatnotebook.py#L4848-L4855
donnemartin/data-science-ipython-notebooks
5b3c00d462c6e9200315afe46d0093948621eb95
scipy/thinkstats2.py
python
Hist.Freq
(self, x)
return self.d.get(x, 0)
Gets the frequency associated with the value x. Args: x: number value Returns: int frequency
Gets the frequency associated with the value x.
[ "Gets", "the", "frequency", "associated", "with", "the", "value", "x", "." ]
def Freq(self, x): """Gets the frequency associated with the value x. Args: x: number value Returns: int frequency """ return self.d.get(x, 0)
[ "def", "Freq", "(", "self", ",", "x", ")", ":", "return", "self", ".", "d", ".", "get", "(", "x", ",", "0", ")" ]
https://github.com/donnemartin/data-science-ipython-notebooks/blob/5b3c00d462c6e9200315afe46d0093948621eb95/scipy/thinkstats2.py#L373-L382
spesmilo/electrum
bdbd59300fbd35b01605e66145458e5f396108e8
electrum/gui/qt/main_window.py
python
ElectrumWindow.tx_from_text
(self, data: Union[str, bytes])
[]
def tx_from_text(self, data: Union[str, bytes]) -> Union[None, 'PartialTransaction', 'Transaction']: from electrum.transaction import tx_from_any try: return tx_from_any(data) except BaseException as e: self.show_critical(_("Electrum was unable to parse your transaction") + ":\n" + repr(e)) return
[ "def", "tx_from_text", "(", "self", ",", "data", ":", "Union", "[", "str", ",", "bytes", "]", ")", "->", "Union", "[", "None", ",", "'PartialTransaction'", ",", "'Transaction'", "]", ":", "from", "electrum", ".", "transaction", "import", "tx_from_any", "try", ":", "return", "tx_from_any", "(", "data", ")", "except", "BaseException", "as", "e", ":", "self", ".", "show_critical", "(", "_", "(", "\"Electrum was unable to parse your transaction\"", ")", "+", "\":\\n\"", "+", "repr", "(", "e", ")", ")", "return" ]
https://github.com/spesmilo/electrum/blob/bdbd59300fbd35b01605e66145458e5f396108e8/electrum/gui/qt/main_window.py#L2827-L2833
elastic/elasticsearch-py
6ef1adfa3c840a84afda7369cd8e43ae7dc45cdb
elasticsearch/_sync/client/rollup.py
python
RollupClient.get_jobs
( self, *, id: Optional[Any] = None, error_trace: Optional[bool] = None, filter_path: Optional[Union[List[str], str]] = None, human: Optional[bool] = None, pretty: Optional[bool] = None, )
return self._perform_request("GET", __target, headers=__headers)
Retrieves the configuration, stats, and status of rollup jobs. `<https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-job.html>`_ :param id: The ID of the job(s) to fetch. Accepts glob patterns, or left blank for all jobs
Retrieves the configuration, stats, and status of rollup jobs.
[ "Retrieves", "the", "configuration", "stats", "and", "status", "of", "rollup", "jobs", "." ]
def get_jobs( self, *, id: Optional[Any] = None, error_trace: Optional[bool] = None, filter_path: Optional[Union[List[str], str]] = None, human: Optional[bool] = None, pretty: Optional[bool] = None, ) -> ObjectApiResponse[Any]: """ Retrieves the configuration, stats, and status of rollup jobs. `<https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-job.html>`_ :param id: The ID of the job(s) to fetch. Accepts glob patterns, or left blank for all jobs """ if id not in SKIP_IN_PATH: __path = f"/_rollup/job/{_quote(id)}" else: __path = "/_rollup/job" __query: Dict[str, Any] = {} if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: __query["filter_path"] = filter_path if human is not None: __query["human"] = human if pretty is not None: __query["pretty"] = pretty if __query: __target = f"{__path}?{_quote_query(__query)}" else: __target = __path __headers = {"accept": "application/json"} return self._perform_request("GET", __target, headers=__headers)
[ "def", "get_jobs", "(", "self", ",", "*", ",", "id", ":", "Optional", "[", "Any", "]", "=", "None", ",", "error_trace", ":", "Optional", "[", "bool", "]", "=", "None", ",", "filter_path", ":", "Optional", "[", "Union", "[", "List", "[", "str", "]", ",", "str", "]", "]", "=", "None", ",", "human", ":", "Optional", "[", "bool", "]", "=", "None", ",", "pretty", ":", "Optional", "[", "bool", "]", "=", "None", ",", ")", "->", "ObjectApiResponse", "[", "Any", "]", ":", "if", "id", "not", "in", "SKIP_IN_PATH", ":", "__path", "=", "f\"/_rollup/job/{_quote(id)}\"", "else", ":", "__path", "=", "\"/_rollup/job\"", "__query", ":", "Dict", "[", "str", ",", "Any", "]", "=", "{", "}", "if", "error_trace", "is", "not", "None", ":", "__query", "[", "\"error_trace\"", "]", "=", "error_trace", "if", "filter_path", "is", "not", "None", ":", "__query", "[", "\"filter_path\"", "]", "=", "filter_path", "if", "human", "is", "not", "None", ":", "__query", "[", "\"human\"", "]", "=", "human", "if", "pretty", "is", "not", "None", ":", "__query", "[", "\"pretty\"", "]", "=", "pretty", "if", "__query", ":", "__target", "=", "f\"{__path}?{_quote_query(__query)}\"", "else", ":", "__target", "=", "__path", "__headers", "=", "{", "\"accept\"", ":", "\"application/json\"", "}", "return", "self", ".", "_perform_request", "(", "\"GET\"", ",", "__target", ",", "headers", "=", "__headers", ")" ]
https://github.com/elastic/elasticsearch-py/blob/6ef1adfa3c840a84afda7369cd8e43ae7dc45cdb/elasticsearch/_sync/client/rollup.py#L64-L99
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-darwin/x64/mako/_ast_util.py
python
get_child_nodes
(node)
return list(iter_child_nodes(node))
Like `iter_child_nodes` but returns a list.
Like `iter_child_nodes` but returns a list.
[ "Like", "iter_child_nodes", "but", "returns", "a", "list", "." ]
def get_child_nodes(node): """Like `iter_child_nodes` but returns a list.""" return list(iter_child_nodes(node))
[ "def", "get_child_nodes", "(", "node", ")", ":", "return", "list", "(", "iter_child_nodes", "(", "node", ")", ")" ]
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-darwin/x64/mako/_ast_util.py#L205-L207
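mako's _ast_util mirrors the stdlib ast helpers, so the equivalent standard-library call (not mako's module) shows the behavior:

import ast

tree = ast.parse('x = 1 + 2')
assign = tree.body[0]
children = list(ast.iter_child_nodes(assign))  # the Name target and the BinOp value
assert len(children) == 2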
chribsen/simple-machine-learning-examples
dc94e52a4cebdc8bb959ff88b81ff8cfeca25022
venv/lib/python2.7/site-packages/scipy/signal/ltisys.py
python
TransferFunction.den
(self)
return self._den
Denominator of the `TransferFunction` system.
Denominator of the `TransferFunction` system.
[ "Denominator", "of", "the", "TransferFunction", "system", "." ]
def den(self): """Denominator of the `TransferFunction` system.""" return self._den
[ "def", "den", "(", "self", ")", ":", "return", "self", ".", "_den" ]
https://github.com/chribsen/simple-machine-learning-examples/blob/dc94e52a4cebdc8bb959ff88b81ff8cfeca25022/venv/lib/python2.7/site-packages/scipy/signal/ltisys.py#L789-L791
kubernetes-client/python
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
kubernetes/client/models/v1_policy_rule.py
python
V1PolicyRule.__init__
(self, api_groups=None, non_resource_ur_ls=None, resource_names=None, resources=None, verbs=None, local_vars_configuration=None)
V1PolicyRule - a model defined in OpenAPI
V1PolicyRule - a model defined in OpenAPI
[ "V1PolicyRule", "-", "a", "model", "defined", "in", "OpenAPI" ]
def __init__(self, api_groups=None, non_resource_ur_ls=None, resource_names=None, resources=None, verbs=None, local_vars_configuration=None): # noqa: E501 """V1PolicyRule - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._api_groups = None self._non_resource_ur_ls = None self._resource_names = None self._resources = None self._verbs = None self.discriminator = None if api_groups is not None: self.api_groups = api_groups if non_resource_ur_ls is not None: self.non_resource_ur_ls = non_resource_ur_ls if resource_names is not None: self.resource_names = resource_names if resources is not None: self.resources = resources self.verbs = verbs
[ "def", "__init__", "(", "self", ",", "api_groups", "=", "None", ",", "non_resource_ur_ls", "=", "None", ",", "resource_names", "=", "None", ",", "resources", "=", "None", ",", "verbs", "=", "None", ",", "local_vars_configuration", "=", "None", ")", ":", "# noqa: E501", "# noqa: E501", "if", "local_vars_configuration", "is", "None", ":", "local_vars_configuration", "=", "Configuration", "(", ")", "self", ".", "local_vars_configuration", "=", "local_vars_configuration", "self", ".", "_api_groups", "=", "None", "self", ".", "_non_resource_ur_ls", "=", "None", "self", ".", "_resource_names", "=", "None", "self", ".", "_resources", "=", "None", "self", ".", "_verbs", "=", "None", "self", ".", "discriminator", "=", "None", "if", "api_groups", "is", "not", "None", ":", "self", ".", "api_groups", "=", "api_groups", "if", "non_resource_ur_ls", "is", "not", "None", ":", "self", ".", "non_resource_ur_ls", "=", "non_resource_ur_ls", "if", "resource_names", "is", "not", "None", ":", "self", ".", "resource_names", "=", "resource_names", "if", "resources", "is", "not", "None", ":", "self", ".", "resources", "=", "resources", "self", ".", "verbs", "=", "verbs" ]
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/v1_policy_rule.py#L51-L72
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/Python-2.7.9/Tools/pybench/pybench.py
python
Test.get_timer
(self)
return get_timer(self.timer)
Return the timer function to use for the test.
Return the timer function to use for the test.
[ "Return", "the", "timer", "function", "to", "use", "for", "the", "test", "." ]
def get_timer(self): """ Return the timer function to use for the test. """ return get_timer(self.timer)
[ "def", "get_timer", "(", "self", ")", ":", "return", "get_timer", "(", "self", ".", "timer", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/Python-2.7.9/Tools/pybench/pybench.py#L245-L250
python/cpython
e13cdca0f5224ec4e23bdd04bb3120506964bc8b
Lib/logging/__init__.py
python
makeLogRecord
(dict)
return rv
Make a LogRecord whose attributes are defined by the specified dictionary, This function is useful for converting a logging event received over a socket connection (which is sent as a dictionary) into a LogRecord instance.
Make a LogRecord whose attributes are defined by the specified dictionary, This function is useful for converting a logging event received over a socket connection (which is sent as a dictionary) into a LogRecord instance.
[ "Make", "a", "LogRecord", "whose", "attributes", "are", "defined", "by", "the", "specified", "dictionary", "This", "function", "is", "useful", "for", "converting", "a", "logging", "event", "received", "over", "a", "socket", "connection", "(", "which", "is", "sent", "as", "a", "dictionary", ")", "into", "a", "LogRecord", "instance", "." ]
def makeLogRecord(dict): """ Make a LogRecord whose attributes are defined by the specified dictionary, This function is useful for converting a logging event received over a socket connection (which is sent as a dictionary) into a LogRecord instance. """ rv = _logRecordFactory(None, None, "", 0, "", (), None, None) rv.__dict__.update(dict) return rv
[ "def", "makeLogRecord", "(", "dict", ")", ":", "rv", "=", "_logRecordFactory", "(", "None", ",", "None", ",", "\"\"", ",", "0", ",", "\"\"", ",", "(", ")", ",", "None", ",", "None", ")", "rv", ".", "__dict__", ".", "update", "(", "dict", ")", "return", "rv" ]
https://github.com/python/cpython/blob/e13cdca0f5224ec4e23bdd04bb3120506964bc8b/Lib/logging/__init__.py#L396-L405
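makeLogRecord is the receiving half of socket-based logging: a record is flattened to a dict on one side and rebuilt on the other. A round-trip through a plain dict:

import logging

rec = logging.LogRecord('demo', logging.INFO, __file__, 1, 'hello %s', ('world',), None)
clone = logging.makeLogRecord(rec.__dict__)
assert clone.getMessage() == 'hello world'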
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/Python-2.7.9/Tools/scripts/texi2html.py
python
TexinfoParser.bgn_example
(self, args)
[]
def bgn_example(self, args): self.nofill = self.nofill + 1 self.write('<PRE>')
[ "def", "bgn_example", "(", "self", ",", "args", ")", ":", "self", ".", "nofill", "=", "self", ".", "nofill", "+", "1", "self", ".", "write", "(", "'<PRE>'", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/Python-2.7.9/Tools/scripts/texi2html.py#L1482-L1484
mysql/mysql-connector-python
c5460bcbb0dff8e4e48bf4af7a971c89bf486d85
lib/mysqlx/result.py
python
Column.get_character_set_name
(self)
return self._character_set_name
Returns the character set name. Returns: str: The character set name.
Returns the character set name.
[ "Returns", "the", "character", "set", "name", "." ]
def get_character_set_name(self): """Returns the character set name. Returns: str: The character set name. """ return self._character_set_name
[ "def", "get_character_set_name", "(", "self", ")", ":", "return", "self", ".", "_character_set_name" ]
https://github.com/mysql/mysql-connector-python/blob/c5460bcbb0dff8e4e48bf4af7a971c89bf486d85/lib/mysqlx/result.py#L725-L731
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
osh/builtin_process.py
python
Fg.Run
(self, cmd_val)
return self.job_state.WhenContinued(pid, self.waiter)
[]
def Run(self, cmd_val): # type: (cmd_value__Argv) -> int # Note: 'fg' currently works with processes, but not pipelines. See issue # #360. Part of it is that we should use posix.killpg(). pid = self.job_state.GetLastStopped() if pid == -1: log('No job to put in the foreground') return 1 # TODO: Print job ID rather than the PID log('Continue PID %d', pid) posix.kill(pid, signal.SIGCONT) return self.job_state.WhenContinued(pid, self.waiter)
[ "def", "Run", "(", "self", ",", "cmd_val", ")", ":", "# type: (cmd_value__Argv) -> int", "# Note: 'fg' currently works with processes, but not pipelines. See issue", "# #360. Part of it is that we should use posix.killpg().", "pid", "=", "self", ".", "job_state", ".", "GetLastStopped", "(", ")", "if", "pid", "==", "-", "1", ":", "log", "(", "'No job to put in the foreground'", ")", "return", "1", "# TODO: Print job ID rather than the PID", "log", "(", "'Continue PID %d'", ",", "pid", ")", "posix", ".", "kill", "(", "pid", ",", "signal", ".", "SIGCONT", ")", "return", "self", ".", "job_state", ".", "WhenContinued", "(", "pid", ",", "self", ".", "waiter", ")" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/osh/builtin_process.py#L247-L260
ScottfreeLLC/AlphaPy
e6419cc811c2a3abc1ad522a85a888c8ef386056
alphapy/transforms.py
python
streak
(vec)
return latest_streak
r"""Determine the length of the latest streak. Parameters ---------- vec : pandas.Series The input array for calculating the latest streak. Returns ------- latest_streak : int The length of the latest streak. Example ------- >>> vec.rolling(window=20).apply(streak)
r"""Determine the length of the latest streak.
[ "r", "Determine", "the", "length", "of", "the", "latest", "streak", "." ]
def streak(vec): r"""Determine the length of the latest streak. Parameters ---------- vec : pandas.Series The input array for calculating the latest streak. Returns ------- latest_streak : int The length of the latest streak. Example ------- >>> vec.rolling(window=20).apply(streak) """ latest_streak = [len(list(g)) for k, g in itertools.groupby(vec)][-1] return latest_streak
[ "def", "streak", "(", "vec", ")", ":", "latest_streak", "=", "[", "len", "(", "list", "(", "g", ")", ")", "for", "k", ",", "g", "in", "itertools", ".", "groupby", "(", "vec", ")", "]", "[", "-", "1", "]", "return", "latest_streak" ]
https://github.com/ScottfreeLLC/AlphaPy/blob/e6419cc811c2a3abc1ad522a85a888c8ef386056/alphapy/transforms.py#L1359-L1379
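The groupby one-liner inside streak splits the series into runs of equal values and takes the length of the last run; the same expression in isolation:

import itertools

vec = [1, 1, 0, 0, 0, 1, 1, 1]
latest_streak = [len(list(g)) for _, g in itertools.groupby(vec)][-1]
assert latest_streak == 3  # the trailing run of three 1s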
GoogleCloudPlatform/cloudml-samples
efddc4a9898127e55edc0946557aca4bfaf59705
tensorflow/standard/legacy/flowers/pipeline.py
python
FlowersE2E.make_request_json
(self, uri, output_json)
Produces a JSON request suitable to send to CloudML Prediction API. Args: uri: The input image URI. output_json: File handle of the output json where request will be written.
Produces a JSON request suitable to send to CloudML Prediction API.
[ "Produces", "a", "JSON", "request", "suitable", "to", "send", "to", "CloudML", "Prediction", "API", "." ]
def make_request_json(self, uri, output_json): """Produces a JSON request suitable to send to CloudML Prediction API. Args: uri: The input image URI. output_json: File handle of the output json where request will be written. """ def _open_file_read_binary(uri): try: return file_io.FileIO(uri, mode='rb') except errors.InvalidArgumentError: return file_io.FileIO(uri, mode='r') with open(output_json, 'w') as outf: with _open_file_read_binary(uri) as f: image_bytes = f.read() image = Image.open(io.BytesIO(image_bytes)).convert('RGB') image = image.resize((299, 299), Image.BILINEAR) resized_image = io.BytesIO() image.save(resized_image, format='JPEG') encoded_image = base64.b64encode(resized_image.getvalue()) row = json.dumps({'key': uri, 'image_bytes': {'b64': encoded_image}}) outf.write(row) outf.write('\n')
[ "def", "make_request_json", "(", "self", ",", "uri", ",", "output_json", ")", ":", "def", "_open_file_read_binary", "(", "uri", ")", ":", "try", ":", "return", "file_io", ".", "FileIO", "(", "uri", ",", "mode", "=", "'rb'", ")", "except", "errors", ".", "InvalidArgumentError", ":", "return", "file_io", ".", "FileIO", "(", "uri", ",", "mode", "=", "'r'", ")", "with", "open", "(", "output_json", ",", "'w'", ")", "as", "outf", ":", "with", "_open_file_read_binary", "(", "uri", ")", "as", "f", ":", "image_bytes", "=", "f", ".", "read", "(", ")", "image", "=", "Image", ".", "open", "(", "io", ".", "BytesIO", "(", "image_bytes", ")", ")", ".", "convert", "(", "'RGB'", ")", "image", "=", "image", ".", "resize", "(", "(", "299", ",", "299", ")", ",", "Image", ".", "BILINEAR", ")", "resized_image", "=", "io", ".", "BytesIO", "(", ")", "image", ".", "save", "(", "resized_image", ",", "format", "=", "'JPEG'", ")", "encoded_image", "=", "base64", ".", "b64encode", "(", "resized_image", ".", "getvalue", "(", ")", ")", "row", "=", "json", ".", "dumps", "(", "{", "'key'", ":", "uri", ",", "'image_bytes'", ":", "{", "'b64'", ":", "encoded_image", "}", "}", ")", "outf", ".", "write", "(", "row", ")", "outf", ".", "write", "(", "'\\n'", ")" ]
https://github.com/GoogleCloudPlatform/cloudml-samples/blob/efddc4a9898127e55edc0946557aca4bfaf59705/tensorflow/standard/legacy/flowers/pipeline.py#L349-L372
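The JSON rows this method writes wrap base64-encoded image bytes under the 'b64' key the CloudML prediction API expects. A sketch of the payload shape without the PIL resize step (the URI and bytes below are made up; Python 3 needs the .decode() the original Python 2 code could skip):

import base64
import json

fake_jpeg = b'\xff\xd8\xff\xe0 not a real image'
row = json.dumps({'key': 'gs://bucket/flower.jpg',
                  'image_bytes': {'b64': base64.b64encode(fake_jpeg).decode('ascii')}})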
apache/libcloud
90971e17bfd7b6bb97b2489986472c531cc8e140
libcloud/compute/drivers/equinixmetal.py
python
EquinixMetalNodeDriver.create_volume_snapshot
(self, volume, name="")
return volume.list_snapshots()[-1]
Create a new volume snapshot. :param volume: Volume to create a snapshot for :type volume: class:`StorageVolume` :return: The newly created volume snapshot. :rtype: :class:`VolumeSnapshot`
Create a new volume snapshot.
[ "Create", "a", "new", "volume", "snapshot", "." ]
def create_volume_snapshot(self, volume, name=""): """ Create a new volume snapshot. :param volume: Volume to create a snapshot for :type volume: class:`StorageVolume` :return: The newly created volume snapshot. :rtype: :class:`VolumeSnapshot` """ path = "/metal/v1/storage/%s/snapshots" % volume.id res = self.connection.request(path, method="POST") assert res.status == httplib.ACCEPTED return volume.list_snapshots()[-1]
[ "def", "create_volume_snapshot", "(", "self", ",", "volume", ",", "name", "=", "\"\"", ")", ":", "path", "=", "\"/metal/v1/storage/%s/snapshots\"", "%", "volume", ".", "id", "res", "=", "self", ".", "connection", ".", "request", "(", "path", ",", "method", "=", "\"POST\"", ")", "assert", "res", ".", "status", "==", "httplib", ".", "ACCEPTED", "return", "volume", ".", "list_snapshots", "(", ")", "[", "-", "1", "]" ]
https://github.com/apache/libcloud/blob/90971e17bfd7b6bb97b2489986472c531cc8e140/libcloud/compute/drivers/equinixmetal.py#L856-L869
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/lib-tk/ttk.py
python
Style.theme_create
(self, themename, parent=None, settings=None)
Creates a new theme. It is an error if themename already exists. If parent is specified, the new theme will inherit styles, elements and layouts from the specified parent theme. If settings are present, they are expected to have the same syntax used for theme_settings.
Creates a new theme.
[ "Creates", "a", "new", "theme", "." ]
def theme_create(self, themename, parent=None, settings=None): """Creates a new theme. It is an error if themename already exists. If parent is specified, the new theme will inherit styles, elements and layouts from the specified parent theme. If settings are present, they are expected to have the same syntax used for theme_settings.""" script = _script_from_settings(settings) if settings else '' if parent: self.tk.call(self._name, "theme", "create", themename, "-parent", parent, "-settings", script) else: self.tk.call(self._name, "theme", "create", themename, "-settings", script)
[ "def", "theme_create", "(", "self", ",", "themename", ",", "parent", "=", "None", ",", "settings", "=", "None", ")", ":", "script", "=", "_script_from_settings", "(", "settings", ")", "if", "settings", "else", "''", "if", "parent", ":", "self", ".", "tk", ".", "call", "(", "self", ".", "_name", ",", "\"theme\"", ",", "\"create\"", ",", "themename", ",", "\"-parent\"", ",", "parent", ",", "\"-settings\"", ",", "script", ")", "else", ":", "self", ".", "tk", ".", "call", "(", "self", ".", "_name", ",", "\"theme\"", ",", "\"create\"", ",", "themename", ",", "\"-settings\"", ",", "script", ")" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/lib-tk/ttk.py#L479-L493
CalebBell/thermo
572a47d1b03d49fe609b8d5f826fa6a7cde00828
thermo/phases/helmholtz_eos.py
python
HelmholtzEOS.dA_dtau
(self)
return dA_dtau
[]
def dA_dtau(self): try: return self._dA_dtau except: pass dA_dtau = self._dA_dtau = self._dAr_dtau_func(self.tau, self.delta) + self.dA0_dtau return dA_dtau
[ "def", "dA_dtau", "(", "self", ")", ":", "try", ":", "return", "self", ".", "_dA_dtau", "except", ":", "pass", "dA_dtau", "=", "self", ".", "_dA_dtau", "=", "self", ".", "_dAr_dtau_func", "(", "self", ".", "tau", ",", "self", ".", "delta", ")", "+", "self", ".", "dA0_dtau", "return", "dA_dtau" ]
https://github.com/CalebBell/thermo/blob/572a47d1b03d49fe609b8d5f826fa6a7cde00828/thermo/phases/helmholtz_eos.py#L100-L106
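dA_dtau caches its result on the instance and treats a missing attribute as a cache miss. The same pattern in isolation, with the bare except narrowed to AttributeError and a placeholder for the tau/delta evaluation:

class Cached(object):
    def value(self):
        try:
            return self._value
        except AttributeError:
            pass
        self._value = 42  # stand-in for the expensive _dAr_dtau_func(tau, delta) call
        return self._value

c = Cached()
assert c.value() == c.value() == 42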
tensorflow/models
6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3
research/cognitive_planning/train_supervised_active_vision.py
python
create_modality_types
()
return [conversion_dict[k] for k in modality_types]
Parses the modality_types and returns a list of task_env.ModalityType.
Parses the modality_types and returns a list of task_env.ModalityType.
[ "Parses", "the", "modality_types", "and", "returns", "a", "list", "of", "task_env", ".", "ModalityType", "." ]
def create_modality_types(): """Parses the modality_types and returns a list of task_env.ModalityType.""" if not FLAGS.modality_types: raise ValueError('there needs to be at least one modality type') modality_types = FLAGS.modality_types.split('_') for x in modality_types: if x not in ['image', 'sseg', 'det', 'depth']: raise ValueError('invalid modality type: {}'.format(x)) conversion_dict = { 'image': task_env.ModalityTypes.IMAGE, 'sseg': task_env.ModalityTypes.SEMANTIC_SEGMENTATION, 'depth': task_env.ModalityTypes.DEPTH, 'det': task_env.ModalityTypes.OBJECT_DETECTION, } return [conversion_dict[k] for k in modality_types]
[ "def", "create_modality_types", "(", ")", ":", "if", "not", "FLAGS", ".", "modality_types", ":", "raise", "ValueError", "(", "'there needs to be at least one modality type'", ")", "modality_types", "=", "FLAGS", ".", "modality_types", ".", "split", "(", "'_'", ")", "for", "x", "in", "modality_types", ":", "if", "x", "not", "in", "[", "'image'", ",", "'sseg'", ",", "'det'", ",", "'depth'", "]", ":", "raise", "ValueError", "(", "'invalid modality type: {}'", ".", "format", "(", "x", ")", ")", "conversion_dict", "=", "{", "'image'", ":", "task_env", ".", "ModalityTypes", ".", "IMAGE", ",", "'sseg'", ":", "task_env", ".", "ModalityTypes", ".", "SEMANTIC_SEGMENTATION", ",", "'depth'", ":", "task_env", ".", "ModalityTypes", ".", "DEPTH", ",", "'det'", ":", "task_env", ".", "ModalityTypes", ".", "OBJECT_DETECTION", ",", "}", "return", "[", "conversion_dict", "[", "k", "]", "for", "k", "in", "modality_types", "]" ]
https://github.com/tensorflow/models/blob/6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3/research/cognitive_planning/train_supervised_active_vision.py#L138-L153
d6t/d6tflow
ccd161057793e04ac0d090a4968f1ac9abb43e5b
d6tflow/functional.py
python
Workflow.delete
(self, func_to_reset, *args, **kwargs)
Possibly dangerous! `delete(func)` will delete *all files* in the `data/func` directory of the given func. Useful if you want to delete all function related outputs. Consider using `reset(func, params)` to reset a specific func
Possibly dangerous! `delete(func)` will delete *all files* in the `data/func` directory of the given func. Useful if you want to delete all function related outputs. Consider using `reset(func, params)` to reset a specific func
[ "Possibly", "dangerous!", "delete", "(", "func", ")", "will", "delete", "*", "all", "files", "*", "in", "the", "data", "/", "func", "directory", "of", "the", "given", "func", ".", "Useful", "if", "you", "want", "to", "delete", "all", "function", "related", "outputs", ".", "Consider", "using", "reset", "(", "func", "params", ")", "to", "reset", "a", "specific", "func" ]
def delete(self, func_to_reset, *args, **kwargs): """Possibly dangerous! `delete(func)` will delete *all files* in the `data/func` directory of the given func. Useful if you want to delete all function related outputs. Consider using `reset(func, params)` to reset a specific func """ name = func_to_reset if isinstance( func_to_reset, str) else func_to_reset.__name__ task = self.steps[name]() if task.path: dirpath = pathlib.Path(task.path) else: dirpath = d6tflow.settings.dirpath path = task._getpath(dirpath, []) for f in path.parent.glob('*'): f.unlink()
[ "def", "delete", "(", "self", ",", "func_to_reset", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "name", "=", "func_to_reset", "if", "isinstance", "(", "func_to_reset", ",", "str", ")", "else", "func_to_reset", ".", "__name__", "task", "=", "self", ".", "steps", "[", "name", "]", "(", ")", "if", "task", ".", "path", ":", "dirpath", "=", "pathlib", ".", "Path", "(", "task", ".", "path", ")", "else", ":", "dirpath", "=", "d6tflow", ".", "settings", ".", "dirpath", "path", "=", "task", ".", "_getpath", "(", "dirpath", ",", "[", "]", ")", "for", "f", "in", "path", ".", "parent", ".", "glob", "(", "'*'", ")", ":", "f", ".", "unlink", "(", ")" ]
https://github.com/d6t/d6tflow/blob/ccd161057793e04ac0d090a4968f1ac9abb43e5b/d6tflow/functional.py#L307-L322
robotlearn/pyrobolearn
9cd7c060723fda7d2779fa255ac998c2c82b8436
pyrobolearn/models/model.py
python
Model.is_discriminative
()
Return True if the model is discriminative, that is, if the model estimates the conditional probability :math:`p(y|x)`. Returns: bool: True if the model is discriminative.
Return True if the model is discriminative, that is, if the model estimates the conditional probability :math:`p(y|x)`.
[ "Return", "True", "if", "the", "model", "is", "discriminative", "that", "is", "if", "the", "model", "estimates", "the", "conditional", "probability", ":", "math", ":", "p", "(", "y|x", ")", "." ]
def is_discriminative(): """ Return True if the model is discriminative, that is, if the model estimates the conditional probability :math:`p(y|x)`. Returns: bool: True if the model is discriminative. """ raise NotImplementedError
[ "def", "is_discriminative", "(", ")", ":", "raise", "NotImplementedError" ]
https://github.com/robotlearn/pyrobolearn/blob/9cd7c060723fda7d2779fa255ac998c2c82b8436/pyrobolearn/models/model.py#L176-L184
sqlalchemy/sqlalchemy
eb716884a4abcabae84a6aaba105568e925b7d27
lib/sqlalchemy/engine/interfaces.py
python
Dialect.get_foreign_keys
( self, connection: "Connection", table_name: str, schema: Optional[str] = None, **kw: Any, )
Return information about foreign_keys in ``table_name``. Given a :class:`_engine.Connection`, a string ``table_name``, and an optional string ``schema``, return foreign key information as a list of dicts corresponding to the :class:`.ReflectedForeignKeyConstraint` dictionary.
Return information about foreign_keys in ``table_name``.
[ "Return", "information", "about", "foreign_keys", "in", "table_name", "." ]
def get_foreign_keys( self, connection: "Connection", table_name: str, schema: Optional[str] = None, **kw: Any, ) -> List[ReflectedForeignKeyConstraint]: """Return information about foreign_keys in ``table_name``. Given a :class:`_engine.Connection`, a string ``table_name``, and an optional string ``schema``, return foreign key information as a list of dicts corresponding to the :class:`.ReflectedForeignKeyConstraint` dictionary. """ raise NotImplementedError()
[ "def", "get_foreign_keys", "(", "self", ",", "connection", ":", "\"Connection\"", ",", "table_name", ":", "str", ",", "schema", ":", "Optional", "[", "str", "]", "=", "None", ",", "*", "*", "kw", ":", "Any", ",", ")", "->", "List", "[", "ReflectedForeignKeyConstraint", "]", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/sqlalchemy/sqlalchemy/blob/eb716884a4abcabae84a6aaba105568e925b7d27/lib/sqlalchemy/engine/interfaces.py#L764-L780
IJDykeman/wangTiles
7c1ee2095ebdf7f72bce07d94c6484915d5cae8b
experimental_code/tiles_3d/venv_mac_py3/lib/python2.7/site-packages/setuptools/command/egg_info.py
python
FileList.prune
(self, dir)
return self._remove_files(match.match)
Filter out files from 'dir/'.
Filter out files from 'dir/'.
[ "Filter", "out", "files", "from", "dir", "/", "." ]
def prune(self, dir): """Filter out files from 'dir/'.""" match = translate_pattern(os.path.join(dir, '**')) return self._remove_files(match.match)
[ "def", "prune", "(", "self", ",", "dir", ")", ":", "match", "=", "translate_pattern", "(", "os", ".", "path", ".", "join", "(", "dir", ",", "'**'", ")", ")", "return", "self", ".", "_remove_files", "(", "match", ".", "match", ")" ]
https://github.com/IJDykeman/wangTiles/blob/7c1ee2095ebdf7f72bce07d94c6484915d5cae8b/experimental_code/tiles_3d/venv_mac_py3/lib/python2.7/site-packages/setuptools/command/egg_info.py#L449-L452
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/mediaroom/media_player.py
python
MediaroomDevice.async_turn_off
(self)
Turn off the receiver.
Turn off the receiver.
[ "Turn", "off", "the", "receiver", "." ]
async def async_turn_off(self): """Turn off the receiver.""" try: self.set_state(await self.stb.turn_off()) if self._optimistic: self._state = STATE_STANDBY self._available = True except PyMediaroomError: self._available = False self.async_write_ha_state()
[ "async", "def", "async_turn_off", "(", "self", ")", ":", "try", ":", "self", ".", "set_state", "(", "await", "self", ".", "stb", ".", "turn_off", "(", ")", ")", "if", "self", ".", "_optimistic", ":", "self", ".", "_state", "=", "STATE_STANDBY", "self", ".", "_available", "=", "True", "except", "PyMediaroomError", ":", "self", ".", "_available", "=", "False", "self", ".", "async_write_ha_state", "(", ")" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/mediaroom/media_player.py#L272-L282
digidotcom/xbee-python
0757f4be0017530c205175fbee8f9f61be9614d1
digi/xbee/packets/zigbee.py
python
RouteRecordIndicatorPacket.create_packet
(raw, operating_mode)
return RouteRecordIndicatorPacket( XBee64BitAddress(raw[4:12]), XBee16BitAddress(raw[12:14]), raw[14], hops, op_mode=operating_mode)
Override method. Returns: :class:`.RouteRecordIndicatorPacket`. Raises: InvalidPacketException: If the bytearray length is less than 17. (start delim. + length (2 bytes) + frame type + 64bit addr. + 16bit addr. + Receive options + num of addrs + checksum = 17 bytes). InvalidPacketException: If the length field of `raw` is different from its real length. (length field: bytes 1 and 3) InvalidPacketException: If the first byte of 'raw' is not the header byte. See :class:`.SpecialByte`. InvalidPacketException: If the calculated checksum is different from the checksum field value (last byte). InvalidPacketException: If the frame type is not :attr:`.ApiFrameType.ROUTE_RECORD_INDICATOR`. InvalidPacketException: If the number of hops does not match with the number of 16-bit addresses. InvalidOperatingModeException: If `operating_mode` is not supported. .. seealso:: | :meth:`.XBeePacket.create_packet` | :meth:`.XBeeAPIPacket._check_api_packet`
Override method.
[ "Override", "method", "." ]
def create_packet(raw, operating_mode): """ Override method. Returns: :class:`.RouteRecordIndicatorPacket`. Raises: InvalidPacketException: If the bytearray length is less than 17. (start delim. + length (2 bytes) + frame type + 64bit addr. + 16bit addr. + Receive options + num of addrs + checksum = 17 bytes). InvalidPacketException: If the length field of `raw` is different from its real length. (length field: bytes 2 and 3) InvalidPacketException: If the first byte of 'raw' is not the header byte. See :class:`.SpecialByte`. InvalidPacketException: If the calculated checksum is different from the checksum field value (last byte). InvalidPacketException: If the frame type is not :attr:`.ApiFrameType.ROUTE_RECORD_INDICATOR`. InvalidPacketException: If the number of hops does not match with the number of 16-bit addresses. InvalidOperatingModeException: If `operating_mode` is not supported. .. seealso:: | :meth:`.XBeePacket.create_packet` | :meth:`.XBeeAPIPacket._check_api_packet` """ if operating_mode not in (OperatingMode.ESCAPED_API_MODE, OperatingMode.API_MODE): raise InvalidOperatingModeException( operating_mode.name + " is not supported.") XBeeAPIPacket._check_api_packet( raw, min_length=RouteRecordIndicatorPacket.__MIN_PACKET_LENGTH) if raw[3] != ApiFrameType.ROUTE_RECORD_INDICATOR.code: raise InvalidPacketException( "This packet is not a Route Record Indicator packet.") hops = [XBee16BitAddress(raw[i:i+2]) for i in range(16, len(raw) - 1, 2)] if raw[15] != len(hops): raise InvalidPacketException("Specified number of hops does not " "match with the length of addresses.") return RouteRecordIndicatorPacket( XBee64BitAddress(raw[4:12]), XBee16BitAddress(raw[12:14]), raw[14], hops, op_mode=operating_mode)
[ "def", "create_packet", "(", "raw", ",", "operating_mode", ")", ":", "if", "operating_mode", "not", "in", "(", "OperatingMode", ".", "ESCAPED_API_MODE", ",", "OperatingMode", ".", "API_MODE", ")", ":", "raise", "InvalidOperatingModeException", "(", "operating_mode", ".", "name", "+", "\" is not supported.\"", ")", "XBeeAPIPacket", ".", "_check_api_packet", "(", "raw", ",", "min_length", "=", "RouteRecordIndicatorPacket", ".", "__MIN_PACKET_LENGTH", ")", "if", "raw", "[", "3", "]", "!=", "ApiFrameType", ".", "ROUTE_RECORD_INDICATOR", ".", "code", ":", "raise", "InvalidPacketException", "(", "\"This packet is not a Route Record Indicator packet.\"", ")", "hops", "=", "[", "XBee16BitAddress", "(", "raw", "[", "i", ":", "i", "+", "2", "]", ")", "for", "i", "in", "range", "(", "16", ",", "len", "(", "raw", ")", "-", "1", ",", "2", ")", "]", "if", "raw", "[", "15", "]", "!=", "len", "(", "hops", ")", ":", "raise", "InvalidPacketException", "(", "\"Specified number of hops does not \"", "\"match with the length of addresses.\"", ")", "return", "RouteRecordIndicatorPacket", "(", "XBee64BitAddress", "(", "raw", "[", "4", ":", "12", "]", ")", ",", "XBee16BitAddress", "(", "raw", "[", "12", ":", "14", "]", ")", ",", "raw", "[", "14", "]", ",", "hops", ",", "op_mode", "=", "operating_mode", ")" ]
https://github.com/digidotcom/xbee-python/blob/0757f4be0017530c205175fbee8f9f61be9614d1/digi/xbee/packets/zigbee.py#L439-L488
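The hop-extraction slice above is the interesting part; a hedged sketch with plain byte pairs in place of XBee16BitAddress, over a made-up frame-sized buffer.

# Addresses start at byte 16 and run in 2-byte steps up to the checksum byte.
raw = bytearray(range(21))          # made-up buffer, not a valid frame
hops = [bytes(raw[i:i + 2]) for i in range(16, len(raw) - 1, 2)]
print(hops)                         # [b'\x10\x11', b'\x12\x13']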
number5/cloud-init
19948dbaf40309355e1a2dbef116efb0ce66245c
cloudinit/util.py
python
blkid
(devs=None, disable_cache=False)
return ret
Get all device tags details from blkid. @param devs: Optional list of device paths you wish to query. @param disable_cache: Bool, set True to start with clean cache. @return: Dict of key value pairs of info for the device.
Get all device tags details from blkid.
[ "Get", "all", "device", "tags", "details", "from", "blkid", "." ]
def blkid(devs=None, disable_cache=False): """Get all device tags details from blkid. @param devs: Optional list of device paths you wish to query. @param disable_cache: Bool, set True to start with clean cache. @return: Dict of key value pairs of info for the device. """ if devs is None: devs = [] else: devs = list(devs) cmd = ["blkid", "-o", "full"] if disable_cache: cmd.extend(["-c", "/dev/null"]) cmd.extend(devs) # we have to decode with 'replace' as shlex.split (called by # load_shell_content) can't take bytes. So this is potentially # lossy of non-utf-8 chars in blkid output. out, _ = subp.subp(cmd, capture=True, decode="replace") ret = {} for line in out.splitlines(): dev, _, data = line.partition(":") ret[dev] = load_shell_content(data) ret[dev]["DEVNAME"] = dev return ret
[ "def", "blkid", "(", "devs", "=", "None", ",", "disable_cache", "=", "False", ")", ":", "if", "devs", "is", "None", ":", "devs", "=", "[", "]", "else", ":", "devs", "=", "list", "(", "devs", ")", "cmd", "=", "[", "\"blkid\"", ",", "\"-o\"", ",", "\"full\"", "]", "if", "disable_cache", ":", "cmd", ".", "extend", "(", "[", "\"-c\"", ",", "\"/dev/null\"", "]", ")", "cmd", ".", "extend", "(", "devs", ")", "# we have to decode with 'replace' as shlex.split (called by", "# load_shell_content) can't take bytes. So this is potentially", "# lossy of non-utf-8 chars in blkid output.", "out", ",", "_", "=", "subp", ".", "subp", "(", "cmd", ",", "capture", "=", "True", ",", "decode", "=", "\"replace\"", ")", "ret", "=", "{", "}", "for", "line", "in", "out", ".", "splitlines", "(", ")", ":", "dev", ",", "_", ",", "data", "=", "line", ".", "partition", "(", "\":\"", ")", "ret", "[", "dev", "]", "=", "load_shell_content", "(", "data", ")", "ret", "[", "dev", "]", "[", "\"DEVNAME\"", "]", "=", "dev", "return", "ret" ]
https://github.com/number5/cloud-init/blob/19948dbaf40309355e1a2dbef116efb0ce66245c/cloudinit/util.py#L1399-L1427
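A hedged sketch of the per-line parsing the function relies on, with shlex standing in for load_shell_content and a fabricated blkid output line.

import shlex

line = '/dev/sda1: UUID="abcd-1234" TYPE="ext4"'
dev, _, data = line.partition(':')
info = dict(pair.split('=', 1) for pair in shlex.split(data))
info['DEVNAME'] = dev
print(info)  # {'UUID': 'abcd-1234', 'TYPE': 'ext4', 'DEVNAME': '/dev/sda1'}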
IDArlingTeam/IDArling
d15b9b7c8bdeb992c569efcc49adf7642bb82cdf
idarling/module.py
python
Module._uninstall
(self)
Uninstall the module. Overridden by the module.
Uninstall the module. Overridden by the module.
[ "Uninstall", "the", "module", ".", "Overridden", "by", "the", "module", "." ]
def _uninstall(self): """Uninstall the module. Overridden by the module.""" raise NotImplementedError("_uninstall() not implemented")
[ "def", "_uninstall", "(", "self", ")", ":", "raise", "NotImplementedError", "(", "\"_uninstall() not implemented\"", ")" ]
https://github.com/IDArlingTeam/IDArling/blob/d15b9b7c8bdeb992c569efcc49adf7642bb82cdf/idarling/module.py#L43-L45
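A minimal sketch of the template pattern the stub enforces; the concrete subclass name is hypothetical.

class Module:
    def _uninstall(self):
        raise NotImplementedError("_uninstall() not implemented")

class PaintersModule(Module):  # hypothetical concrete module
    def _uninstall(self):
        return True

print(PaintersModule()._uninstall())   # True: subclass provides the hook
print(isinstance(PaintersModule(), Module))  # True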
log2timeline/plaso
fe2e316b8c76a0141760c0f2f181d84acb83abc2
plaso/parsers/plist_plugins/interface.py
python
PlistPathFilter.Match
(self, filename_lower_case)
return bool(filename_lower_case == self._filename_lower_case)
Determines if a plist filename matches the filter. Note that this method does a case insensitive comparison. Args: filename_lower_case (str): filename of the plist in lower case. Returns: bool: True if the filename matches the filter.
Determines if a plist filename matches the filter.
[ "Determines", "if", "a", "plist", "filename", "matches", "the", "filter", "." ]
def Match(self, filename_lower_case): """Determines if a plist filename matches the filter. Note that this method does a case insensitive comparison. Args: filename_lower_case (str): filename of the plist in lower case. Returns: bool: True if the filename matches the filter. """ return bool(filename_lower_case == self._filename_lower_case)
[ "def", "Match", "(", "self", ",", "filename_lower_case", ")", ":", "return", "bool", "(", "filename_lower_case", "==", "self", ".", "_filename_lower_case", ")" ]
https://github.com/log2timeline/plaso/blob/fe2e316b8c76a0141760c0f2f181d84acb83abc2/plaso/parsers/plist_plugins/interface.py#L30-L41
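A usage sketch, assuming a constructor that lower-cases the configured filename once (only Match() appears in the record above).

class PlistPathFilter(object):
    def __init__(self, filename):
        # Assumed constructor: store the filename lower-cased up front.
        self._filename_lower_case = filename.lower()

    def Match(self, filename_lower_case):
        return bool(filename_lower_case == self._filename_lower_case)

path_filter = PlistPathFilter('com.apple.TimeMachine.plist')
print(path_filter.Match('com.apple.timemachine.plist'))  # True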
taolei87/rcnn
7c45f497d4507047549480c2dc579866c76eec82
code/rationale/rationale_dependent.py
python
Model.evaluate_data
(self, batches_x, batches_y, eval_func, sampling=False)
return tot_obj/n, tot_mse/n, tot_diff/n, p1/n
[]
def evaluate_data(self, batches_x, batches_y, eval_func, sampling=False): padding_id = self.embedding_layer.vocab_map["<padding>"] tot_obj, tot_mse, tot_diff, p1 = 0.0, 0.0, 0.0, 0.0 for bx, by in zip(batches_x, batches_y): if not sampling: e, d = eval_func(bx, by) else: mask = bx != padding_id bz, o, e, d = eval_func(bx, by) p1 += np.sum(bz*mask) / (np.sum(mask) + 1e-8) tot_obj += o tot_mse += e tot_diff += d n = len(batches_x) if not sampling: return tot_mse/n, tot_diff/n return tot_obj/n, tot_mse/n, tot_diff/n, p1/n
[ "def", "evaluate_data", "(", "self", ",", "batches_x", ",", "batches_y", ",", "eval_func", ",", "sampling", "=", "False", ")", ":", "padding_id", "=", "self", ".", "embedding_layer", ".", "vocab_map", "[", "\"<padding>\"", "]", "tot_obj", ",", "tot_mse", ",", "tot_diff", ",", "p1", "=", "0.0", ",", "0.0", ",", "0.0", ",", "0.0", "for", "bx", ",", "by", "in", "zip", "(", "batches_x", ",", "batches_y", ")", ":", "if", "not", "sampling", ":", "e", ",", "d", "=", "eval_func", "(", "bx", ",", "by", ")", "else", ":", "mask", "=", "bx", "!=", "padding_id", "bz", ",", "o", ",", "e", ",", "d", "=", "eval_func", "(", "bx", ",", "by", ")", "p1", "+=", "np", ".", "sum", "(", "bz", "*", "mask", ")", "/", "(", "np", ".", "sum", "(", "mask", ")", "+", "1e-8", ")", "tot_obj", "+=", "o", "tot_mse", "+=", "e", "tot_diff", "+=", "d", "n", "=", "len", "(", "batches_x", ")", "if", "not", "sampling", ":", "return", "tot_mse", "/", "n", ",", "tot_diff", "/", "n", "return", "tot_obj", "/", "n", ",", "tot_mse", "/", "n", ",", "tot_diff", "/", "n", ",", "p1", "/", "n" ]
https://github.com/taolei87/rcnn/blob/7c45f497d4507047549480c2dc579866c76eec82/code/rationale/rationale_dependent.py#L527-L543
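A hedged sketch of the masked selection-rate term (p1) above, with toy arrays; the padding id and batch contents are made up.

import numpy as np

padding_id = 0
bx = np.array([[3, 5, 0], [7, 0, 0]])    # token ids, 0 = <padding>
bz = np.array([[1, 0, 0], [1, 0, 0]])    # binary rationale selections
mask = bx != padding_id                   # ignore padding positions
p1 = np.sum(bz * mask) / (np.sum(mask) + 1e-8)
print(round(float(p1), 3))                # 0.667: 2 of 3 real tokens selected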
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/django-1.3/django/contrib/gis/maps/google/gmap.py
python
GoogleMap.render
(self)
return render_to_string(self.template, params)
Generates the JavaScript necessary for displaying this Google Map.
Generates the JavaScript necessary for displaying this Google Map.
[ "Generates", "the", "JavaScript", "necessary", "for", "displaying", "this", "Google", "Map", "." ]
def render(self): """ Generates the JavaScript necessary for displaying this Google Map. """ params = {'calc_zoom' : self.calc_zoom, 'center' : self.center, 'dom_id' : self.dom_id, 'js_module' : self.js_module, 'kml_urls' : self.kml_urls, 'zoom' : self.zoom, 'polygons' : self.polygons, 'polylines' : self.polylines, 'icons': self.icons, 'markers' : self.markers, } params.update(self.extra_context) return render_to_string(self.template, params)
[ "def", "render", "(", "self", ")", ":", "params", "=", "{", "'calc_zoom'", ":", "self", ".", "calc_zoom", ",", "'center'", ":", "self", ".", "center", ",", "'dom_id'", ":", "self", ".", "dom_id", ",", "'js_module'", ":", "self", ".", "js_module", ",", "'kml_urls'", ":", "self", ".", "kml_urls", ",", "'zoom'", ":", "self", ".", "zoom", ",", "'polygons'", ":", "self", ".", "polygons", ",", "'polylines'", ":", "self", ".", "polylines", ",", "'icons'", ":", "self", ".", "icons", ",", "'markers'", ":", "self", ".", "markers", ",", "}", "params", ".", "update", "(", "self", ".", "extra_context", ")", "return", "render_to_string", "(", "self", ".", "template", ",", "params", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/django-1.3/django/contrib/gis/maps/google/gmap.py#L90-L106
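The method is a plain collect-params-and-render pattern; a hedged sketch using string.Template in place of Django's render_to_string, with invented keys.

from string import Template

template = Template('new GMap(document.getElementById("$dom_id")).setZoom($zoom);')
params = {'dom_id': 'map_canvas', 'zoom': 4}
params.update({'zoom': 6})  # extra_context wins, as in the method above
print(template.substitute(params))
# new GMap(document.getElementById("map_canvas")).setZoom(6);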
jfalken/github_commit_crawler
a964d7dcd8b79b680ea142f7c25245babe4fd1e7
ghcc_process/libs/mongo_utils.py
python
mdb_insert_results
(results)
return oid
inserts result records into mongodb :param results: a list of result dicts, from `do_audit_events` returns object id of the insert
inserts result records into mongodb :param results: a list of result dicts, from `do_audit_events`
[ "inserts", "result", "records", "into", "mongodb", ":", "param", "results", ":", "a", "list", "of", "result", "dicts", "from", "do_audit_events" ]
def mdb_insert_results(results): ''' inserts result records into mongodb :param results: a list of result dicts, from `do_audit_events` returns object id of the insert ''' db = _connect_mongo() col = db['results'] col.ensure_index('uid') col.ensure_index('matched') oid = col.insert(results) return oid
[ "def", "mdb_insert_results", "(", "results", ")", ":", "db", "=", "_connect_mongo", "(", ")", "col", "=", "db", "[", "'results'", "]", "col", ".", "ensure_index", "(", "'uid'", ")", "col", ".", "ensure_index", "(", "'matched'", ")", "oid", "=", "col", ".", "insert", "(", "results", ")", "return", "oid" ]
https://github.com/jfalken/github_commit_crawler/blob/a964d7dcd8b79b680ea142f7c25245babe4fd1e7/ghcc_process/libs/mongo_utils.py#L55-L66
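ensure_index() and insert() were removed in PyMongo 3; a hedged modern rewrite of the function above (the database name and a reachable server are assumptions):

from pymongo import MongoClient

def mdb_insert_results(results):
    db = MongoClient()['ghcc']        # hypothetical database name
    col = db['results']
    col.create_index('uid')
    col.create_index('matched')
    # insert_many expects a list of documents, matching the original input.
    return col.insert_many(results).inserted_ids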
spotty-cloud/spotty
1127c56112b33ac4772582e4edb70e2dfa4292f0
spotty/config/abstract_instance_config.py
python
AbstractInstanceConfig._get_instance_volumes
(self)
Returns provider-specific volumes that should be mounted on the host OS.
Returns provider-specific volumes that should be mounted on the host OS.
[ "Returns", "provider", "-", "specific", "volumes", "that", "should", "be", "mounted", "on", "the", "host", "OS", "." ]
def _get_instance_volumes(self) -> List[AbstractInstanceVolume]: """Returns provider-specific volumes that should be mounted on the host OS.""" raise NotImplementedError
[ "def", "_get_instance_volumes", "(", "self", ")", "->", "List", "[", "AbstractInstanceVolume", "]", ":", "raise", "NotImplementedError" ]
https://github.com/spotty-cloud/spotty/blob/1127c56112b33ac4772582e4edb70e2dfa4292f0/spotty/config/abstract_instance_config.py#L49-L51
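abc.abstractmethod is the idiomatic alternative to raising NotImplementedError, since it fails at instantiation rather than call time; a hedged sketch with a toy volume type and a hypothetical subclass.

import abc
from typing import List

class AbstractInstanceConfig(abc.ABC):
    @abc.abstractmethod
    def _get_instance_volumes(self) -> List[str]:  # 'str' stands in for the volume type
        ...

class AwsInstanceConfig(AbstractInstanceConfig):   # hypothetical subclass
    def _get_instance_volumes(self) -> List[str]:
        return ['/dev/sdb']

print(AwsInstanceConfig()._get_instance_volumes())  # ['/dev/sdb']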
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit - MAC OSX/tools/inject/thirdparty/odict/odict.py
python
_OrderedDict.__deepcopy__
(self, memo)
return self.__class__(deepcopy(self.items(), memo), self.strict)
To allow deepcopy to work with OrderedDict. >>> from copy import deepcopy >>> a = OrderedDict([(1, 1), (2, 2), (3, 3)]) >>> a['test'] = {} >>> b = deepcopy(a) >>> b == a True >>> b is a False >>> a['test'] is b['test'] False
To allow deepcopy to work with OrderedDict.
[ "To", "allow", "deepcopy", "to", "work", "with", "OrderedDict", "." ]
def __deepcopy__(self, memo): """ To allow deepcopy to work with OrderedDict. >>> from copy import deepcopy >>> a = OrderedDict([(1, 1), (2, 2), (3, 3)]) >>> a['test'] = {} >>> b = deepcopy(a) >>> b == a True >>> b is a False >>> a['test'] is b['test'] False """ from copy import deepcopy return self.__class__(deepcopy(self.items(), memo), self.strict)
[ "def", "__deepcopy__", "(", "self", ",", "memo", ")", ":", "from", "copy", "import", "deepcopy", "return", "self", ".", "__class__", "(", "deepcopy", "(", "self", ".", "items", "(", ")", ",", "memo", ")", ",", "self", ".", "strict", ")" ]
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit - MAC OSX/tools/inject/thirdparty/odict/odict.py#L460-L476
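A minimal sketch of the same __deepcopy__ protocol on a plain container, showing that nested values are copied rather than shared.

from copy import deepcopy

class Box:
    def __init__(self, items):
        self.items = list(items)

    def __deepcopy__(self, memo):
        # Deep-copy the contents, then rebuild via the class constructor.
        return self.__class__(deepcopy(self.items, memo))

a = Box([{'k': 1}])
b = deepcopy(a)
print(b.items == a.items, b.items[0] is a.items[0])  # True False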