repo
stringlengths
7
55
path
stringlengths
4
223
url
stringlengths
87
315
code
stringlengths
75
104k
code_tokens
list
docstring
stringlengths
1
46.9k
docstring_tokens
list
language
stringclasses
1 value
partition
stringclasses
3 values
avg_line_len
float64
7.91
980
ransford/sllurp
sllurp/epc/gtin.py
https://github.com/ransford/sllurp/blob/d744b7e17d7ba64a24d9a31bde6cba65d91ad9b1/sllurp/epc/gtin.py#L4-L18
def calculate_check_digit(gtin):
    '''Given a GTIN (8-14) or SSCC, calculate its appropriate check digit'''
    # GS1 scheme: walking from the rightmost digit, every digit in an
    # even position (0-based) is weighted by 3, the rest by 1.
    weighted_total = sum(
        int(ch) * (3 if position % 2 == 0 else 1)
        for position, ch in enumerate(gtin[::-1])
    )
    # Distance from the total up to the next multiple of ten.
    return (10 - weighted_total % 10) % 10
[ "def", "calculate_check_digit", "(", "gtin", ")", ":", "reverse_gtin", "=", "gtin", "[", ":", ":", "-", "1", "]", "total", "=", "0", "count", "=", "0", "for", "char", "in", "reverse_gtin", ":", "digit", "=", "int", "(", "char", ")", "if", "count", "%", "2", "==", "0", ":", "digit", "=", "digit", "*", "3", "total", "=", "total", "+", "digit", "count", "=", "count", "+", "1", "nearest_multiple_of_ten", "=", "int", "(", "math", ".", "ceil", "(", "total", "/", "10.0", ")", "*", "10", ")", "return", "nearest_multiple_of_ten", "-", "total" ]
Given a GTIN (8-14) or SSCC, calculate its appropriate check digit
[ "Given", "a", "GTIN", "(", "8", "-", "14", ")", "or", "SSCC", "calculate", "its", "appropriate", "check", "digit" ]
python
train
28.733333
scrapinghub/adblockparser
adblockparser/parser.py
https://github.com/scrapinghub/adblockparser/blob/4089612d65018d38dbb88dd7f697bcb07814014d/adblockparser/parser.py#L434-L453
def _combined_regex(regexes, flags=re.IGNORECASE, use_re2=False, max_mem=None): """ Return a compiled regex combined (using OR) from a list of ``regexes``. If there is nothing to combine, None is returned. re2 library (https://github.com/axiak/pyre2) often can match and compile large regexes much faster than stdlib re module (10x is not uncommon), but there are some gotchas: * in case of "DFA out of memory" errors use ``max_mem`` argument to increase the amount of memory re2 is allowed to use. """ joined_regexes = "|".join(r for r in regexes if r) if not joined_regexes: return None if use_re2: import re2 return re2.compile(joined_regexes, flags=flags, max_mem=max_mem) return re.compile(joined_regexes, flags=flags)
[ "def", "_combined_regex", "(", "regexes", ",", "flags", "=", "re", ".", "IGNORECASE", ",", "use_re2", "=", "False", ",", "max_mem", "=", "None", ")", ":", "joined_regexes", "=", "\"|\"", ".", "join", "(", "r", "for", "r", "in", "regexes", "if", "r", ")", "if", "not", "joined_regexes", ":", "return", "None", "if", "use_re2", ":", "import", "re2", "return", "re2", ".", "compile", "(", "joined_regexes", ",", "flags", "=", "flags", ",", "max_mem", "=", "max_mem", ")", "return", "re", ".", "compile", "(", "joined_regexes", ",", "flags", "=", "flags", ")" ]
Return a compiled regex combined (using OR) from a list of ``regexes``. If there is nothing to combine, None is returned. re2 library (https://github.com/axiak/pyre2) often can match and compile large regexes much faster than stdlib re module (10x is not uncommon), but there are some gotchas: * in case of "DFA out of memory" errors use ``max_mem`` argument to increase the amount of memory re2 is allowed to use.
[ "Return", "a", "compiled", "regex", "combined", "(", "using", "OR", ")", "from", "a", "list", "of", "regexes", ".", "If", "there", "is", "nothing", "to", "combine", "None", "is", "returned", "." ]
python
train
39.25
NYUCCL/psiTurk
psiturk/amt_services.py
https://github.com/NYUCCL/psiTurk/blob/7170b992a0b5f56c165929cf87b3d3a1f3336c36/psiturk/amt_services.py#L228-L236
def validate_instance_size(self, size):
    ''' integer between 5-1024 (inclusive)

    :param size: candidate instance size in GB; anything convertible
        to ``int`` is accepted.
    :returns: ``True`` when valid, otherwise an error message string.
    '''
    # Parse once instead of calling int(size) three times; also catch
    # TypeError so non-numeric inputs (e.g. None) produce the error
    # message instead of an uncaught exception.
    try:
        value = int(size)
    except (ValueError, TypeError):
        return '*** Error: size must be a whole number between 5 and 1024.'
    if value < 5 or value > 1024:
        return '*** Error: size must be between 5-1024 GB.'
    return True
[ "def", "validate_instance_size", "(", "self", ",", "size", ")", ":", "try", ":", "int", "(", "size", ")", "except", "ValueError", ":", "return", "'*** Error: size must be a whole number between 5 and 1024.'", "if", "int", "(", "size", ")", "<", "5", "or", "int", "(", "size", ")", ">", "1024", ":", "return", "'*** Error: size must be between 5-1024 GB.'", "return", "True" ]
integer between 5-1024 (inclusive)
[ "integer", "between", "5", "-", "1024", "(", "inclusive", ")" ]
python
train
39.333333
cs01/pygdbmi
pygdbmi/gdbcontroller.py
https://github.com/cs01/pygdbmi/blob/709c781794d3c3b903891f83da011d2d995895d1/pygdbmi/gdbcontroller.py#L248-L285
def get_gdb_response(
    self, timeout_sec=DEFAULT_GDB_TIMEOUT_SEC, raise_error_on_timeout=True
):
    """Get response from GDB, and block while doing so. If GDB does not have any response ready to be read
    by timeout_sec, an exception is raised.

    Args:
        timeout_sec (float): Maximum time to wait for reponse. Must be >= 0. Will return after
        raise_error_on_timeout (bool): Whether an exception should be raised if no response was found after timeout_sec

    Returns:
        List of parsed GDB responses, returned from gdbmiparser.parse_response, with the
        additional key 'stream' which is either 'stdout' or 'stderr'

    Raises:
        GdbTimeoutError if response is not received within timeout_sec
        ValueError if select returned unexpected file number
        NoGdbProcessError if there is no gdb subprocess running
    """
    self.verify_valid_gdb_subprocess()
    if timeout_sec < 0:
        self.logger.warning("timeout_sec was negative, replacing with 0")
        timeout_sec = 0

    # Platform-specific readers share the same signature/return shape.
    reader = (
        self._get_responses_windows if USING_WINDOWS else self._get_responses_unix
    )
    responses = reader(timeout_sec)

    if raise_error_on_timeout and not responses:
        raise GdbTimeoutError(
            "Did not get response from gdb after %s seconds" % timeout_sec
        )
    return responses
[ "def", "get_gdb_response", "(", "self", ",", "timeout_sec", "=", "DEFAULT_GDB_TIMEOUT_SEC", ",", "raise_error_on_timeout", "=", "True", ")", ":", "self", ".", "verify_valid_gdb_subprocess", "(", ")", "if", "timeout_sec", "<", "0", ":", "self", ".", "logger", ".", "warning", "(", "\"timeout_sec was negative, replacing with 0\"", ")", "timeout_sec", "=", "0", "if", "USING_WINDOWS", ":", "retval", "=", "self", ".", "_get_responses_windows", "(", "timeout_sec", ")", "else", ":", "retval", "=", "self", ".", "_get_responses_unix", "(", "timeout_sec", ")", "if", "not", "retval", "and", "raise_error_on_timeout", ":", "raise", "GdbTimeoutError", "(", "\"Did not get response from gdb after %s seconds\"", "%", "timeout_sec", ")", "else", ":", "return", "retval" ]
Get response from GDB, and block while doing so. If GDB does not have any response ready to be read by timeout_sec, an exception is raised. Args: timeout_sec (float): Maximum time to wait for reponse. Must be >= 0. Will return after raise_error_on_timeout (bool): Whether an exception should be raised if no response was found after timeout_sec Returns: List of parsed GDB responses, returned from gdbmiparser.parse_response, with the additional key 'stream' which is either 'stdout' or 'stderr' Raises: GdbTimeoutError if response is not received within timeout_sec ValueError if select returned unexpected file number NoGdbProcessError if there is no gdb subprocess running
[ "Get", "response", "from", "GDB", "and", "block", "while", "doing", "so", ".", "If", "GDB", "does", "not", "have", "any", "response", "ready", "to", "be", "read", "by", "timeout_sec", "an", "exception", "is", "raised", "." ]
python
valid
38.421053
kcallin/mqtt-codec
mqtt_codec/packet.py
https://github.com/kcallin/mqtt-codec/blob/0f754250cc3f44f4376777e7e8b3676c5a4d413a/mqtt_codec/packet.py#L82-L118
def are_flags_valid(packet_type, flags):
    """True when flags comply with [MQTT-2.2.2-1] requirements based on
    packet_type; False otherwise.

    Parameters
    ----------
    packet_type: MqttControlPacketType
    flags: int
        Integer representation of 4-bit MQTT header flags field.
        Values outside of the range [0, 15] will certainly cause the
        function to return False.

    Returns
    -------
    bool
    """
    if packet_type == MqttControlPacketType.publish:
        # Publish is the only packet allowed to use all four flag bits.
        return 0 <= flags <= 15

    flags_must_equal_two = (
        MqttControlPacketType.pubrel,
        MqttControlPacketType.subscribe,
        MqttControlPacketType.unsubscribe,
    )
    if packet_type in flags_must_equal_two:
        return flags == 2

    flags_must_equal_zero = (
        MqttControlPacketType.connect,
        MqttControlPacketType.connack,
        MqttControlPacketType.puback,
        MqttControlPacketType.pubrec,
        MqttControlPacketType.pubcomp,
        MqttControlPacketType.suback,
        MqttControlPacketType.unsuback,
        MqttControlPacketType.pingreq,
        MqttControlPacketType.pingresp,
        MqttControlPacketType.disconnect,
    )
    if packet_type in flags_must_equal_zero:
        return flags == 0

    raise NotImplementedError(packet_type)
[ "def", "are_flags_valid", "(", "packet_type", ",", "flags", ")", ":", "if", "packet_type", "==", "MqttControlPacketType", ".", "publish", ":", "rv", "=", "0", "<=", "flags", "<=", "15", "elif", "packet_type", "in", "(", "MqttControlPacketType", ".", "pubrel", ",", "MqttControlPacketType", ".", "subscribe", ",", "MqttControlPacketType", ".", "unsubscribe", ")", ":", "rv", "=", "flags", "==", "2", "elif", "packet_type", "in", "(", "MqttControlPacketType", ".", "connect", ",", "MqttControlPacketType", ".", "connack", ",", "MqttControlPacketType", ".", "puback", ",", "MqttControlPacketType", ".", "pubrec", ",", "MqttControlPacketType", ".", "pubcomp", ",", "MqttControlPacketType", ".", "suback", ",", "MqttControlPacketType", ".", "unsuback", ",", "MqttControlPacketType", ".", "pingreq", ",", "MqttControlPacketType", ".", "pingresp", ",", "MqttControlPacketType", ".", "disconnect", ")", ":", "rv", "=", "flags", "==", "0", "else", ":", "raise", "NotImplementedError", "(", "packet_type", ")", "return", "rv" ]
True when flags comply with [MQTT-2.2.2-1] requirements based on packet_type; False otherwise. Parameters ---------- packet_type: MqttControlPacketType flags: int Integer representation of 4-bit MQTT header flags field. Values outside of the range [0, 15] will certainly cause the function to return False. Returns ------- bool
[ "True", "when", "flags", "comply", "with", "[", "MQTT", "-", "2", ".", "2", ".", "2", "-", "1", "]", "requirements", "based", "on", "packet_type", ";", "False", "otherwise", "." ]
python
train
36.351351
DataBiosphere/toil
src/toil/wdl/wdl_analysis.py
https://github.com/DataBiosphere/toil/blob/a8252277ff814e7bee0971139c2344f88e44b644/src/toil/wdl/wdl_analysis.py#L772-L797
def parse_declaration_expressn_memberaccess(self, lhsAST, rhsAST, es):
    """
    Instead of "Class.variablename", use "Class.rv('variablename')".

    :param lhsAST:
    :param rhsAST:
    :param es:
    :return:
    """
    def _append_terminal(acc, node):
        # Only Terminal nodes contribute text; structured nodes are
        # not supported here.
        if isinstance(node, wdl_parser.Terminal):
            return acc + node.source_string
        if isinstance(node, (wdl_parser.Ast, wdl_parser.AstList)):
            raise NotImplementedError
        return acc

    es = _append_terminal(es, lhsAST) + '_'
    es = _append_terminal(es, rhsAST)
    return es
[ "def", "parse_declaration_expressn_memberaccess", "(", "self", ",", "lhsAST", ",", "rhsAST", ",", "es", ")", ":", "if", "isinstance", "(", "lhsAST", ",", "wdl_parser", ".", "Terminal", ")", ":", "es", "=", "es", "+", "lhsAST", ".", "source_string", "elif", "isinstance", "(", "lhsAST", ",", "wdl_parser", ".", "Ast", ")", ":", "raise", "NotImplementedError", "elif", "isinstance", "(", "lhsAST", ",", "wdl_parser", ".", "AstList", ")", ":", "raise", "NotImplementedError", "es", "=", "es", "+", "'_'", "if", "isinstance", "(", "rhsAST", ",", "wdl_parser", ".", "Terminal", ")", ":", "es", "=", "es", "+", "rhsAST", ".", "source_string", "elif", "isinstance", "(", "rhsAST", ",", "wdl_parser", ".", "Ast", ")", ":", "raise", "NotImplementedError", "elif", "isinstance", "(", "rhsAST", ",", "wdl_parser", ".", "AstList", ")", ":", "raise", "NotImplementedError", "return", "es" ]
Instead of "Class.variablename", use "Class.rv('variablename')". :param lhsAST: :param rhsAST: :param es: :return:
[ "Instead", "of", "Class", ".", "variablename", "use", "Class", ".", "rv", "(", "variablename", ")", "." ]
python
train
31.307692
cogniteev/docido-python-sdk
docido_sdk/toolbox/yaml_ext.py
https://github.com/cogniteev/docido-python-sdk/blob/58ecb6c6f5757fd40c0601657ab18368da7ddf33/docido_sdk/toolbox/yaml_ext.py#L7-L26
def envvar_constructor(loader, node):
    """Tag constructor to use environment variables in YAML files.

    Usage:

    - !TAG VARIABLE
      raise while loading the document if variable does not exists
    - !TAG VARIABLE:=DEFAULT_VALUE

    For instance:

        credentials:
          user: !env USER:=root
          group: !env GROUP:= root
    """
    raw = loader.construct_python_unicode(node)
    name, separator, fallback = raw.partition(':=')
    if separator:
        # ':=' present: fall back to the default when unset.
        return os.environ.get(name, fallback)
    # No default given: a missing variable raises KeyError.
    return os.environ[raw]
[ "def", "envvar_constructor", "(", "loader", ",", "node", ")", ":", "value", "=", "loader", ".", "construct_python_unicode", "(", "node", ")", "data", "=", "value", ".", "split", "(", "':='", ",", "1", ")", "if", "len", "(", "data", ")", "==", "2", ":", "var", ",", "default", "=", "data", "return", "os", ".", "environ", ".", "get", "(", "var", ",", "default", ")", "else", ":", "return", "os", ".", "environ", "[", "value", "]" ]
Tag constructor to use environment variables in YAML files. Usage: - !TAG VARIABLE raise while loading the document if variable does not exists - !TAG VARIABLE:=DEFAULT_VALUE For instance: credentials: user: !env USER:=root group: !env GROUP:= root
[ "Tag", "constructor", "to", "use", "environment", "variables", "in", "YAML", "files", ".", "Usage", ":" ]
python
train
27.85
Exirel/pylint-json2html
pylint_json2html/__init__.py
https://github.com/Exirel/pylint-json2html/blob/7acdb4b7ea2f82a39a67d8ed3a43839c91cc423b/pylint_json2html/__init__.py#L188-L199
def on_close(self, stats, previous_stats):
    """Print the extended JSON report to reporter's output.

    :param dict stats: Metrics for the current pylint run
    :param dict previous_stats: Metrics for the previous pylint run
    """
    payload = {
        'messages': self._messages,
        'stats': stats,
        'previous': previous_stats,
    }
    serialized = json.dumps(payload, cls=JSONSetEncoder, indent=4)
    print(serialized, file=self.out)
[ "def", "on_close", "(", "self", ",", "stats", ",", "previous_stats", ")", ":", "reports", "=", "{", "'messages'", ":", "self", ".", "_messages", ",", "'stats'", ":", "stats", ",", "'previous'", ":", "previous_stats", ",", "}", "print", "(", "json", ".", "dumps", "(", "reports", ",", "cls", "=", "JSONSetEncoder", ",", "indent", "=", "4", ")", ",", "file", "=", "self", ".", "out", ")" ]
Print the extended JSON report to reporter's output. :param dict stats: Metrics for the current pylint run :param dict previous_stats: Metrics for the previous pylint run
[ "Print", "the", "extended", "JSON", "report", "to", "reporter", "s", "output", "." ]
python
train
38.333333
btel/svg_utils
src/svgutils/transform.py
https://github.com/btel/svg_utils/blob/ee00726ebed1bd97fd496b15b6a8e7f233ebb5e3/src/svgutils/transform.py#L82-L92
def skew_y(self, y):
    """Skew element along the y-axis by the given angle.

    Parameters
    ----------
    y : float
        y-axis skew angle in degrees
    """
    # Append to any transform already present on the element.
    existing = self.root.get("transform") or ''
    self.root.set("transform", "%s skewY(%f)" % (existing, y))
    return self
[ "def", "skew_y", "(", "self", ",", "y", ")", ":", "self", ".", "root", ".", "set", "(", "\"transform\"", ",", "\"%s skewY(%f)\"", "%", "(", "self", ".", "root", ".", "get", "(", "\"transform\"", ")", "or", "''", ",", "y", ")", ")", "return", "self" ]
Skew element along the y-axis by the given angle. Parameters ---------- y : float y-axis skew angle in degrees
[ "Skew", "element", "along", "the", "y", "-", "axis", "by", "the", "given", "angle", "." ]
python
train
28.545455
project-ncl/pnc-cli
pnc_cli/swagger_client/apis/licenses_api.py
https://github.com/project-ncl/pnc-cli/blob/3dc149bf84928f60a8044ac50b58bbaddd451902/pnc_cli/swagger_client/apis/licenses_api.py#L472-L497
def update(self, id, **kwargs):
    """
    Updates an existing License
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: License id (required)
    :param LicenseRest body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the callback (async) and plain (sync) paths simply return
    # whatever update_with_http_info produces, so a single call suffices.
    return self.update_with_http_info(id, **kwargs)
[ "def", "update", "(", "self", ",", "id", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'callback'", ")", ":", "return", "self", ".", "update_with_http_info", "(", "id", ",", "*", "*", "kwargs", ")", "else", ":", "(", "data", ")", "=", "self", ".", "update_with_http_info", "(", "id", ",", "*", "*", "kwargs", ")", "return", "data" ]
Updates an existing License This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.update(id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int id: License id (required) :param LicenseRest body: :return: None If the method is called asynchronously, returns the request thread.
[ "Updates", "an", "existing", "License", "This", "method", "makes", "a", "synchronous", "HTTP", "request", "by", "default", ".", "To", "make", "an", "asynchronous", "HTTP", "request", "please", "define", "a", "callback", "function", "to", "be", "invoked", "when", "receiving", "the", "response", ".", ">>>", "def", "callback_function", "(", "response", ")", ":", ">>>", "pprint", "(", "response", ")", ">>>", ">>>", "thread", "=", "api", ".", "update", "(", "id", "callback", "=", "callback_function", ")" ]
python
train
37.615385
caseyjlaw/rtpipe
rtpipe/RT.py
https://github.com/caseyjlaw/rtpipe/blob/ac33e4332cf215091a63afbb3137850876d73ec0/rtpipe/RT.py#L961-L969
def calc_nfalse(d):
    """ Calculate the number of thermal-noise false positives per segment.
    """
    # Dedisperse-all algorithm searches every trial time resolution in dtarr.
    dt_weight = sum(1. / dt for dt in d['dtarr'])
    trials = d['readints'] * dt_weight * len(d['dmarr']) * d['npixx'] * d['npixy']
    # Gaussian tail probability beyond the sigma_image1 threshold.
    tail_fraction = 1 - (erf(d['sigma_image1'] / n.sqrt(2)) + 1) / 2.
    return int(tail_fraction * trials)
[ "def", "calc_nfalse", "(", "d", ")", ":", "dtfactor", "=", "n", ".", "sum", "(", "[", "1.", "/", "i", "for", "i", "in", "d", "[", "'dtarr'", "]", "]", ")", "# assumes dedisperse-all algorithm", "ntrials", "=", "d", "[", "'readints'", "]", "*", "dtfactor", "*", "len", "(", "d", "[", "'dmarr'", "]", ")", "*", "d", "[", "'npixx'", "]", "*", "d", "[", "'npixy'", "]", "qfrac", "=", "1", "-", "(", "erf", "(", "d", "[", "'sigma_image1'", "]", "/", "n", ".", "sqrt", "(", "2", ")", ")", "+", "1", ")", "/", "2.", "nfalse", "=", "int", "(", "qfrac", "*", "ntrials", ")", "return", "nfalse" ]
Calculate the number of thermal-noise false positives per segment.
[ "Calculate", "the", "number", "of", "thermal", "-", "noise", "false", "positives", "per", "segment", "." ]
python
train
41.444444
acorg/dark-matter
dark/titles.py
https://github.com/acorg/dark-matter/blob/c78a1bf262667fa5db3548fa7066c4ec14d0551d/dark/titles.py#L38-L47
def toDict(self):
    """
    Get information about a title alignment as a dictionary.

    @return: A C{dict} representation of the title aligment.
    """
    hsp_dicts = [hsp.toDict() for hsp in self.hsps]
    return {
        'hsps': hsp_dicts,
        'read': self.read.toDict(),
    }
[ "def", "toDict", "(", "self", ")", ":", "return", "{", "'hsps'", ":", "[", "hsp", ".", "toDict", "(", ")", "for", "hsp", "in", "self", ".", "hsps", "]", ",", "'read'", ":", "self", ".", "read", ".", "toDict", "(", ")", ",", "}" ]
Get information about a title alignment as a dictionary. @return: A C{dict} representation of the title aligment.
[ "Get", "information", "about", "a", "title", "alignment", "as", "a", "dictionary", "." ]
python
train
28.7
googleapis/google-cloud-python
bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_instance_admin_client.py
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_instance_admin_client.py#L112-L119
def cluster_path(cls, project, instance, cluster):
    """Return a fully-qualified cluster string."""
    template = "projects/{project}/instances/{instance}/clusters/{cluster}"
    return google.api_core.path_template.expand(
        template,
        project=project,
        instance=instance,
        cluster=cluster,
    )
[ "def", "cluster_path", "(", "cls", ",", "project", ",", "instance", ",", "cluster", ")", ":", "return", "google", ".", "api_core", ".", "path_template", ".", "expand", "(", "\"projects/{project}/instances/{instance}/clusters/{cluster}\"", ",", "project", "=", "project", ",", "instance", "=", "instance", ",", "cluster", "=", "cluster", ",", ")" ]
Return a fully-qualified cluster string.
[ "Return", "a", "fully", "-", "qualified", "cluster", "string", "." ]
python
train
40.5
sernst/cauldron
cauldron/session/display/__init__.py
https://github.com/sernst/cauldron/blob/4086aec9c038c402ea212c79fe8bd0d27104f9cf/cauldron/session/display/__init__.py#L403-L429
def listing(
        source: list,
        ordered: bool = False,
        expand_full: bool = False
):
    """
    An unordered or ordered list of the specified *source* iterable where
    each element is converted to a string representation for display.

    :param source:
        The iterable to display as a list.
    :param ordered:
        Whether or not the list should be ordered. If False, which is the
        default, an unordered bulleted list is created.
    :param expand_full:
        Whether or not the list should expand to fill the screen horizontally.
        When defaulted to False, the list is constrained to the center view
        area of the screen along with other text. This can be useful to keep
        lists aligned with the text flow.
    """
    report = _get_report()
    rendered = render.listing(
        source=source,
        ordered=ordered,
        expand_full=expand_full
    )
    report.append_body(rendered)
    report.stdout_interceptor.write_source('[ADDED] Listing\n')
[ "def", "listing", "(", "source", ":", "list", ",", "ordered", ":", "bool", "=", "False", ",", "expand_full", ":", "bool", "=", "False", ")", ":", "r", "=", "_get_report", "(", ")", "r", ".", "append_body", "(", "render", ".", "listing", "(", "source", "=", "source", ",", "ordered", "=", "ordered", ",", "expand_full", "=", "expand_full", ")", ")", "r", ".", "stdout_interceptor", ".", "write_source", "(", "'[ADDED] Listing\\n'", ")" ]
An unordered or ordered list of the specified *source* iterable where each element is converted to a string representation for display. :param source: The iterable to display as a list. :param ordered: Whether or not the list should be ordered. If False, which is the default, an unordered bulleted list is created. :param expand_full: Whether or not the list should expand to fill the screen horizontally. When defaulted to False, the list is constrained to the center view area of the screen along with other text. This can be useful to keep lists aligned with the text flow.
[ "An", "unordered", "or", "ordered", "list", "of", "the", "specified", "*", "source", "*", "iterable", "where", "each", "element", "is", "converted", "to", "a", "string", "representation", "for", "display", "." ]
python
train
35.148148
breuleux/hrepr
hrepr/__init__.py
https://github.com/breuleux/hrepr/blob/a411395d31ac7c8c071d174e63a093751aa5997b/hrepr/__init__.py#L342-L383
def titled_box(self, titles, contents, tdir='h', cdir='h'):
    """
    Helper function to build a box containing a list of elements,
    with a title above and/or below, or left and/or right of the
    box. (e.g. a class name on top, or brackets on both sides.)
    The elements given must already have been transformed into
    Tag instances.

    Arguments:
        titles: A pair of strings to display on top and bottom
            (if tdir=='v') or left and right (if tdir=='h').
            If either or both titles are None, they will be
            omitted.
        contents: A list of Tags.
        tdir: tdir=='h' (default) means the titles will be on
            the left and right. tdir=='v' means they will be
            on top and bottom.
        cdir: cdir=='h' (default) means the contents will be
            stacked horizontally. cdir=='v' means they will
            be stacked vertically.
    """
    H = self.H

    # Normalize titles into an (opening, closing) pair.
    if isinstance(titles, tuple) and len(titles) == 2:
        opening, closing = titles
    else:
        opening, closing = titles, None

    box = H.div[f'hrepr-titled-{tdir}']
    body = H.div[f'hrepr-contents-{cdir}'].fill(contents)

    if opening:
        box = box(H.div['hrepr-title'](opening))
    box = box(body)
    if closing:
        box = box(H.div['hrepr-title'](closing))
    return box
[ "def", "titled_box", "(", "self", ",", "titles", ",", "contents", ",", "tdir", "=", "'h'", ",", "cdir", "=", "'h'", ")", ":", "H", "=", "self", ".", "H", "def", "wrapt", "(", "x", ")", ":", "return", "H", ".", "div", "[", "'hrepr-title'", "]", "(", "x", ")", "rval", "=", "H", ".", "div", "[", "f'hrepr-titled-{tdir}'", "]", "contents", "=", "H", ".", "div", "[", "f'hrepr-contents-{cdir}'", "]", ".", "fill", "(", "contents", ")", "if", "isinstance", "(", "titles", ",", "tuple", ")", "and", "len", "(", "titles", ")", "==", "2", ":", "open", ",", "close", "=", "titles", "else", ":", "open", ",", "close", "=", "titles", ",", "None", "if", "open", ":", "rval", "=", "rval", "(", "wrapt", "(", "open", ")", ")", "rval", "=", "rval", "(", "contents", ")", "if", "close", ":", "rval", "=", "rval", "(", "wrapt", "(", "close", ")", ")", "return", "rval" ]
Helper function to build a box containing a list of elements, with a title above and/or below, or left and/or right of the box. (e.g. a class name on top, or brackets on both sides.) The elements given must already have been transformed into Tag instances. Arguments: titles: A pair of strings to display on top and bottom (if tdir=='v') or left and right (if tdir=='h'). If either or both titles are None, they will be omitted. contents: A list of Tags. tdir: tdir=='h' (default) means the titles will be on the left and right. tdir=='v' means they will be on top and bottom. cdir: cdir=='h' (default) means the contents will be stacked horizontally. cdir=='v' means they will be stacked vertically.
[ "Helper", "function", "to", "build", "a", "box", "containing", "a", "list", "of", "elements", "with", "a", "title", "above", "and", "/", "or", "below", "or", "left", "and", "/", "or", "right", "of", "the", "box", ".", "(", "e", ".", "g", ".", "a", "class", "name", "on", "top", "or", "brackets", "on", "both", "sides", ".", ")" ]
python
train
34.714286
mariano/pyfire
pyfire/room.py
https://github.com/mariano/pyfire/blob/42e3490c138abc8e10f2e9f8f8f3b40240a80412/pyfire/room.py#L143-L165
def speak(self, message):
    """ Post a message.

    Args:
        message (:class:`Message` or string): Message

    Returns:
        bool. Success
    """
    campfire = self.get_campfire()
    # Wrap bare strings in a Message before posting.
    if not isinstance(message, Message):
        message = Message(campfire, message)

    result = self._connection.post(
        "room/%s/speak" % self.id,
        {"message": message.get_data()},
        parse_data=True,
        key="message"
    )

    if not result["success"]:
        return result["success"]
    return Message(campfire, result["data"])
[ "def", "speak", "(", "self", ",", "message", ")", ":", "campfire", "=", "self", ".", "get_campfire", "(", ")", "if", "not", "isinstance", "(", "message", ",", "Message", ")", ":", "message", "=", "Message", "(", "campfire", ",", "message", ")", "result", "=", "self", ".", "_connection", ".", "post", "(", "\"room/%s/speak\"", "%", "self", ".", "id", ",", "{", "\"message\"", ":", "message", ".", "get_data", "(", ")", "}", ",", "parse_data", "=", "True", ",", "key", "=", "\"message\"", ")", "if", "result", "[", "\"success\"", "]", ":", "return", "Message", "(", "campfire", ",", "result", "[", "\"data\"", "]", ")", "return", "result", "[", "\"success\"", "]" ]
Post a message. Args: message (:class:`Message` or string): Message Returns: bool. Success
[ "Post", "a", "message", "." ]
python
valid
26.086957
portfors-lab/sparkle
sparkle/gui/stim/auto_parameters_editor.py
https://github.com/portfors-lab/sparkle/blob/5fad1cf2bec58ec6b15d91da20f6236a74826110/sparkle/gui/stim/auto_parameters_editor.py#L71-L91
def showEvent(self, event):
    """When this widget is shown it has an effect of putting
    other widgets in the parent widget into different editing modes, emits
    signal to notify other widgets. Restores the previous selection the
    last time this widget was visible"""
    model = self.paramList.model()
    selected = self.paramList.selectedIndexes()
    self.visibilityChanged.emit(1)

    # Same hint applies whether restoring a selection or defaulting to row 0.
    edit_hint = ('Select parameter to edit. \n\nParameter must have selected '
                 'components in order to edit fields')

    if selected:
        # Restore previous selection: highlight its components in the
        # StimulusView.
        self.paramList.parameterChanged.emit(model.selection(selected[0]))
        self.hintRequested.emit(edit_hint)
    elif model.rowCount() > 0:
        # Nothing previously selected; default to the first parameter.
        self.paramList.selectRow(0)
        self.paramList.parameterChanged.emit(model.selection(model.index(0, 0)))
        self.hintRequested.emit(edit_hint)
    else:
        model.emptied.emit(True)
        self.hintRequested.emit('To add a parameter, Drag "Add" onto empty auto-parameter table')
[ "def", "showEvent", "(", "self", ",", "event", ")", ":", "selected", "=", "self", ".", "paramList", ".", "selectedIndexes", "(", ")", "model", "=", "self", ".", "paramList", ".", "model", "(", ")", "self", ".", "visibilityChanged", ".", "emit", "(", "1", ")", "if", "len", "(", "selected", ")", ">", "0", ":", "# select the correct components in the StimulusView", "self", ".", "paramList", ".", "parameterChanged", ".", "emit", "(", "model", ".", "selection", "(", "selected", "[", "0", "]", ")", ")", "self", ".", "hintRequested", ".", "emit", "(", "'Select parameter to edit. \\n\\nParameter must have selected components in order to edit fields'", ")", "elif", "model", ".", "rowCount", "(", ")", ">", "0", ":", "# just select first item", "self", ".", "paramList", ".", "selectRow", "(", "0", ")", "self", ".", "paramList", ".", "parameterChanged", ".", "emit", "(", "model", ".", "selection", "(", "model", ".", "index", "(", "0", ",", "0", ")", ")", ")", "self", ".", "hintRequested", ".", "emit", "(", "'Select parameter to edit. \\n\\nParameter must have selected components in order to edit fields'", ")", "else", ":", "model", ".", "emptied", ".", "emit", "(", "True", ")", "self", ".", "hintRequested", ".", "emit", "(", "'To add a parameter, Drag \"Add\" onto empty auto-parameter table'", ")" ]
When this widget is shown it has an effect of putting other widgets in the parent widget into different editing modes, emits signal to notify other widgets. Restores the previous selection the last time this widget was visible
[ "When", "this", "widget", "is", "shown", "it", "has", "an", "effect", "of", "putting", "other", "widgets", "in", "the", "parent", "widget", "into", "different", "editing", "modes", "emits", "signal", "to", "notify", "other", "widgets", ".", "Restores", "the", "previous", "selection", "the", "last", "time", "this", "widget", "was", "visible" ]
python
train
56.714286
obulpathi/cdn-fastly-python
fastly/__init__.py
https://github.com/obulpathi/cdn-fastly-python/blob/db2564b047e8af4bce72c3b88d6c27d3d0291425/fastly/__init__.py#L784-L787
def get_syslog(self, service_id, version_number, name): """Get the Syslog for a particular service and version.""" content = self._fetch("/service/%s/version/%d/syslog/%s" % (service_id, version_number, name)) return FastlySyslog(self, content)
[ "def", "get_syslog", "(", "self", ",", "service_id", ",", "version_number", ",", "name", ")", ":", "content", "=", "self", ".", "_fetch", "(", "\"/service/%s/version/%d/syslog/%s\"", "%", "(", "service_id", ",", "version_number", ",", "name", ")", ")", "return", "FastlySyslog", "(", "self", ",", "content", ")" ]
Get the Syslog for a particular service and version.
[ "Get", "the", "Syslog", "for", "a", "particular", "service", "and", "version", "." ]
python
train
61.75
google/grr
grr/server/grr_response_server/threadpool.py
https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/server/grr_response_server/threadpool.py#L458-L466
def Join(self): """Waits until all outstanding tasks are completed.""" for _ in range(self.JOIN_TIMEOUT_DECISECONDS): if self._queue.empty() and not self.busy_threads: return time.sleep(0.1) raise ValueError("Timeout during Join() for threadpool %s." % self.name)
[ "def", "Join", "(", "self", ")", ":", "for", "_", "in", "range", "(", "self", ".", "JOIN_TIMEOUT_DECISECONDS", ")", ":", "if", "self", ".", "_queue", ".", "empty", "(", ")", "and", "not", "self", ".", "busy_threads", ":", "return", "time", ".", "sleep", "(", "0.1", ")", "raise", "ValueError", "(", "\"Timeout during Join() for threadpool %s.\"", "%", "self", ".", "name", ")" ]
Waits until all outstanding tasks are completed.
[ "Waits", "until", "all", "outstanding", "tasks", "are", "completed", "." ]
python
train
32.111111
ihgazni2/elist
elist/elist.py
https://github.com/ihgazni2/elist/blob/8c07b5029bda34ead60ce10335ceb145f209263c/elist/elist.py#L974-L988
def select_seqs(ol,seqs): ''' from elist.elist import * ol = ['a','b','c','d'] select_seqs(ol,[1,2]) ''' rslt =copy.deepcopy(ol) rslt = itemgetter(*seqs)(ol) if(seqs.__len__()==0): rslt = [] elif(seqs.__len__()==1): rslt = [rslt] else: rslt = list(rslt) return(rslt)
[ "def", "select_seqs", "(", "ol", ",", "seqs", ")", ":", "rslt", "=", "copy", ".", "deepcopy", "(", "ol", ")", "rslt", "=", "itemgetter", "(", "*", "seqs", ")", "(", "ol", ")", "if", "(", "seqs", ".", "__len__", "(", ")", "==", "0", ")", ":", "rslt", "=", "[", "]", "elif", "(", "seqs", ".", "__len__", "(", ")", "==", "1", ")", ":", "rslt", "=", "[", "rslt", "]", "else", ":", "rslt", "=", "list", "(", "rslt", ")", "return", "(", "rslt", ")" ]
from elist.elist import * ol = ['a','b','c','d'] select_seqs(ol,[1,2])
[ "from", "elist", ".", "elist", "import", "*", "ol", "=", "[", "a", "b", "c", "d", "]", "select_seqs", "(", "ol", "[", "1", "2", "]", ")" ]
python
valid
22.133333
umutbozkurt/django-rest-framework-mongoengine
rest_framework_mongoengine/serializers.py
https://github.com/umutbozkurt/django-rest-framework-mongoengine/blob/2fe6de53907b31a5e8b742e4c6b728942b5fa4f0/rest_framework_mongoengine/serializers.py#L542-L562
def apply_customization(self, serializer, customization): """ Applies fields customization to a nested or embedded DocumentSerializer. """ # apply fields or exclude if customization.fields is not None: if len(customization.fields) == 0: # customization fields are empty, set Meta.fields to '__all__' serializer.Meta.fields = ALL_FIELDS else: serializer.Meta.fields = customization.fields if customization.exclude is not None: serializer.Meta.exclude = customization.exclude # apply extra_kwargs if customization.extra_kwargs is not None: serializer.Meta.extra_kwargs = customization.extra_kwargs # apply validate_methods for method_name, method in customization.validate_methods.items(): setattr(serializer, method_name, method)
[ "def", "apply_customization", "(", "self", ",", "serializer", ",", "customization", ")", ":", "# apply fields or exclude", "if", "customization", ".", "fields", "is", "not", "None", ":", "if", "len", "(", "customization", ".", "fields", ")", "==", "0", ":", "# customization fields are empty, set Meta.fields to '__all__'", "serializer", ".", "Meta", ".", "fields", "=", "ALL_FIELDS", "else", ":", "serializer", ".", "Meta", ".", "fields", "=", "customization", ".", "fields", "if", "customization", ".", "exclude", "is", "not", "None", ":", "serializer", ".", "Meta", ".", "exclude", "=", "customization", ".", "exclude", "# apply extra_kwargs", "if", "customization", ".", "extra_kwargs", "is", "not", "None", ":", "serializer", ".", "Meta", ".", "extra_kwargs", "=", "customization", ".", "extra_kwargs", "# apply validate_methods", "for", "method_name", ",", "method", "in", "customization", ".", "validate_methods", ".", "items", "(", ")", ":", "setattr", "(", "serializer", ",", "method_name", ",", "method", ")" ]
Applies fields customization to a nested or embedded DocumentSerializer.
[ "Applies", "fields", "customization", "to", "a", "nested", "or", "embedded", "DocumentSerializer", "." ]
python
train
42.761905
nickmckay/LiPD-utilities
Python/lipd/dataframes.py
https://github.com/nickmckay/LiPD-utilities/blob/5dab6bbeffc5effd68e3a6beaca6b76aa928e860/Python/lipd/dataframes.py#L240-L299
def _match_dfs_expr(lo_meta, expr, tt): """ Use the given expression to get all data frames that match the criteria (i.e. "paleo measurement tables") :param dict lo_meta: Lipd object metadata :param str expr: Search expression :param str tt: Table type (chron or paleo) :return list: All filenames that match the expression """ logger_dataframes.info("enter match_dfs_expr") filenames = [] s = "{}Data".format(tt) # Top table level. Going through all tables of certain type (i.e. chron or paleo) for k, v in lo_meta["{}Data".format(tt)].items(): # Inner table level. Get data from one specific table if "measurement" in expr: for k1, v1 in v["{}MeasurementTable".format(tt)].items(): try: f = v1["filename"] if f.endswith(".csv"): filenames.append(f) except KeyError: # Not concerned if the key wasn't found. logger_dataframes.info("match_dfs_expr: KeyError: filename not found in: {} {}".format(tt, "measurement")) elif "ensemble" in expr: for k1, v1 in v["{}Model".format(tt)].items(): try: f = v1["ensembleTable"]["filename"] if f.endswith(".csv"): filenames.append(f) except KeyError: # Not concerned if the key wasn't found. logger_dataframes.info("match_dfs_expr: KeyError: filename not found in: {} {}".format(tt, "ensemble")) elif "model" in expr: for k1, v1 in v["{}Model".format(tt)].items(): try: f = v1["{}ModelTable".format(tt)]["filename"] if f.endswith(".csv"): filenames.append(f) except KeyError: # Not concerned if the key wasn't found. logger_dataframes.info("match_dfs_expr: KeyError: filename not found in: {} {}".format(tt, "model")) elif "dist" in expr: for k1, v1 in v["{}Model".format(tt)].items(): for k2, v2 in v1["distribution"].items(): try: f = v2["filename"] if f.endswith(".csv"): filenames.append(f) except KeyError: # Not concerned if the key wasn't found. logger_dataframes.info( "match_dfs_expr: KeyError: filename not found in: {} {}".format(tt, "dist")) logger_dataframes.info("exit match_dfs_expr") return filenames
[ "def", "_match_dfs_expr", "(", "lo_meta", ",", "expr", ",", "tt", ")", ":", "logger_dataframes", ".", "info", "(", "\"enter match_dfs_expr\"", ")", "filenames", "=", "[", "]", "s", "=", "\"{}Data\"", ".", "format", "(", "tt", ")", "# Top table level. Going through all tables of certain type (i.e. chron or paleo)", "for", "k", ",", "v", "in", "lo_meta", "[", "\"{}Data\"", ".", "format", "(", "tt", ")", "]", ".", "items", "(", ")", ":", "# Inner table level. Get data from one specific table", "if", "\"measurement\"", "in", "expr", ":", "for", "k1", ",", "v1", "in", "v", "[", "\"{}MeasurementTable\"", ".", "format", "(", "tt", ")", "]", ".", "items", "(", ")", ":", "try", ":", "f", "=", "v1", "[", "\"filename\"", "]", "if", "f", ".", "endswith", "(", "\".csv\"", ")", ":", "filenames", ".", "append", "(", "f", ")", "except", "KeyError", ":", "# Not concerned if the key wasn't found.", "logger_dataframes", ".", "info", "(", "\"match_dfs_expr: KeyError: filename not found in: {} {}\"", ".", "format", "(", "tt", ",", "\"measurement\"", ")", ")", "elif", "\"ensemble\"", "in", "expr", ":", "for", "k1", ",", "v1", "in", "v", "[", "\"{}Model\"", ".", "format", "(", "tt", ")", "]", ".", "items", "(", ")", ":", "try", ":", "f", "=", "v1", "[", "\"ensembleTable\"", "]", "[", "\"filename\"", "]", "if", "f", ".", "endswith", "(", "\".csv\"", ")", ":", "filenames", ".", "append", "(", "f", ")", "except", "KeyError", ":", "# Not concerned if the key wasn't found.", "logger_dataframes", ".", "info", "(", "\"match_dfs_expr: KeyError: filename not found in: {} {}\"", ".", "format", "(", "tt", ",", "\"ensemble\"", ")", ")", "elif", "\"model\"", "in", "expr", ":", "for", "k1", ",", "v1", "in", "v", "[", "\"{}Model\"", ".", "format", "(", "tt", ")", "]", ".", "items", "(", ")", ":", "try", ":", "f", "=", "v1", "[", "\"{}ModelTable\"", ".", "format", "(", "tt", ")", "]", "[", "\"filename\"", "]", "if", "f", ".", "endswith", "(", "\".csv\"", ")", ":", "filenames", ".", "append", "(", "f", 
")", "except", "KeyError", ":", "# Not concerned if the key wasn't found.", "logger_dataframes", ".", "info", "(", "\"match_dfs_expr: KeyError: filename not found in: {} {}\"", ".", "format", "(", "tt", ",", "\"model\"", ")", ")", "elif", "\"dist\"", "in", "expr", ":", "for", "k1", ",", "v1", "in", "v", "[", "\"{}Model\"", ".", "format", "(", "tt", ")", "]", ".", "items", "(", ")", ":", "for", "k2", ",", "v2", "in", "v1", "[", "\"distribution\"", "]", ".", "items", "(", ")", ":", "try", ":", "f", "=", "v2", "[", "\"filename\"", "]", "if", "f", ".", "endswith", "(", "\".csv\"", ")", ":", "filenames", ".", "append", "(", "f", ")", "except", "KeyError", ":", "# Not concerned if the key wasn't found.", "logger_dataframes", ".", "info", "(", "\"match_dfs_expr: KeyError: filename not found in: {} {}\"", ".", "format", "(", "tt", ",", "\"dist\"", ")", ")", "logger_dataframes", ".", "info", "(", "\"exit match_dfs_expr\"", ")", "return", "filenames" ]
Use the given expression to get all data frames that match the criteria (i.e. "paleo measurement tables") :param dict lo_meta: Lipd object metadata :param str expr: Search expression :param str tt: Table type (chron or paleo) :return list: All filenames that match the expression
[ "Use", "the", "given", "expression", "to", "get", "all", "data", "frames", "that", "match", "the", "criteria", "(", "i", ".", "e", ".", "paleo", "measurement", "tables", ")", ":", "param", "dict", "lo_meta", ":", "Lipd", "object", "metadata", ":", "param", "str", "expr", ":", "Search", "expression", ":", "param", "str", "tt", ":", "Table", "type", "(", "chron", "or", "paleo", ")", ":", "return", "list", ":", "All", "filenames", "that", "match", "the", "expression" ]
python
train
44.35
delph-in/pydelphin
delphin/mrs/simplemrs.py
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L475-L492
def _serialize_lnk(lnk): """Serialize a predication lnk to surface form into the SimpleMRS encoding.""" s = "" if lnk is not None: s = '<' if lnk.type == Lnk.CHARSPAN: cfrom, cto = lnk.data s += ''.join([str(cfrom), ':', str(cto)]) elif lnk.type == Lnk.CHARTSPAN: cfrom, cto = lnk.data s += ''.join([str(cfrom), '#', str(cto)]) elif lnk.type == Lnk.TOKENS: s += ' '.join([str(t) for t in lnk.data]) elif lnk.type == Lnk.EDGE: s += ''.join(['@', str(lnk.data)]) s += '>' return s
[ "def", "_serialize_lnk", "(", "lnk", ")", ":", "s", "=", "\"\"", "if", "lnk", "is", "not", "None", ":", "s", "=", "'<'", "if", "lnk", ".", "type", "==", "Lnk", ".", "CHARSPAN", ":", "cfrom", ",", "cto", "=", "lnk", ".", "data", "s", "+=", "''", ".", "join", "(", "[", "str", "(", "cfrom", ")", ",", "':'", ",", "str", "(", "cto", ")", "]", ")", "elif", "lnk", ".", "type", "==", "Lnk", ".", "CHARTSPAN", ":", "cfrom", ",", "cto", "=", "lnk", ".", "data", "s", "+=", "''", ".", "join", "(", "[", "str", "(", "cfrom", ")", ",", "'#'", ",", "str", "(", "cto", ")", "]", ")", "elif", "lnk", ".", "type", "==", "Lnk", ".", "TOKENS", ":", "s", "+=", "' '", ".", "join", "(", "[", "str", "(", "t", ")", "for", "t", "in", "lnk", ".", "data", "]", ")", "elif", "lnk", ".", "type", "==", "Lnk", ".", "EDGE", ":", "s", "+=", "''", ".", "join", "(", "[", "'@'", ",", "str", "(", "lnk", ".", "data", ")", "]", ")", "s", "+=", "'>'", "return", "s" ]
Serialize a predication lnk to surface form into the SimpleMRS encoding.
[ "Serialize", "a", "predication", "lnk", "to", "surface", "form", "into", "the", "SimpleMRS", "encoding", "." ]
python
train
33.555556
StackStorm/pybind
pybind/slxos/v17r_1_01a/isis_state/router_isis_config/__init__.py
https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/slxos/v17r_1_01a/isis_state/router_isis_config/__init__.py#L1330-L1353
def _set_pspf_timer(self, v, load=False): """ Setter method for pspf_timer, mapped from YANG variable /isis_state/router_isis_config/pspf_timer (container) If this variable is read-only (config: false) in the source YANG file, then _set_pspf_timer is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_pspf_timer() directly. YANG Description: Timer for IS-IS Partial SPF calculation for IPv4 """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=pspf_timer.pspf_timer, is_container='container', presence=False, yang_name="pspf-timer", rest_name="pspf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-pspf-timer', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """pspf_timer must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=pspf_timer.pspf_timer, is_container='container', presence=False, yang_name="pspf-timer", rest_name="pspf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-pspf-timer', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""", }) self.__pspf_timer = t if hasattr(self, '_set'): self._set()
[ "def", "_set_pspf_timer", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "pspf_timer", ".", "pspf_timer", ",", "is_container", "=", "'container'", ",", "presence", "=", "False", ",", "yang_name", "=", "\"pspf-timer\"", ",", "rest_name", "=", "\"pspf-timer\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'callpoint'", ":", "u'isis-pspf-timer'", ",", "u'cli-suppress-show-path'", ":", "None", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-isis-operational'", ",", "defining_module", "=", "'brocade-isis-operational'", ",", "yang_type", "=", "'container'", ",", "is_config", "=", "False", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"pspf_timer must be of a type compatible with container\"\"\"", ",", "'defined-type'", ":", "\"container\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=pspf_timer.pspf_timer, is_container='container', presence=False, yang_name=\"pspf-timer\", rest_name=\"pspf-timer\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-pspf-timer', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)\"\"\"", ",", "}", ")", "self", ".", "__pspf_timer", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
Setter method for pspf_timer, mapped from YANG variable /isis_state/router_isis_config/pspf_timer (container) If this variable is read-only (config: false) in the source YANG file, then _set_pspf_timer is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_pspf_timer() directly. YANG Description: Timer for IS-IS Partial SPF calculation for IPv4
[ "Setter", "method", "for", "pspf_timer", "mapped", "from", "YANG", "variable", "/", "isis_state", "/", "router_isis_config", "/", "pspf_timer", "(", "container", ")", "If", "this", "variable", "is", "read", "-", "only", "(", "config", ":", "false", ")", "in", "the", "source", "YANG", "file", "then", "_set_pspf_timer", "is", "considered", "as", "a", "private", "method", ".", "Backends", "looking", "to", "populate", "this", "variable", "should", "do", "so", "via", "calling", "thisObj", ".", "_set_pspf_timer", "()", "directly", "." ]
python
train
74.25
tensorlayer/tensorlayer
tensorlayer/nlp.py
https://github.com/tensorlayer/tensorlayer/blob/aa9e52e36c7058a7e6fd81d36563ca6850b21956/tensorlayer/nlp.py#L335-L375
def process_sentence(sentence, start_word="<S>", end_word="</S>"): """Seperate a sentence string into a list of string words, add start_word and end_word, see ``create_vocab()`` and ``tutorial_tfrecord3.py``. Parameters ---------- sentence : str A sentence. start_word : str or None The start word. If None, no start word will be appended. end_word : str or None The end word. If None, no end word will be appended. Returns --------- list of str A list of strings that separated into words. Examples ----------- >>> c = "how are you?" >>> c = tl.nlp.process_sentence(c) >>> print(c) ['<S>', 'how', 'are', 'you', '?', '</S>'] Notes ------- - You have to install the following package. - `Installing NLTK <http://www.nltk.org/install.html>`__ - `Installing NLTK data <http://www.nltk.org/data.html>`__ """ if start_word is not None: process_sentence = [start_word] else: process_sentence = [] process_sentence.extend(nltk.tokenize.word_tokenize(sentence.lower())) if end_word is not None: process_sentence.append(end_word) return process_sentence
[ "def", "process_sentence", "(", "sentence", ",", "start_word", "=", "\"<S>\"", ",", "end_word", "=", "\"</S>\"", ")", ":", "if", "start_word", "is", "not", "None", ":", "process_sentence", "=", "[", "start_word", "]", "else", ":", "process_sentence", "=", "[", "]", "process_sentence", ".", "extend", "(", "nltk", ".", "tokenize", ".", "word_tokenize", "(", "sentence", ".", "lower", "(", ")", ")", ")", "if", "end_word", "is", "not", "None", ":", "process_sentence", ".", "append", "(", "end_word", ")", "return", "process_sentence" ]
Seperate a sentence string into a list of string words, add start_word and end_word, see ``create_vocab()`` and ``tutorial_tfrecord3.py``. Parameters ---------- sentence : str A sentence. start_word : str or None The start word. If None, no start word will be appended. end_word : str or None The end word. If None, no end word will be appended. Returns --------- list of str A list of strings that separated into words. Examples ----------- >>> c = "how are you?" >>> c = tl.nlp.process_sentence(c) >>> print(c) ['<S>', 'how', 'are', 'you', '?', '</S>'] Notes ------- - You have to install the following package. - `Installing NLTK <http://www.nltk.org/install.html>`__ - `Installing NLTK data <http://www.nltk.org/data.html>`__
[ "Seperate", "a", "sentence", "string", "into", "a", "list", "of", "string", "words", "add", "start_word", "and", "end_word", "see", "create_vocab", "()", "and", "tutorial_tfrecord3", ".", "py", "." ]
python
valid
28.634146
deepmind/sonnet
sonnet/python/modules/conv.py
https://github.com/deepmind/sonnet/blob/00612ca3178964d86b556e062694d808ff81fcca/sonnet/python/modules/conv.py#L1159-L1183
def _recover_shape_information(self, inputs, outputs): """Recover output tensor shape value to enable shape inference. The batch size of `inputs` isn't preserved by the convolution op. Calculate what the proper output shape will be for `outputs`. Args: inputs: A Tensor of shape `data_format` and of type `tf.float16`, `tf.bfloat16` or `tf.float32`. outputs: A Tensor of shape `data_format` and of type `tf.float16`, `tf.bfloat16` or `tf.float32`. The output of `inputs` from a transpose convolution op. Returns: outputs: The passed-in `outputs` with all shape information filled in. """ batch_size_value = inputs.get_shape()[0] if self._data_format.startswith("NC"): output_shape_value = ((batch_size_value, self.output_channels) + self.output_shape) elif self._data_format.startswith("N") and self._data_format.endswith("C"): output_shape_value = ((batch_size_value,) + self.output_shape + (self.output_channels,)) outputs.set_shape(output_shape_value) return outputs
[ "def", "_recover_shape_information", "(", "self", ",", "inputs", ",", "outputs", ")", ":", "batch_size_value", "=", "inputs", ".", "get_shape", "(", ")", "[", "0", "]", "if", "self", ".", "_data_format", ".", "startswith", "(", "\"NC\"", ")", ":", "output_shape_value", "=", "(", "(", "batch_size_value", ",", "self", ".", "output_channels", ")", "+", "self", ".", "output_shape", ")", "elif", "self", ".", "_data_format", ".", "startswith", "(", "\"N\"", ")", "and", "self", ".", "_data_format", ".", "endswith", "(", "\"C\"", ")", ":", "output_shape_value", "=", "(", "(", "batch_size_value", ",", ")", "+", "self", ".", "output_shape", "+", "(", "self", ".", "output_channels", ",", ")", ")", "outputs", ".", "set_shape", "(", "output_shape_value", ")", "return", "outputs" ]
Recover output tensor shape value to enable shape inference. The batch size of `inputs` isn't preserved by the convolution op. Calculate what the proper output shape will be for `outputs`. Args: inputs: A Tensor of shape `data_format` and of type `tf.float16`, `tf.bfloat16` or `tf.float32`. outputs: A Tensor of shape `data_format` and of type `tf.float16`, `tf.bfloat16` or `tf.float32`. The output of `inputs` from a transpose convolution op. Returns: outputs: The passed-in `outputs` with all shape information filled in.
[ "Recover", "output", "tensor", "shape", "value", "to", "enable", "shape", "inference", "." ]
python
train
44.32
KnuVerse/knuverse-sdk-python
knuverse/knufactor.py
https://github.com/KnuVerse/knuverse-sdk-python/blob/00f1275a452a4dcf9bc92ef345f6985504226d8e/knuverse/knufactor.py#L639-L647
def status(self): """ Get server status. Uses GET to /status interface. :Returns: (dict) Server status as described `here <https://cloud.knuverse.com/docs/api/#api-General-Status>`_. """ response = self._get(url.status) self._check_response(response, 200) return self._create_response(response)
[ "def", "status", "(", "self", ")", ":", "response", "=", "self", ".", "_get", "(", "url", ".", "status", ")", "self", ".", "_check_response", "(", "response", ",", "200", ")", "return", "self", ".", "_create_response", "(", "response", ")" ]
Get server status. Uses GET to /status interface. :Returns: (dict) Server status as described `here <https://cloud.knuverse.com/docs/api/#api-General-Status>`_.
[ "Get", "server", "status", ".", "Uses", "GET", "to", "/", "status", "interface", "." ]
python
train
38.222222
diamondman/proteusisc
proteusisc/promise.py
https://github.com/diamondman/proteusisc/blob/7622b7b04e63f9dc0f5a04429ff78d9a490c9c5c/proteusisc/promise.py#L151-L182
def _fulfill(self, bits, ignore_nonpromised_bits=False): """Supply the promise with the bits from its associated primitive's execution. The fulfillment process must walk the promise chain backwards until it reaches the original promise and can supply the final value. The data that comes in can either be all a bit read for every bit written by the associated primitive, or (if the primitive supports it), only the bits that are used by promises. The ignore_nonpromised_bits flag specifies which format the incoming data is in. Args: bits: A bitarray (or compatible) containing the data read from the jtag controller's TDO pin. ignore_nonpromised_bits: A boolean specifying if only promised bits are being returned (and thus the 2nd index of the promise must be used for slicing the incoming data). """ if self._allsubsfulfilled(): if not self._components: if ignore_nonpromised_bits: self._value = bits[self._bitstartselective: self._bitstartselective + self._bitlength] else: self._value = bits[self._bitstart:self._bitend] else: self._value = self._components[0][0]._value for sub, offset in self._components[1:]: self._value += sub._value if self._parent is not None: self._parent._fulfill(None)
[ "def", "_fulfill", "(", "self", ",", "bits", ",", "ignore_nonpromised_bits", "=", "False", ")", ":", "if", "self", ".", "_allsubsfulfilled", "(", ")", ":", "if", "not", "self", ".", "_components", ":", "if", "ignore_nonpromised_bits", ":", "self", ".", "_value", "=", "bits", "[", "self", ".", "_bitstartselective", ":", "self", ".", "_bitstartselective", "+", "self", ".", "_bitlength", "]", "else", ":", "self", ".", "_value", "=", "bits", "[", "self", ".", "_bitstart", ":", "self", ".", "_bitend", "]", "else", ":", "self", ".", "_value", "=", "self", ".", "_components", "[", "0", "]", "[", "0", "]", ".", "_value", "for", "sub", ",", "offset", "in", "self", ".", "_components", "[", "1", ":", "]", ":", "self", ".", "_value", "+=", "sub", ".", "_value", "if", "self", ".", "_parent", "is", "not", "None", ":", "self", ".", "_parent", ".", "_fulfill", "(", "None", ")" ]
Supply the promise with the bits from its associated primitive's execution. The fulfillment process must walk the promise chain backwards until it reaches the original promise and can supply the final value. The data that comes in can either be all a bit read for every bit written by the associated primitive, or (if the primitive supports it), only the bits that are used by promises. The ignore_nonpromised_bits flag specifies which format the incoming data is in. Args: bits: A bitarray (or compatible) containing the data read from the jtag controller's TDO pin. ignore_nonpromised_bits: A boolean specifying if only promised bits are being returned (and thus the 2nd index of the promise must be used for slicing the incoming data).
[ "Supply", "the", "promise", "with", "the", "bits", "from", "its", "associated", "primitive", "s", "execution", "." ]
python
train
48.3125
mattja/nsim
nsim/nsim.py
https://github.com/mattja/nsim/blob/ed62c41cd56b918fd97e09f7ad73c12c76a8c3e0/nsim/nsim.py#L1113-L1132
def coupling(self, source_y, target_y, weight): """How to couple the output of one subsystem to the input of another. This is a fallback default coupling function that should usually be replaced with your own. This example coupling function takes the mean of all variables of the source subsystem and uses that value weighted by the connection strength to drive all variables of the target subsystem. Arguments: source_y (array of shape (d,)): State of the source subsystem. target_y (array of shape (d,)): State of target subsystem. weight (float): the connection strength for this connection. Returns: input (array of shape (d,)): Values to drive each variable of the target system. """ return np.ones_like(target_y)*np.mean(source_y)*weight
[ "def", "coupling", "(", "self", ",", "source_y", ",", "target_y", ",", "weight", ")", ":", "return", "np", ".", "ones_like", "(", "target_y", ")", "*", "np", ".", "mean", "(", "source_y", ")", "*", "weight" ]
How to couple the output of one subsystem to the input of another. This is a fallback default coupling function that should usually be replaced with your own. This example coupling function takes the mean of all variables of the source subsystem and uses that value weighted by the connection strength to drive all variables of the target subsystem. Arguments: source_y (array of shape (d,)): State of the source subsystem. target_y (array of shape (d,)): State of target subsystem. weight (float): the connection strength for this connection. Returns: input (array of shape (d,)): Values to drive each variable of the target system.
[ "How", "to", "couple", "the", "output", "of", "one", "subsystem", "to", "the", "input", "of", "another", "." ]
python
train
43
vaexio/vaex
packages/vaex-core/vaex/dataframe.py
https://github.com/vaexio/vaex/blob/a45b672f8287afca2ada8e36b74b604b9b28dd85/packages/vaex-core/vaex/dataframe.py#L3050-L3074
def add_virtual_columns_cartesian_velocities_to_polar(self, x="x", y="y", vx="vx", radius_polar=None, vy="vy", vr_out="vr_polar", vazimuth_out="vphi_polar", propagate_uncertainties=False,): """Convert cartesian to polar velocities. :param x: :param y: :param vx: :param radius_polar: Optional expression for the radius, may lead to a better performance when given. :param vy: :param vr_out: :param vazimuth_out: :param propagate_uncertainties: {propagate_uncertainties} :return: """ x = self._expr(x) y = self._expr(y) vx = self._expr(vx) vy = self._expr(vy) if radius_polar is None: radius_polar = np.sqrt(x**2 + y**2) radius_polar = self._expr(radius_polar) self[vr_out] = (x*vx + y*vy) / radius_polar self[vazimuth_out] = (x*vy - y*vx) / radius_polar if propagate_uncertainties: self.propagate_uncertainties([self[vr_out], self[vazimuth_out]])
[ "def", "add_virtual_columns_cartesian_velocities_to_polar", "(", "self", ",", "x", "=", "\"x\"", ",", "y", "=", "\"y\"", ",", "vx", "=", "\"vx\"", ",", "radius_polar", "=", "None", ",", "vy", "=", "\"vy\"", ",", "vr_out", "=", "\"vr_polar\"", ",", "vazimuth_out", "=", "\"vphi_polar\"", ",", "propagate_uncertainties", "=", "False", ",", ")", ":", "x", "=", "self", ".", "_expr", "(", "x", ")", "y", "=", "self", ".", "_expr", "(", "y", ")", "vx", "=", "self", ".", "_expr", "(", "vx", ")", "vy", "=", "self", ".", "_expr", "(", "vy", ")", "if", "radius_polar", "is", "None", ":", "radius_polar", "=", "np", ".", "sqrt", "(", "x", "**", "2", "+", "y", "**", "2", ")", "radius_polar", "=", "self", ".", "_expr", "(", "radius_polar", ")", "self", "[", "vr_out", "]", "=", "(", "x", "*", "vx", "+", "y", "*", "vy", ")", "/", "radius_polar", "self", "[", "vazimuth_out", "]", "=", "(", "x", "*", "vy", "-", "y", "*", "vx", ")", "/", "radius_polar", "if", "propagate_uncertainties", ":", "self", ".", "propagate_uncertainties", "(", "[", "self", "[", "vr_out", "]", ",", "self", "[", "vazimuth_out", "]", "]", ")" ]
Convert cartesian to polar velocities. :param x: :param y: :param vx: :param radius_polar: Optional expression for the radius, may lead to a better performance when given. :param vy: :param vr_out: :param vazimuth_out: :param propagate_uncertainties: {propagate_uncertainties} :return:
[ "Convert", "cartesian", "to", "polar", "velocities", "." ]
python
test
42.84
census-instrumentation/opencensus-python
nox.py
https://github.com/census-instrumentation/opencensus-python/blob/992b223f7e34c5dcb65922b7d5c827e7a1351e7d/nox.py#L82-L107
def system(session, py): """Run the system test suite.""" # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(py) # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py # Install all test dependencies. session.install('-r', 'requirements-test.txt') # Install dev packages into the virtualenv's dist-packages. _install_dev_packages(session) # Run py.test against the system tests. session.run( 'py.test', '-s', 'tests/system/', *session.posargs )
[ "def", "system", "(", "session", ",", "py", ")", ":", "# Sanity check: Only run system tests if the environment variable is set.", "if", "not", "os", ".", "environ", ".", "get", "(", "'GOOGLE_APPLICATION_CREDENTIALS'", ",", "''", ")", ":", "session", ".", "skip", "(", "'Credentials must be set via environment variable.'", ")", "# Run the system tests against latest Python 2 and Python 3 only.", "session", ".", "interpreter", "=", "'python{}'", ".", "format", "(", "py", ")", "# Set the virtualenv dirname.", "session", ".", "virtualenv_dirname", "=", "'sys-'", "+", "py", "# Install all test dependencies.", "session", ".", "install", "(", "'-r'", ",", "'requirements-test.txt'", ")", "# Install dev packages into the virtualenv's dist-packages.", "_install_dev_packages", "(", "session", ")", "# Run py.test against the system tests.", "session", ".", "run", "(", "'py.test'", ",", "'-s'", ",", "'tests/system/'", ",", "*", "session", ".", "posargs", ")" ]
Run the system test suite.
[ "Run", "the", "system", "test", "suite", "." ]
python
train
30.5
SKA-ScienceDataProcessor/integration-prototype
sip/platform/logging/sip_logging/sip_logging.py
https://github.com/SKA-ScienceDataProcessor/integration-prototype/blob/8c8006de6ad71dcd44114b0338780738079c87d4/sip/platform/logging/sip_logging/sip_logging.py#L31-L43
def formatTime(self, record, datefmt=None): """Format the log timestamp.""" _seconds_fraction = record.created - int(record.created) _datetime_utc = time.mktime(time.gmtime(record.created)) _datetime_utc += _seconds_fraction _created = self.converter(_datetime_utc) if datefmt: time_string = _created.strftime(datefmt) else: time_string = _created.strftime('%Y-%m-%dT%H:%M:%S.%fZ') time_string = "%s,%03d" % (time_string, record.msecs) return time_string
[ "def", "formatTime", "(", "self", ",", "record", ",", "datefmt", "=", "None", ")", ":", "_seconds_fraction", "=", "record", ".", "created", "-", "int", "(", "record", ".", "created", ")", "_datetime_utc", "=", "time", ".", "mktime", "(", "time", ".", "gmtime", "(", "record", ".", "created", ")", ")", "_datetime_utc", "+=", "_seconds_fraction", "_created", "=", "self", ".", "converter", "(", "_datetime_utc", ")", "if", "datefmt", ":", "time_string", "=", "_created", ".", "strftime", "(", "datefmt", ")", "else", ":", "time_string", "=", "_created", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%S.%fZ'", ")", "time_string", "=", "\"%s,%03d\"", "%", "(", "time_string", ",", "record", ".", "msecs", ")", "return", "time_string" ]
Format the log timestamp.
[ "Format", "the", "log", "timestamp", "." ]
python
train
41.769231
loli/medpy
medpy/metric/histogram.py
https://github.com/loli/medpy/blob/95216b9e22e7ce301f0edf953ee2a2f1b6c6aee5/medpy/metric/histogram.py#L1185-L1215
def quadratic_forms(h1, h2): r""" Quadrativ forms metric. Notes ----- UNDER DEVELOPMENT This distance measure shows very strange behaviour. The expression transpose(h1-h2) * A * (h1-h2) yields egative values that can not be processed by the square root. Some examples:: h1 h2 transpose(h1-h2) * A * (h1-h2) [1, 0] to [0.0, 1.0] : -2.0 [1, 0] to [0.5, 0.5] : 0.0 [1, 0] to [0.6666666666666667, 0.3333333333333333] : 0.111111111111 [1, 0] to [0.75, 0.25] : 0.0833333333333 [1, 0] to [0.8, 0.2] : 0.06 [1, 0] to [0.8333333333333334, 0.16666666666666666] : 0.0444444444444 [1, 0] to [0.8571428571428572, 0.14285714285714285] : 0.0340136054422 [1, 0] to [0.875, 0.125] : 0.0267857142857 [1, 0] to [0.8888888888888888, 0.1111111111111111] : 0.0216049382716 [1, 0] to [0.9, 0.1] : 0.0177777777778 [1, 0] to [1, 0]: 0.0 It is clearly undesireable to recieve negative values and even worse to get a value of zero for other cases than the same histograms. """ h1, h2 = __prepare_histogram(h1, h2) A = __quadratic_forms_matrix_euclidean(h1, h2) return math.sqrt((h1-h2).dot(A.dot(h1-h2)))
[ "def", "quadratic_forms", "(", "h1", ",", "h2", ")", ":", "h1", ",", "h2", "=", "__prepare_histogram", "(", "h1", ",", "h2", ")", "A", "=", "__quadratic_forms_matrix_euclidean", "(", "h1", ",", "h2", ")", "return", "math", ".", "sqrt", "(", "(", "h1", "-", "h2", ")", ".", "dot", "(", "A", ".", "dot", "(", "h1", "-", "h2", ")", ")", ")" ]
r""" Quadrativ forms metric. Notes ----- UNDER DEVELOPMENT This distance measure shows very strange behaviour. The expression transpose(h1-h2) * A * (h1-h2) yields egative values that can not be processed by the square root. Some examples:: h1 h2 transpose(h1-h2) * A * (h1-h2) [1, 0] to [0.0, 1.0] : -2.0 [1, 0] to [0.5, 0.5] : 0.0 [1, 0] to [0.6666666666666667, 0.3333333333333333] : 0.111111111111 [1, 0] to [0.75, 0.25] : 0.0833333333333 [1, 0] to [0.8, 0.2] : 0.06 [1, 0] to [0.8333333333333334, 0.16666666666666666] : 0.0444444444444 [1, 0] to [0.8571428571428572, 0.14285714285714285] : 0.0340136054422 [1, 0] to [0.875, 0.125] : 0.0267857142857 [1, 0] to [0.8888888888888888, 0.1111111111111111] : 0.0216049382716 [1, 0] to [0.9, 0.1] : 0.0177777777778 [1, 0] to [1, 0]: 0.0 It is clearly undesireable to recieve negative values and even worse to get a value of zero for other cases than the same histograms.
[ "r", "Quadrativ", "forms", "metric", ".", "Notes", "-----", "UNDER", "DEVELOPMENT", "This", "distance", "measure", "shows", "very", "strange", "behaviour", ".", "The", "expression", "transpose", "(", "h1", "-", "h2", ")", "*", "A", "*", "(", "h1", "-", "h2", ")", "yields", "egative", "values", "that", "can", "not", "be", "processed", "by", "the", "square", "root", ".", "Some", "examples", "::", "h1", "h2", "transpose", "(", "h1", "-", "h2", ")", "*", "A", "*", "(", "h1", "-", "h2", ")", "[", "1", "0", "]", "to", "[", "0", ".", "0", "1", ".", "0", "]", ":", "-", "2", ".", "0", "[", "1", "0", "]", "to", "[", "0", ".", "5", "0", ".", "5", "]", ":", "0", ".", "0", "[", "1", "0", "]", "to", "[", "0", ".", "6666666666666667", "0", ".", "3333333333333333", "]", ":", "0", ".", "111111111111", "[", "1", "0", "]", "to", "[", "0", ".", "75", "0", ".", "25", "]", ":", "0", ".", "0833333333333", "[", "1", "0", "]", "to", "[", "0", ".", "8", "0", ".", "2", "]", ":", "0", ".", "06", "[", "1", "0", "]", "to", "[", "0", ".", "8333333333333334", "0", ".", "16666666666666666", "]", ":", "0", ".", "0444444444444", "[", "1", "0", "]", "to", "[", "0", ".", "8571428571428572", "0", ".", "14285714285714285", "]", ":", "0", ".", "0340136054422", "[", "1", "0", "]", "to", "[", "0", ".", "875", "0", ".", "125", "]", ":", "0", ".", "0267857142857", "[", "1", "0", "]", "to", "[", "0", ".", "8888888888888888", "0", ".", "1111111111111111", "]", ":", "0", ".", "0216049382716", "[", "1", "0", "]", "to", "[", "0", ".", "9", "0", ".", "1", "]", ":", "0", ".", "0177777777778", "[", "1", "0", "]", "to", "[", "1", "0", "]", ":", "0", ".", "0", "It", "is", "clearly", "undesireable", "to", "recieve", "negative", "values", "and", "even", "worse", "to", "get", "a", "value", "of", "zero", "for", "other", "cases", "than", "the", "same", "histograms", "." ]
python
train
48.225806
google/mobly
mobly/controllers/android_device.py
https://github.com/google/mobly/blob/38ba2cf7d29a20e6a2fca1718eecb337df38db26/mobly/controllers/android_device.py#L274-L287
def get_all_instances(include_fastboot=False): """Create AndroidDevice instances for all attached android devices. Args: include_fastboot: Whether to include devices in bootloader mode or not. Returns: A list of AndroidDevice objects each representing an android device attached to the computer. """ if include_fastboot: serial_list = list_adb_devices() + list_fastboot_devices() return get_instances(serial_list) return get_instances(list_adb_devices())
[ "def", "get_all_instances", "(", "include_fastboot", "=", "False", ")", ":", "if", "include_fastboot", ":", "serial_list", "=", "list_adb_devices", "(", ")", "+", "list_fastboot_devices", "(", ")", "return", "get_instances", "(", "serial_list", ")", "return", "get_instances", "(", "list_adb_devices", "(", ")", ")" ]
Create AndroidDevice instances for all attached android devices. Args: include_fastboot: Whether to include devices in bootloader mode or not. Returns: A list of AndroidDevice objects each representing an android device attached to the computer.
[ "Create", "AndroidDevice", "instances", "for", "all", "attached", "android", "devices", "." ]
python
train
36.214286
kislyuk/aegea
aegea/packages/github3/github.py
https://github.com/kislyuk/aegea/blob/94957e9dba036eae3052e2662c208b259c08399a/aegea/packages/github3/github.py#L326-L334
def gist(self, id_num): """Gets the gist using the specified id number. :param int id_num: (required), unique id of the gist :returns: :class:`Gist <github3.gists.Gist>` """ url = self._build_url('gists', str(id_num)) json = self._json(self._get(url), 200) return Gist(json, self) if json else None
[ "def", "gist", "(", "self", ",", "id_num", ")", ":", "url", "=", "self", ".", "_build_url", "(", "'gists'", ",", "str", "(", "id_num", ")", ")", "json", "=", "self", ".", "_json", "(", "self", ".", "_get", "(", "url", ")", ",", "200", ")", "return", "Gist", "(", "json", ",", "self", ")", "if", "json", "else", "None" ]
Gets the gist using the specified id number. :param int id_num: (required), unique id of the gist :returns: :class:`Gist <github3.gists.Gist>`
[ "Gets", "the", "gist", "using", "the", "specified", "id", "number", "." ]
python
train
38.555556
openstack/networking-cisco
networking_cisco/apps/saf/server/dfa_server.py
https://github.com/openstack/networking-cisco/blob/aa58a30aec25b86f9aa5952b0863045975debfa9/networking_cisco/apps/saf/server/dfa_server.py#L697-L701
def _get_segmentation_id(self, netid, segid, source): """Allocate segmentation id. """ return self.seg_drvr.allocate_segmentation_id(netid, seg_id=segid, source=source)
[ "def", "_get_segmentation_id", "(", "self", ",", "netid", ",", "segid", ",", "source", ")", ":", "return", "self", ".", "seg_drvr", ".", "allocate_segmentation_id", "(", "netid", ",", "seg_id", "=", "segid", ",", "source", "=", "source", ")" ]
Allocate segmentation id.
[ "Allocate", "segmentation", "id", "." ]
python
train
47
eandersson/amqpstorm
amqpstorm/message.py
https://github.com/eandersson/amqpstorm/blob/38330906c0af19eea482f43c5ce79bab98a1e064/amqpstorm/message.py#L99-L113
def ack(self): """Acknowledge Message. :raises AMQPInvalidArgument: Invalid Parameters :raises AMQPChannelError: Raises if the channel encountered an error. :raises AMQPConnectionError: Raises if the connection encountered an error. :return: """ if not self._method: raise AMQPMessageError( 'Message.ack only available on incoming messages' ) self._channel.basic.ack(delivery_tag=self.delivery_tag)
[ "def", "ack", "(", "self", ")", ":", "if", "not", "self", ".", "_method", ":", "raise", "AMQPMessageError", "(", "'Message.ack only available on incoming messages'", ")", "self", ".", "_channel", ".", "basic", ".", "ack", "(", "delivery_tag", "=", "self", ".", "delivery_tag", ")" ]
Acknowledge Message. :raises AMQPInvalidArgument: Invalid Parameters :raises AMQPChannelError: Raises if the channel encountered an error. :raises AMQPConnectionError: Raises if the connection encountered an error. :return:
[ "Acknowledge", "Message", "." ]
python
train
35.133333
lpantano/seqcluster
seqcluster/libs/read.py
https://github.com/lpantano/seqcluster/blob/774e23add8cd4fdc83d626cea3bd1f458e7d060d/seqcluster/libs/read.py#L165-L184
def read_alignment(out_sam, loci, seqs, out_file): """read which seqs map to which loci and return a tab separated file""" hits = defaultdict(list) with open(out_file, "w") as out_handle: samfile = pysam.Samfile(out_sam, "r") for a in samfile.fetch(): if not a.is_unmapped: nm = int([t[1] for t in a.tags if t[0] == "NM"][0]) a = makeBED(a) if not a: continue ref, locus = get_loci(samfile.getrname(int(a.chr)), loci) hits[a.name].append((nm, "%s %s %s %s %s %s" % (a.name, a.name.split("-")[0], locus, ref, a.start, a.end))) for hit in hits.values(): nm = hit[0][0] for l in hit: if nm == l[0]: print(l[1], file=out_handle) return out_file
[ "def", "read_alignment", "(", "out_sam", ",", "loci", ",", "seqs", ",", "out_file", ")", ":", "hits", "=", "defaultdict", "(", "list", ")", "with", "open", "(", "out_file", ",", "\"w\"", ")", "as", "out_handle", ":", "samfile", "=", "pysam", ".", "Samfile", "(", "out_sam", ",", "\"r\"", ")", "for", "a", "in", "samfile", ".", "fetch", "(", ")", ":", "if", "not", "a", ".", "is_unmapped", ":", "nm", "=", "int", "(", "[", "t", "[", "1", "]", "for", "t", "in", "a", ".", "tags", "if", "t", "[", "0", "]", "==", "\"NM\"", "]", "[", "0", "]", ")", "a", "=", "makeBED", "(", "a", ")", "if", "not", "a", ":", "continue", "ref", ",", "locus", "=", "get_loci", "(", "samfile", ".", "getrname", "(", "int", "(", "a", ".", "chr", ")", ")", ",", "loci", ")", "hits", "[", "a", ".", "name", "]", ".", "append", "(", "(", "nm", ",", "\"%s %s %s %s %s %s\"", "%", "(", "a", ".", "name", ",", "a", ".", "name", ".", "split", "(", "\"-\"", ")", "[", "0", "]", ",", "locus", ",", "ref", ",", "a", ".", "start", ",", "a", ".", "end", ")", ")", ")", "for", "hit", "in", "hits", ".", "values", "(", ")", ":", "nm", "=", "hit", "[", "0", "]", "[", "0", "]", "for", "l", "in", "hit", ":", "if", "nm", "==", "l", "[", "0", "]", ":", "print", "(", "l", "[", "1", "]", ",", "file", "=", "out_handle", ")", "return", "out_file" ]
read which seqs map to which loci and return a tab separated file
[ "read", "which", "seqs", "map", "to", "which", "loci", "and", "return", "a", "tab", "separated", "file" ]
python
train
41.85
sethmlarson/virtualbox-python
virtualbox/library.py
https://github.com/sethmlarson/virtualbox-python/blob/706c8e3f6e3aee17eb06458e73cbb4bc2d37878b/virtualbox/library.py#L25488-L25518
def take_screen_shot_to_array(self, screen_id, width, height, bitmap_format): """Takes a guest screen shot of the requested size and format and returns it as an array of bytes. in screen_id of type int The guest monitor to take screenshot from. in width of type int Desired image width. in height of type int Desired image height. in bitmap_format of type :class:`BitmapFormat` The requested format. return screen_data of type str Array with resulting screen data. """ if not isinstance(screen_id, baseinteger): raise TypeError("screen_id can only be an instance of type baseinteger") if not isinstance(width, baseinteger): raise TypeError("width can only be an instance of type baseinteger") if not isinstance(height, baseinteger): raise TypeError("height can only be an instance of type baseinteger") if not isinstance(bitmap_format, BitmapFormat): raise TypeError("bitmap_format can only be an instance of type BitmapFormat") screen_data = self._call("takeScreenShotToArray", in_p=[screen_id, width, height, bitmap_format]) return screen_data
[ "def", "take_screen_shot_to_array", "(", "self", ",", "screen_id", ",", "width", ",", "height", ",", "bitmap_format", ")", ":", "if", "not", "isinstance", "(", "screen_id", ",", "baseinteger", ")", ":", "raise", "TypeError", "(", "\"screen_id can only be an instance of type baseinteger\"", ")", "if", "not", "isinstance", "(", "width", ",", "baseinteger", ")", ":", "raise", "TypeError", "(", "\"width can only be an instance of type baseinteger\"", ")", "if", "not", "isinstance", "(", "height", ",", "baseinteger", ")", ":", "raise", "TypeError", "(", "\"height can only be an instance of type baseinteger\"", ")", "if", "not", "isinstance", "(", "bitmap_format", ",", "BitmapFormat", ")", ":", "raise", "TypeError", "(", "\"bitmap_format can only be an instance of type BitmapFormat\"", ")", "screen_data", "=", "self", ".", "_call", "(", "\"takeScreenShotToArray\"", ",", "in_p", "=", "[", "screen_id", ",", "width", ",", "height", ",", "bitmap_format", "]", ")", "return", "screen_data" ]
Takes a guest screen shot of the requested size and format and returns it as an array of bytes. in screen_id of type int The guest monitor to take screenshot from. in width of type int Desired image width. in height of type int Desired image height. in bitmap_format of type :class:`BitmapFormat` The requested format. return screen_data of type str Array with resulting screen data.
[ "Takes", "a", "guest", "screen", "shot", "of", "the", "requested", "size", "and", "format", "and", "returns", "it", "as", "an", "array", "of", "bytes", "." ]
python
train
40.709677
alejandrobll/py-sphviewer
sphviewer/Scene.py
https://github.com/alejandrobll/py-sphviewer/blob/f198bd9ed5adfb58ebdf66d169206e609fd46e42/sphviewer/Scene.py#L130-L137
def get_scene(self): """ - get_scene(): It return the x and y position, the smoothing length of the particles and the index of the particles that are active in the scene. In principle this is an internal function and you don't need this data. """ return self._x, self._y, self._hsml, self._m, self._kview
[ "def", "get_scene", "(", "self", ")", ":", "return", "self", ".", "_x", ",", "self", ".", "_y", ",", "self", ".", "_hsml", ",", "self", ".", "_m", ",", "self", ".", "_kview" ]
- get_scene(): It return the x and y position, the smoothing length of the particles and the index of the particles that are active in the scene. In principle this is an internal function and you don't need this data.
[ "-", "get_scene", "()", ":", "It", "return", "the", "x", "and", "y", "position", "the", "smoothing", "length", "of", "the", "particles", "and", "the", "index", "of", "the", "particles", "that", "are", "active", "in", "the", "scene", ".", "In", "principle", "this", "is", "an", "internal", "function", "and", "you", "don", "t", "need", "this", "data", "." ]
python
train
44.625
proycon/clam
clam/common/parameters.py
https://github.com/proycon/clam/blob/09d15cfc26d7cbe0f5976cdd5424dc446d10dbf3/clam/common/parameters.py#L212-L256
def fromxml(node): """Create a Parameter instance (of any class derived from AbstractParameter!) given its XML description. Node can be a string containing XML or an lxml _Element""" if not isinstance(node,ElementTree._Element): #pylint: disable=protected-access node = ElementTree.parse(StringIO(node)).getroot() if node.tag in globals(): id = '' paramflag = '' name = '' description = '' kwargs = {} error = None for attrib, value in node.attrib.items(): if attrib == 'id': id = value elif attrib == 'paramflag': paramflag = value elif attrib == 'name': name = value elif attrib == 'description': description = value elif attrib == 'error': error = value else: kwargs[attrib] = value #extra parsing for choice parameter (TODO: put in a better spot) if 'multi' in kwargs and (kwargs['multi'] == 'yes' or kwargs['multi'] == '1' or kwargs['multi'] == 'true'): kwargs['value'] = [] for subtag in node: #parse possible subtags if subtag.tag == 'choice': #extra parsing for choice parameter (TODO: put in a better spot) if 'choices' not in kwargs: kwargs['choices'] = {} kwargs['choices'][subtag.attrib['id']] = subtag.text if 'selected' in subtag.attrib and (subtag.attrib['selected'] == '1' or subtag.attrib['selected'] == 'yes'): if 'multi' in kwargs and (kwargs['multi'] == 'yes' or kwargs['multi'] == '1' or kwargs['multi'] == 'true'): kwargs['value'].append(subtag.attrib['id']) else: kwargs['value'] = subtag.attrib['id'] parameter = globals()[node.tag](id, name, description, **kwargs) #return parameter instance if error: parameter.error = error #prevent error from getting reset return parameter else: raise Exception("No such parameter exists: " + node.tag)
[ "def", "fromxml", "(", "node", ")", ":", "if", "not", "isinstance", "(", "node", ",", "ElementTree", ".", "_Element", ")", ":", "#pylint: disable=protected-access", "node", "=", "ElementTree", ".", "parse", "(", "StringIO", "(", "node", ")", ")", ".", "getroot", "(", ")", "if", "node", ".", "tag", "in", "globals", "(", ")", ":", "id", "=", "''", "paramflag", "=", "''", "name", "=", "''", "description", "=", "''", "kwargs", "=", "{", "}", "error", "=", "None", "for", "attrib", ",", "value", "in", "node", ".", "attrib", ".", "items", "(", ")", ":", "if", "attrib", "==", "'id'", ":", "id", "=", "value", "elif", "attrib", "==", "'paramflag'", ":", "paramflag", "=", "value", "elif", "attrib", "==", "'name'", ":", "name", "=", "value", "elif", "attrib", "==", "'description'", ":", "description", "=", "value", "elif", "attrib", "==", "'error'", ":", "error", "=", "value", "else", ":", "kwargs", "[", "attrib", "]", "=", "value", "#extra parsing for choice parameter (TODO: put in a better spot)", "if", "'multi'", "in", "kwargs", "and", "(", "kwargs", "[", "'multi'", "]", "==", "'yes'", "or", "kwargs", "[", "'multi'", "]", "==", "'1'", "or", "kwargs", "[", "'multi'", "]", "==", "'true'", ")", ":", "kwargs", "[", "'value'", "]", "=", "[", "]", "for", "subtag", "in", "node", ":", "#parse possible subtags", "if", "subtag", ".", "tag", "==", "'choice'", ":", "#extra parsing for choice parameter (TODO: put in a better spot)", "if", "'choices'", "not", "in", "kwargs", ":", "kwargs", "[", "'choices'", "]", "=", "{", "}", "kwargs", "[", "'choices'", "]", "[", "subtag", ".", "attrib", "[", "'id'", "]", "]", "=", "subtag", ".", "text", "if", "'selected'", "in", "subtag", ".", "attrib", "and", "(", "subtag", ".", "attrib", "[", "'selected'", "]", "==", "'1'", "or", "subtag", ".", "attrib", "[", "'selected'", "]", "==", "'yes'", ")", ":", "if", "'multi'", "in", "kwargs", "and", "(", "kwargs", "[", "'multi'", "]", "==", "'yes'", "or", "kwargs", "[", "'multi'", "]", "==", "'1'", "or", 
"kwargs", "[", "'multi'", "]", "==", "'true'", ")", ":", "kwargs", "[", "'value'", "]", ".", "append", "(", "subtag", ".", "attrib", "[", "'id'", "]", ")", "else", ":", "kwargs", "[", "'value'", "]", "=", "subtag", ".", "attrib", "[", "'id'", "]", "parameter", "=", "globals", "(", ")", "[", "node", ".", "tag", "]", "(", "id", ",", "name", ",", "description", ",", "*", "*", "kwargs", ")", "#return parameter instance", "if", "error", ":", "parameter", ".", "error", "=", "error", "#prevent error from getting reset", "return", "parameter", "else", ":", "raise", "Exception", "(", "\"No such parameter exists: \"", "+", "node", ".", "tag", ")" ]
Create a Parameter instance (of any class derived from AbstractParameter!) given its XML description. Node can be a string containing XML or an lxml _Element
[ "Create", "a", "Parameter", "instance", "(", "of", "any", "class", "derived", "from", "AbstractParameter!", ")", "given", "its", "XML", "description", ".", "Node", "can", "be", "a", "string", "containing", "XML", "or", "an", "lxml", "_Element" ]
python
train
50.288889
pyviz/holoviews
holoviews/core/options.py
https://github.com/pyviz/holoviews/blob/ae0dd2f3de448b0ca5e9065aabd6ef8d84c7e655/holoviews/core/options.py#L515-L517
def cyclic(self): "Returns True if the options cycle, otherwise False" return any(isinstance(val, Cycle) for val in self.kwargs.values())
[ "def", "cyclic", "(", "self", ")", ":", "return", "any", "(", "isinstance", "(", "val", ",", "Cycle", ")", "for", "val", "in", "self", ".", "kwargs", ".", "values", "(", ")", ")" ]
Returns True if the options cycle, otherwise False
[ "Returns", "True", "if", "the", "options", "cycle", "otherwise", "False" ]
python
train
50.333333
apple/turicreate
deps/src/libxml2-2.9.1/python/libxml2.py
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/deps/src/libxml2-2.9.1/python/libxml2.py#L1531-L1537
def nodePop(ctxt): """Pops the top element node from the node stack """ if ctxt is None: ctxt__o = None else: ctxt__o = ctxt._o ret = libxml2mod.nodePop(ctxt__o) if ret is None:raise treeError('nodePop() failed') return xmlNode(_obj=ret)
[ "def", "nodePop", "(", "ctxt", ")", ":", "if", "ctxt", "is", "None", ":", "ctxt__o", "=", "None", "else", ":", "ctxt__o", "=", "ctxt", ".", "_o", "ret", "=", "libxml2mod", ".", "nodePop", "(", "ctxt__o", ")", "if", "ret", "is", "None", ":", "raise", "treeError", "(", "'nodePop() failed'", ")", "return", "xmlNode", "(", "_obj", "=", "ret", ")" ]
Pops the top element node from the node stack
[ "Pops", "the", "top", "element", "node", "from", "the", "node", "stack" ]
python
train
36.428571
dvdotsenko/jsonrpc.py
jsonrpcparts/serializers.py
https://github.com/dvdotsenko/jsonrpc.py/blob/19673edd77a9518ac5655bd407f6b93ffbb2cafc/jsonrpcparts/serializers.py#L57-L68
def json_loads(cls, s, **kwargs): """ A rewrap of json.loads done for one reason - to inject a custom `cls` kwarg :param s: :param kwargs: :return: :rtype: dict """ if 'cls' not in kwargs: kwargs['cls'] = cls.json_decoder return json.loads(s, **kwargs)
[ "def", "json_loads", "(", "cls", ",", "s", ",", "*", "*", "kwargs", ")", ":", "if", "'cls'", "not", "in", "kwargs", ":", "kwargs", "[", "'cls'", "]", "=", "cls", ".", "json_decoder", "return", "json", ".", "loads", "(", "s", ",", "*", "*", "kwargs", ")" ]
A rewrap of json.loads done for one reason - to inject a custom `cls` kwarg :param s: :param kwargs: :return: :rtype: dict
[ "A", "rewrap", "of", "json", ".", "loads", "done", "for", "one", "reason", "-", "to", "inject", "a", "custom", "cls", "kwarg" ]
python
train
27.166667
secynic/ipwhois
ipwhois/asn.py
https://github.com/secynic/ipwhois/blob/b5d634d36b0b942d538d38d77b3bdcd815f155a0/ipwhois/asn.py#L772-L780
def _get_nets_radb(self, *args, **kwargs): """ Deprecated. This will be removed in a future release. """ from warnings import warn warn('ASNOrigin._get_nets_radb() has been deprecated and will be ' 'removed. You should now use ASNOrigin.get_nets_radb().') return self.get_nets_radb(*args, **kwargs)
[ "def", "_get_nets_radb", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", "warnings", "import", "warn", "warn", "(", "'ASNOrigin._get_nets_radb() has been deprecated and will be '", "'removed. You should now use ASNOrigin.get_nets_radb().'", ")", "return", "self", ".", "get_nets_radb", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Deprecated. This will be removed in a future release.
[ "Deprecated", ".", "This", "will", "be", "removed", "in", "a", "future", "release", "." ]
python
train
39.111111
tyiannak/pyAudioAnalysis
pyAudioAnalysis/audioFeatureExtraction.py
https://github.com/tyiannak/pyAudioAnalysis/blob/e3da991e7247492deba50648a4c7c0f41e684af4/pyAudioAnalysis/audioFeatureExtraction.py#L74-L87
def stSpectralEntropy(X, n_short_blocks=10): """Computes the spectral entropy""" L = len(X) # number of frame samples Eol = numpy.sum(X ** 2) # total spectral energy sub_win_len = int(numpy.floor(L / n_short_blocks)) # length of sub-frame if L != sub_win_len * n_short_blocks: X = X[0:sub_win_len * n_short_blocks] sub_wins = X.reshape(sub_win_len, n_short_blocks, order='F').copy() # define sub-frames (using matrix reshape) s = numpy.sum(sub_wins ** 2, axis=0) / (Eol + eps) # compute spectral sub-energies En = -numpy.sum(s*numpy.log2(s + eps)) # compute spectral entropy return En
[ "def", "stSpectralEntropy", "(", "X", ",", "n_short_blocks", "=", "10", ")", ":", "L", "=", "len", "(", "X", ")", "# number of frame samples", "Eol", "=", "numpy", ".", "sum", "(", "X", "**", "2", ")", "# total spectral energy", "sub_win_len", "=", "int", "(", "numpy", ".", "floor", "(", "L", "/", "n_short_blocks", ")", ")", "# length of sub-frame", "if", "L", "!=", "sub_win_len", "*", "n_short_blocks", ":", "X", "=", "X", "[", "0", ":", "sub_win_len", "*", "n_short_blocks", "]", "sub_wins", "=", "X", ".", "reshape", "(", "sub_win_len", ",", "n_short_blocks", ",", "order", "=", "'F'", ")", ".", "copy", "(", ")", "# define sub-frames (using matrix reshape)", "s", "=", "numpy", ".", "sum", "(", "sub_wins", "**", "2", ",", "axis", "=", "0", ")", "/", "(", "Eol", "+", "eps", ")", "# compute spectral sub-energies", "En", "=", "-", "numpy", ".", "sum", "(", "s", "*", "numpy", ".", "log2", "(", "s", "+", "eps", ")", ")", "# compute spectral entropy", "return", "En" ]
Computes the spectral entropy
[ "Computes", "the", "spectral", "entropy" ]
python
train
50.857143
neithere/django-navigation
navigation/resolvers.py
https://github.com/neithere/django-navigation/blob/aff8d671a8431c84dde65cba6236ea8c16a08b4d/navigation/resolvers.py#L150-L184
def _resolve_by_callback(request, url, urlconf=None): """ Finds a view function by urlconf. If the function has attribute 'navigation', it is used as breadcrumb title. Such title can be either a callable or an object with `__unicode__` attribute. If it is callable, it must follow the views API (i.e. the only required argument is request object). It is also expected to return a `unicode` value. """ try: callback, args, kwargs = _resolve_url(url, request, urlconf=urlconf) except urlresolvers.Resolver404: return None bc = getattr(callback, 'breadcrumb', None) if bc is None: bc = getattr(callback, 'navigation', None) if bc is not None: # pragma: nocover import warnings warnings.warn('The "navigation" attribute is deprecated, use ' '"breadcrumb" instead.') if bc is None: return None if hasattr(bc, '__call__'): # the breadcrumb is a function with an API identical to that of views. try: title = bc(request, *args, **kwargs) except http.Http404: return None assert isinstance(title, basestring), ( 'Breadcrumb function must return Unicode, not %s' % title) else: title = unicode(bc) # handle i18n proxy objects return Crumb(url, title)
[ "def", "_resolve_by_callback", "(", "request", ",", "url", ",", "urlconf", "=", "None", ")", ":", "try", ":", "callback", ",", "args", ",", "kwargs", "=", "_resolve_url", "(", "url", ",", "request", ",", "urlconf", "=", "urlconf", ")", "except", "urlresolvers", ".", "Resolver404", ":", "return", "None", "bc", "=", "getattr", "(", "callback", ",", "'breadcrumb'", ",", "None", ")", "if", "bc", "is", "None", ":", "bc", "=", "getattr", "(", "callback", ",", "'navigation'", ",", "None", ")", "if", "bc", "is", "not", "None", ":", "# pragma: nocover", "import", "warnings", "warnings", ".", "warn", "(", "'The \"navigation\" attribute is deprecated, use '", "'\"breadcrumb\" instead.'", ")", "if", "bc", "is", "None", ":", "return", "None", "if", "hasattr", "(", "bc", ",", "'__call__'", ")", ":", "# the breadcrumb is a function with an API identical to that of views.", "try", ":", "title", "=", "bc", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "http", ".", "Http404", ":", "return", "None", "assert", "isinstance", "(", "title", ",", "basestring", ")", ",", "(", "'Breadcrumb function must return Unicode, not %s'", "%", "title", ")", "else", ":", "title", "=", "unicode", "(", "bc", ")", "# handle i18n proxy objects", "return", "Crumb", "(", "url", ",", "title", ")" ]
Finds a view function by urlconf. If the function has attribute 'navigation', it is used as breadcrumb title. Such title can be either a callable or an object with `__unicode__` attribute. If it is callable, it must follow the views API (i.e. the only required argument is request object). It is also expected to return a `unicode` value.
[ "Finds", "a", "view", "function", "by", "urlconf", ".", "If", "the", "function", "has", "attribute", "navigation", "it", "is", "used", "as", "breadcrumb", "title", ".", "Such", "title", "can", "be", "either", "a", "callable", "or", "an", "object", "with", "__unicode__", "attribute", ".", "If", "it", "is", "callable", "it", "must", "follow", "the", "views", "API", "(", "i", ".", "e", ".", "the", "only", "required", "argument", "is", "request", "object", ")", ".", "It", "is", "also", "expected", "to", "return", "a", "unicode", "value", "." ]
python
train
38.257143
abarker/pdfCropMargins
src/pdfCropMargins/calculate_bounding_boxes.py
https://github.com/abarker/pdfCropMargins/blob/55aca874613750ebf4ae69fd8851bdbb7696d6ac/src/pdfCropMargins/calculate_bounding_boxes.py#L99-L113
def correct_bounding_box_list_for_nonzero_origin(bbox_list, full_box_list): """The bounding box calculated from an image has coordinates relative to the lower-left point in the PDF being at zero. Similarly, Ghostscript reports a bounding box relative to a zero lower-left point. If the MediaBox (or full page box) has been shifted, like when cropping a previously cropped document, then we need to correct the bounding box by an additive translation on all the points.""" corrected_box_list = [] for bbox, full_box in zip(bbox_list, full_box_list): left_x = full_box[0] lower_y = full_box[1] corrected_box_list.append([bbox[0]+left_x, bbox[1]+lower_y, bbox[2]+left_x, bbox[3]+lower_y]) return corrected_box_list
[ "def", "correct_bounding_box_list_for_nonzero_origin", "(", "bbox_list", ",", "full_box_list", ")", ":", "corrected_box_list", "=", "[", "]", "for", "bbox", ",", "full_box", "in", "zip", "(", "bbox_list", ",", "full_box_list", ")", ":", "left_x", "=", "full_box", "[", "0", "]", "lower_y", "=", "full_box", "[", "1", "]", "corrected_box_list", ".", "append", "(", "[", "bbox", "[", "0", "]", "+", "left_x", ",", "bbox", "[", "1", "]", "+", "lower_y", ",", "bbox", "[", "2", "]", "+", "left_x", ",", "bbox", "[", "3", "]", "+", "lower_y", "]", ")", "return", "corrected_box_list" ]
The bounding box calculated from an image has coordinates relative to the lower-left point in the PDF being at zero. Similarly, Ghostscript reports a bounding box relative to a zero lower-left point. If the MediaBox (or full page box) has been shifted, like when cropping a previously cropped document, then we need to correct the bounding box by an additive translation on all the points.
[ "The", "bounding", "box", "calculated", "from", "an", "image", "has", "coordinates", "relative", "to", "the", "lower", "-", "left", "point", "in", "the", "PDF", "being", "at", "zero", ".", "Similarly", "Ghostscript", "reports", "a", "bounding", "box", "relative", "to", "a", "zero", "lower", "-", "left", "point", ".", "If", "the", "MediaBox", "(", "or", "full", "page", "box", ")", "has", "been", "shifted", "like", "when", "cropping", "a", "previously", "cropped", "document", "then", "we", "need", "to", "correct", "the", "bounding", "box", "by", "an", "additive", "translation", "on", "all", "the", "points", "." ]
python
train
52.866667
slarse/pdfebc-core
pdfebc_core/config_utils.py
https://github.com/slarse/pdfebc-core/blob/fc40857bc42365b7434714333e37d7a3487603a0/pdfebc_core/config_utils.py#L190-L203
def config_to_string(config): """Nice output string for the config, which is a nested defaultdict. Args: config (defaultdict(defaultdict)): The configuration information. Returns: str: A human-readable output string detailing the contents of the config. """ output = [] for section, section_content in config.items(): output.append("[{}]".format(section)) for option, option_value in section_content.items(): output.append("{} = {}".format(option, option_value)) return "\n".join(output)
[ "def", "config_to_string", "(", "config", ")", ":", "output", "=", "[", "]", "for", "section", ",", "section_content", "in", "config", ".", "items", "(", ")", ":", "output", ".", "append", "(", "\"[{}]\"", ".", "format", "(", "section", ")", ")", "for", "option", ",", "option_value", "in", "section_content", ".", "items", "(", ")", ":", "output", ".", "append", "(", "\"{} = {}\"", ".", "format", "(", "option", ",", "option_value", ")", ")", "return", "\"\\n\"", ".", "join", "(", "output", ")" ]
Nice output string for the config, which is a nested defaultdict. Args: config (defaultdict(defaultdict)): The configuration information. Returns: str: A human-readable output string detailing the contents of the config.
[ "Nice", "output", "string", "for", "the", "config", "which", "is", "a", "nested", "defaultdict", "." ]
python
train
39.071429
csirtgadgets/bearded-avenger-sdk-py
cifsdk/utils/zhelper.py
https://github.com/csirtgadgets/bearded-avenger-sdk-py/blob/2b3e96cb2e7703ee0402811096da8265a740f378/cifsdk/utils/zhelper.py#L44-L70
def zthread_fork(ctx, func, *args, **kwargs): """ Create an attached thread. An attached thread gets a ctx and a PAIR pipe back to its parent. It must monitor its pipe, and exit if the pipe becomes unreadable. Returns pipe, or NULL if there was an error. """ a = ctx.socket(zmq.PAIR) a.setsockopt(zmq.LINGER, 0) a.setsockopt(zmq.RCVHWM, 100) a.setsockopt(zmq.SNDHWM, 100) a.setsockopt(zmq.SNDTIMEO, 5000) a.setsockopt(zmq.RCVTIMEO, 5000) b = ctx.socket(zmq.PAIR) b.setsockopt(zmq.LINGER, 0) b.setsockopt(zmq.RCVHWM, 100) b.setsockopt(zmq.SNDHWM, 100) b.setsockopt(zmq.SNDTIMEO, 5000) a.setsockopt(zmq.RCVTIMEO, 5000) iface = "inproc://%s" % binascii.hexlify(os.urandom(8)) a.bind(iface) b.connect(iface) thread = threading.Thread(target=func, args=((ctx, b) + args), kwargs=kwargs) thread.daemon = False thread.start() return a
[ "def", "zthread_fork", "(", "ctx", ",", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "a", "=", "ctx", ".", "socket", "(", "zmq", ".", "PAIR", ")", "a", ".", "setsockopt", "(", "zmq", ".", "LINGER", ",", "0", ")", "a", ".", "setsockopt", "(", "zmq", ".", "RCVHWM", ",", "100", ")", "a", ".", "setsockopt", "(", "zmq", ".", "SNDHWM", ",", "100", ")", "a", ".", "setsockopt", "(", "zmq", ".", "SNDTIMEO", ",", "5000", ")", "a", ".", "setsockopt", "(", "zmq", ".", "RCVTIMEO", ",", "5000", ")", "b", "=", "ctx", ".", "socket", "(", "zmq", ".", "PAIR", ")", "b", ".", "setsockopt", "(", "zmq", ".", "LINGER", ",", "0", ")", "b", ".", "setsockopt", "(", "zmq", ".", "RCVHWM", ",", "100", ")", "b", ".", "setsockopt", "(", "zmq", ".", "SNDHWM", ",", "100", ")", "b", ".", "setsockopt", "(", "zmq", ".", "SNDTIMEO", ",", "5000", ")", "a", ".", "setsockopt", "(", "zmq", ".", "RCVTIMEO", ",", "5000", ")", "iface", "=", "\"inproc://%s\"", "%", "binascii", ".", "hexlify", "(", "os", ".", "urandom", "(", "8", ")", ")", "a", ".", "bind", "(", "iface", ")", "b", ".", "connect", "(", "iface", ")", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "func", ",", "args", "=", "(", "(", "ctx", ",", "b", ")", "+", "args", ")", ",", "kwargs", "=", "kwargs", ")", "thread", ".", "daemon", "=", "False", "thread", ".", "start", "(", ")", "return", "a" ]
Create an attached thread. An attached thread gets a ctx and a PAIR pipe back to its parent. It must monitor its pipe, and exit if the pipe becomes unreadable. Returns pipe, or NULL if there was an error.
[ "Create", "an", "attached", "thread", ".", "An", "attached", "thread", "gets", "a", "ctx", "and", "a", "PAIR", "pipe", "back", "to", "its", "parent", ".", "It", "must", "monitor", "its", "pipe", "and", "exit", "if", "the", "pipe", "becomes", "unreadable", ".", "Returns", "pipe", "or", "NULL", "if", "there", "was", "an", "error", "." ]
python
train
33.296296
cltl/KafNafParserPy
KafNafParserPy/KafNafParserMod.py
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/KafNafParserMod.py#L1133-L1140
def set_header(self,header): """ Sets the header of the object @type header: L{CHeader} @param header: the header object """ self.header = header self.root.insert(0,header.get_node())
[ "def", "set_header", "(", "self", ",", "header", ")", ":", "self", ".", "header", "=", "header", "self", ".", "root", ".", "insert", "(", "0", ",", "header", ".", "get_node", "(", ")", ")" ]
Sets the header of the object @type header: L{CHeader} @param header: the header object
[ "Sets", "the", "header", "of", "the", "object" ]
python
train
29
phoebe-project/phoebe2
phoebe/constraints/builtin.py
https://github.com/phoebe-project/phoebe2/blob/e64b8be683977064e2d55dd1b3ac400f64c3e379/phoebe/constraints/builtin.py#L145-L163
def requiv_to_pot_contact(requiv, q, sma, compno=1): """ :param requiv: user-provided equivalent radius :param q: mass ratio :param sma: semi-major axis (d = sma because we explicitly assume circular orbits for contacts) :param compno: 1 for primary, 2 for secondary :return: potential and fillout factor """ logger.debug("requiv_to_pot_contact(requiv={}, q={}, sma={}, compno={})".format(requiv, q, sma, compno)) # since the functions called here work with normalized r, we need to set d=D=sma=1. # or provide sma as a function parameter and normalize r here as requiv = requiv/sma requiv = requiv/sma vequiv = 4./3*np.pi*requiv**3 d = 1. F = 1. logger.debug("libphoebe.roche_contact_Omega_at_partial_vol(vol={}, phi=pi/2, q={}, d={}, choice={})".format(vequiv, q, d, compno-1)) return libphoebe.roche_contact_Omega_at_partial_vol(vequiv, np.pi/2, q, d, choice=compno-1)
[ "def", "requiv_to_pot_contact", "(", "requiv", ",", "q", ",", "sma", ",", "compno", "=", "1", ")", ":", "logger", ".", "debug", "(", "\"requiv_to_pot_contact(requiv={}, q={}, sma={}, compno={})\"", ".", "format", "(", "requiv", ",", "q", ",", "sma", ",", "compno", ")", ")", "# since the functions called here work with normalized r, we need to set d=D=sma=1.", "# or provide sma as a function parameter and normalize r here as requiv = requiv/sma", "requiv", "=", "requiv", "/", "sma", "vequiv", "=", "4.", "/", "3", "*", "np", ".", "pi", "*", "requiv", "**", "3", "d", "=", "1.", "F", "=", "1.", "logger", ".", "debug", "(", "\"libphoebe.roche_contact_Omega_at_partial_vol(vol={}, phi=pi/2, q={}, d={}, choice={})\"", ".", "format", "(", "vequiv", ",", "q", ",", "d", ",", "compno", "-", "1", ")", ")", "return", "libphoebe", ".", "roche_contact_Omega_at_partial_vol", "(", "vequiv", ",", "np", ".", "pi", "/", "2", ",", "q", ",", "d", ",", "choice", "=", "compno", "-", "1", ")" ]
:param requiv: user-provided equivalent radius :param q: mass ratio :param sma: semi-major axis (d = sma because we explicitly assume circular orbits for contacts) :param compno: 1 for primary, 2 for secondary :return: potential and fillout factor
[ ":", "param", "requiv", ":", "user", "-", "provided", "equivalent", "radius", ":", "param", "q", ":", "mass", "ratio", ":", "param", "sma", ":", "semi", "-", "major", "axis", "(", "d", "=", "sma", "because", "we", "explicitly", "assume", "circular", "orbits", "for", "contacts", ")", ":", "param", "compno", ":", "1", "for", "primary", "2", "for", "secondary", ":", "return", ":", "potential", "and", "fillout", "factor" ]
python
train
48.368421
iwanbk/nyamuk
nyamuk/nyamuk.py
https://github.com/iwanbk/nyamuk/blob/ac4c6028de288a4c8e0b332ae16eae889deb643d/nyamuk/nyamuk.py#L89-L95
def check_keepalive(self): """Send keepalive/PING if necessary.""" if self.sock != NC.INVALID_SOCKET and time.time() - self.last_msg_out >= self.keep_alive: if self.state == NC.CS_CONNECTED: self.send_pingreq() else: self.socket_close()
[ "def", "check_keepalive", "(", "self", ")", ":", "if", "self", ".", "sock", "!=", "NC", ".", "INVALID_SOCKET", "and", "time", ".", "time", "(", ")", "-", "self", ".", "last_msg_out", ">=", "self", ".", "keep_alive", ":", "if", "self", ".", "state", "==", "NC", ".", "CS_CONNECTED", ":", "self", ".", "send_pingreq", "(", ")", "else", ":", "self", ".", "socket_close", "(", ")" ]
Send keepalive/PING if necessary.
[ "Send", "keepalive", "/", "PING", "if", "necessary", "." ]
python
train
43.142857
bapakode/OmMongo
ommongo/query.py
https://github.com/bapakode/OmMongo/blob/52b5a5420516dc709f2d2eb065818c7973991ce3/ommongo/query.py#L381-L394
def query_bypass(self, query, raw_output=True): ''' Bypass query meaning that field check and validation is skipped, then query object directly executed by pymongo. :param raw_output: Skip OmMongo ORM layer (default: True) ''' if not isinstance(query, dict): raise BadQueryException('Query must be dict.') self.__query = query if raw_output: self._raw_output = True return self.__get_query_result().cursor else: return self
[ "def", "query_bypass", "(", "self", ",", "query", ",", "raw_output", "=", "True", ")", ":", "if", "not", "isinstance", "(", "query", ",", "dict", ")", ":", "raise", "BadQueryException", "(", "'Query must be dict.'", ")", "self", ".", "__query", "=", "query", "if", "raw_output", ":", "self", ".", "_raw_output", "=", "True", "return", "self", ".", "__get_query_result", "(", ")", ".", "cursor", "else", ":", "return", "self" ]
Bypass query meaning that field check and validation is skipped, then query object directly executed by pymongo. :param raw_output: Skip OmMongo ORM layer (default: True)
[ "Bypass", "query", "meaning", "that", "field", "check", "and", "validation", "is", "skipped", "then", "query", "object", "directly", "executed", "by", "pymongo", ".", ":", "param", "raw_output", ":", "Skip", "OmMongo", "ORM", "layer", "(", "default", ":", "True", ")" ]
python
train
37.714286
pytroll/pyspectral
rsr_convert_scripts/slstr_rsr.py
https://github.com/pytroll/pyspectral/blob/fd296c0e0bdf5364fa180134a1292665d6bc50a3/rsr_convert_scripts/slstr_rsr.py#L69-L79
def _load(self, scale=1.0): """Load the SLSTR relative spectral responses """ LOG.debug("File: %s", str(self.requested_band_filename)) ncf = Dataset(self.requested_band_filename, 'r') wvl = ncf.variables['wavelength'][:] * scale resp = ncf.variables['response'][:] self.rsr = {'wavelength': wvl, 'response': resp}
[ "def", "_load", "(", "self", ",", "scale", "=", "1.0", ")", ":", "LOG", ".", "debug", "(", "\"File: %s\"", ",", "str", "(", "self", ".", "requested_band_filename", ")", ")", "ncf", "=", "Dataset", "(", "self", ".", "requested_band_filename", ",", "'r'", ")", "wvl", "=", "ncf", ".", "variables", "[", "'wavelength'", "]", "[", ":", "]", "*", "scale", "resp", "=", "ncf", ".", "variables", "[", "'response'", "]", "[", ":", "]", "self", ".", "rsr", "=", "{", "'wavelength'", ":", "wvl", ",", "'response'", ":", "resp", "}" ]
Load the SLSTR relative spectral responses
[ "Load", "the", "SLSTR", "relative", "spectral", "responses" ]
python
train
32.909091
deepmipt/DeepPavlov
deeppavlov/models/classifiers/utils.py
https://github.com/deepmipt/DeepPavlov/blob/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c/deeppavlov/models/classifiers/utils.py#L77-L89
def proba2onehot(proba: [list, np.ndarray], confident_threshold: float, classes: [list, np.ndarray]) -> np.ndarray: """ Convert vectors of probabilities to one-hot representations using confident threshold Args: proba: samples where each sample is a vector of probabilities to belong with given classes confident_threshold: boundary of probability to belong with a class classes: array of classes' names Returns: 2d array with one-hot representation of given samples """ return labels2onehot(proba2labels(proba, confident_threshold, classes), classes)
[ "def", "proba2onehot", "(", "proba", ":", "[", "list", ",", "np", ".", "ndarray", "]", ",", "confident_threshold", ":", "float", ",", "classes", ":", "[", "list", ",", "np", ".", "ndarray", "]", ")", "->", "np", ".", "ndarray", ":", "return", "labels2onehot", "(", "proba2labels", "(", "proba", ",", "confident_threshold", ",", "classes", ")", ",", "classes", ")" ]
Convert vectors of probabilities to one-hot representations using confident threshold Args: proba: samples where each sample is a vector of probabilities to belong with given classes confident_threshold: boundary of probability to belong with a class classes: array of classes' names Returns: 2d array with one-hot representation of given samples
[ "Convert", "vectors", "of", "probabilities", "to", "one", "-", "hot", "representations", "using", "confident", "threshold" ]
python
test
46
ngmarchant/oasis
oasis/oasis.py
https://github.com/ngmarchant/oasis/blob/28a037a8924b85ae97db8a93960a910a219d6a4a/oasis/oasis.py#L115-L131
def update(self, ell, k): """Update the posterior and estimates after a label is sampled Parameters ---------- ell : int sampled label: 0 or 1 k : int index of stratum where label was sampled """ self.alpha_[k] += ell self.beta_[k] += 1 - ell self._calc_theta() if self.store_variance: self._calc_var_theta()
[ "def", "update", "(", "self", ",", "ell", ",", "k", ")", ":", "self", ".", "alpha_", "[", "k", "]", "+=", "ell", "self", ".", "beta_", "[", "k", "]", "+=", "1", "-", "ell", "self", ".", "_calc_theta", "(", ")", "if", "self", ".", "store_variance", ":", "self", ".", "_calc_var_theta", "(", ")" ]
Update the posterior and estimates after a label is sampled Parameters ---------- ell : int sampled label: 0 or 1 k : int index of stratum where label was sampled
[ "Update", "the", "posterior", "and", "estimates", "after", "a", "label", "is", "sampled" ]
python
train
24.176471
zhmcclient/python-zhmcclient
zhmcclient_mock/_hmc.py
https://github.com/zhmcclient/python-zhmcclient/blob/9657563e5d9184c51d3c903442a58b9725fdf335/zhmcclient_mock/_hmc.py#L2995-L3020
def get_metric_group_definitions(self): """ Get the faked metric group definitions for this context object that are to be returned from its create operation. If a 'metric-groups' property had been specified for this context, only those faked metric group definitions of its manager object that are in that list, are included in the result. Otherwise, all metric group definitions of its manager are included in the result. Returns: iterable of :class:~zhmcclient.FakedMetricGroupDefinition`: The faked metric group definitions, in the order they had been added. """ group_names = self.properties.get('metric-groups', None) if not group_names: group_names = self.manager.get_metric_group_definition_names() mg_defs = [] for group_name in group_names: try: mg_def = self.manager.get_metric_group_definition(group_name) mg_defs.append(mg_def) except ValueError: pass # ignore metric groups without metric group defs return mg_defs
[ "def", "get_metric_group_definitions", "(", "self", ")", ":", "group_names", "=", "self", ".", "properties", ".", "get", "(", "'metric-groups'", ",", "None", ")", "if", "not", "group_names", ":", "group_names", "=", "self", ".", "manager", ".", "get_metric_group_definition_names", "(", ")", "mg_defs", "=", "[", "]", "for", "group_name", "in", "group_names", ":", "try", ":", "mg_def", "=", "self", ".", "manager", ".", "get_metric_group_definition", "(", "group_name", ")", "mg_defs", ".", "append", "(", "mg_def", ")", "except", "ValueError", ":", "pass", "# ignore metric groups without metric group defs", "return", "mg_defs" ]
Get the faked metric group definitions for this context object that are to be returned from its create operation. If a 'metric-groups' property had been specified for this context, only those faked metric group definitions of its manager object that are in that list, are included in the result. Otherwise, all metric group definitions of its manager are included in the result. Returns: iterable of :class:~zhmcclient.FakedMetricGroupDefinition`: The faked metric group definitions, in the order they had been added.
[ "Get", "the", "faked", "metric", "group", "definitions", "for", "this", "context", "object", "that", "are", "to", "be", "returned", "from", "its", "create", "operation", "." ]
python
train
43.230769
noxdafox/clipspy
clips/classes.py
https://github.com/noxdafox/clipspy/blob/b22d71a6da821c1715d8fa00d7d75cabc09ed364/clips/classes.py#L166-L185
def restore_instances(self, instances): """Restore a set of instances into the CLIPS data base. The Python equivalent of the CLIPS restore-instances command. Instances can be passed as a set of strings or as a file. """ instances = instances.encode() if os.path.exists(instances): ret = lib.EnvRestoreInstances(self._env, instances) if ret == -1: raise CLIPSError(self._env) else: ret = lib.EnvRestoreInstancesFromString(self._env, instances, -1) if ret == -1: raise CLIPSError(self._env) return ret
[ "def", "restore_instances", "(", "self", ",", "instances", ")", ":", "instances", "=", "instances", ".", "encode", "(", ")", "if", "os", ".", "path", ".", "exists", "(", "instances", ")", ":", "ret", "=", "lib", ".", "EnvRestoreInstances", "(", "self", ".", "_env", ",", "instances", ")", "if", "ret", "==", "-", "1", ":", "raise", "CLIPSError", "(", "self", ".", "_env", ")", "else", ":", "ret", "=", "lib", ".", "EnvRestoreInstancesFromString", "(", "self", ".", "_env", ",", "instances", ",", "-", "1", ")", "if", "ret", "==", "-", "1", ":", "raise", "CLIPSError", "(", "self", ".", "_env", ")", "return", "ret" ]
Restore a set of instances into the CLIPS data base. The Python equivalent of the CLIPS restore-instances command. Instances can be passed as a set of strings or as a file.
[ "Restore", "a", "set", "of", "instances", "into", "the", "CLIPS", "data", "base", "." ]
python
train
31.45
Opentrons/opentrons
api/src/opentrons/system/nmcli.py
https://github.com/Opentrons/opentrons/blob/a7c15cc2636ecb64ab56c7edc1d8a57163aaeadf/api/src/opentrons/system/nmcli.py#L595-L607
def sanitize_args(cmd: List[str]) -> List[str]: """ Filter the command so that it no longer contains passwords """ sanitized = [] for idx, fieldname in enumerate(cmd): def _is_password(cmdstr): return 'wifi-sec.psk' in cmdstr\ or 'password' in cmdstr.lower() if idx > 0 and _is_password(cmd[idx-1]): sanitized.append('****') else: sanitized.append(fieldname) return sanitized
[ "def", "sanitize_args", "(", "cmd", ":", "List", "[", "str", "]", ")", "->", "List", "[", "str", "]", ":", "sanitized", "=", "[", "]", "for", "idx", ",", "fieldname", "in", "enumerate", "(", "cmd", ")", ":", "def", "_is_password", "(", "cmdstr", ")", ":", "return", "'wifi-sec.psk'", "in", "cmdstr", "or", "'password'", "in", "cmdstr", ".", "lower", "(", ")", "if", "idx", ">", "0", "and", "_is_password", "(", "cmd", "[", "idx", "-", "1", "]", ")", ":", "sanitized", ".", "append", "(", "'****'", ")", "else", ":", "sanitized", ".", "append", "(", "fieldname", ")", "return", "sanitized" ]
Filter the command so that it no longer contains passwords
[ "Filter", "the", "command", "so", "that", "it", "no", "longer", "contains", "passwords" ]
python
train
35.307692
geophysics-ubonn/reda
lib/reda/containers/sEIT.py
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/containers/sEIT.py#L357-L419
def get_spectrum(self, nr_id=None, abmn=None, plot_filename=None): """Return a spectrum and its reciprocal counter part, if present in the dataset. Optimally, refer to the spectrum by its normal-reciprocal id. Returns ------- spectrum_nor : :py:class:`reda.eis.plots.sip_response` Normal spectrum. None if no normal spectrum is available spectrum_rec : :py:class:`reda.eis.plots.sip_response` or None Reciprocal spectrum. None if no reciprocal spectrum is available fig : :py:class:`matplotlib.Figure.Figure` , optional Figure object (only if plot_filename is set) """ assert nr_id is None or abmn is None # determine nr_id for given abmn tuple if abmn is not None: subdata = self.data.query( 'a == {} and b == {} and m == {} and n == {}'.format(*abmn) ).sort_values('frequency') if subdata.shape[0] == 0: return None, None # determine the norrec-id of this spectrum nr_id = subdata['id'].iloc[0] # get spectra subdata_nor = self.data.query( 'id == {} and norrec=="nor"'.format(nr_id) ).sort_values('frequency') subdata_rec = self.data.query( 'id == {} and norrec=="rec"'.format(nr_id) ).sort_values('frequency') # create spectrum objects spectrum_nor = None spectrum_rec = None if subdata_nor.shape[0] > 0: spectrum_nor = eis_plot.sip_response( frequencies=subdata_nor['frequency'].values, rmag=subdata_nor['r'], rpha=subdata_nor['rpha'], ) if subdata_rec.shape[0] > 0: spectrum_rec = eis_plot.sip_response( frequencies=subdata_rec['frequency'].values, rmag=subdata_rec['r'], rpha=subdata_rec['rpha'], ) if plot_filename is not None: if spectrum_nor is not None: fig = spectrum_nor.plot( plot_filename, reciprocal=spectrum_rec, return_fig=True, title='a: {} b: {} m: {}: n: {}'.format( *subdata_nor[['a', 'b', 'm', 'n']].values[0, :] ) ) return spectrum_nor, spectrum_rec, fig return spectrum_nor, spectrum_rec
[ "def", "get_spectrum", "(", "self", ",", "nr_id", "=", "None", ",", "abmn", "=", "None", ",", "plot_filename", "=", "None", ")", ":", "assert", "nr_id", "is", "None", "or", "abmn", "is", "None", "# determine nr_id for given abmn tuple", "if", "abmn", "is", "not", "None", ":", "subdata", "=", "self", ".", "data", ".", "query", "(", "'a == {} and b == {} and m == {} and n == {}'", ".", "format", "(", "*", "abmn", ")", ")", ".", "sort_values", "(", "'frequency'", ")", "if", "subdata", ".", "shape", "[", "0", "]", "==", "0", ":", "return", "None", ",", "None", "# determine the norrec-id of this spectrum", "nr_id", "=", "subdata", "[", "'id'", "]", ".", "iloc", "[", "0", "]", "# get spectra", "subdata_nor", "=", "self", ".", "data", ".", "query", "(", "'id == {} and norrec==\"nor\"'", ".", "format", "(", "nr_id", ")", ")", ".", "sort_values", "(", "'frequency'", ")", "subdata_rec", "=", "self", ".", "data", ".", "query", "(", "'id == {} and norrec==\"rec\"'", ".", "format", "(", "nr_id", ")", ")", ".", "sort_values", "(", "'frequency'", ")", "# create spectrum objects", "spectrum_nor", "=", "None", "spectrum_rec", "=", "None", "if", "subdata_nor", ".", "shape", "[", "0", "]", ">", "0", ":", "spectrum_nor", "=", "eis_plot", ".", "sip_response", "(", "frequencies", "=", "subdata_nor", "[", "'frequency'", "]", ".", "values", ",", "rmag", "=", "subdata_nor", "[", "'r'", "]", ",", "rpha", "=", "subdata_nor", "[", "'rpha'", "]", ",", ")", "if", "subdata_rec", ".", "shape", "[", "0", "]", ">", "0", ":", "spectrum_rec", "=", "eis_plot", ".", "sip_response", "(", "frequencies", "=", "subdata_rec", "[", "'frequency'", "]", ".", "values", ",", "rmag", "=", "subdata_rec", "[", "'r'", "]", ",", "rpha", "=", "subdata_rec", "[", "'rpha'", "]", ",", ")", "if", "plot_filename", "is", "not", "None", ":", "if", "spectrum_nor", "is", "not", "None", ":", "fig", "=", "spectrum_nor", ".", "plot", "(", "plot_filename", ",", "reciprocal", "=", "spectrum_rec", ",", "return_fig", "=", "True", 
",", "title", "=", "'a: {} b: {} m: {}: n: {}'", ".", "format", "(", "*", "subdata_nor", "[", "[", "'a'", ",", "'b'", ",", "'m'", ",", "'n'", "]", "]", ".", "values", "[", "0", ",", ":", "]", ")", ")", "return", "spectrum_nor", ",", "spectrum_rec", ",", "fig", "return", "spectrum_nor", ",", "spectrum_rec" ]
Return a spectrum and its reciprocal counter part, if present in the dataset. Optimally, refer to the spectrum by its normal-reciprocal id. Returns ------- spectrum_nor : :py:class:`reda.eis.plots.sip_response` Normal spectrum. None if no normal spectrum is available spectrum_rec : :py:class:`reda.eis.plots.sip_response` or None Reciprocal spectrum. None if no reciprocal spectrum is available fig : :py:class:`matplotlib.Figure.Figure` , optional Figure object (only if plot_filename is set)
[ "Return", "a", "spectrum", "and", "its", "reciprocal", "counter", "part", "if", "present", "in", "the", "dataset", ".", "Optimally", "refer", "to", "the", "spectrum", "by", "its", "normal", "-", "reciprocal", "id", "." ]
python
train
38.412698
krukas/Trionyx
trionyx/quickstart/__init__.py
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/quickstart/__init__.py#L30-L45
def create_project(self, project_path): """ Create Trionyx project in given path :param str path: path to create project in. :raises FileExistsError: """ shutil.copytree(self.project_path, project_path) self.update_file(project_path, 'requirements.txt', { 'trionyx_version': trionyx.__version__ }) self.update_file(project_path, 'config/local_settings.py', { 'secret_key': utils.random_string(32) })
[ "def", "create_project", "(", "self", ",", "project_path", ")", ":", "shutil", ".", "copytree", "(", "self", ".", "project_path", ",", "project_path", ")", "self", ".", "update_file", "(", "project_path", ",", "'requirements.txt'", ",", "{", "'trionyx_version'", ":", "trionyx", ".", "__version__", "}", ")", "self", ".", "update_file", "(", "project_path", ",", "'config/local_settings.py'", ",", "{", "'secret_key'", ":", "utils", ".", "random_string", "(", "32", ")", "}", ")" ]
Create Trionyx project in given path :param str path: path to create project in. :raises FileExistsError:
[ "Create", "Trionyx", "project", "in", "given", "path" ]
python
train
30.6875
materialsproject/pymatgen-db
matgendb/query_engine.py
https://github.com/materialsproject/pymatgen-db/blob/02e4351c2cea431407644f49193e8bf43ed39b9a/matgendb/query_engine.py#L451-L456
def query_one(self, *args, **kwargs): """Return first document from :meth:`query`, with same parameters. """ for r in self.query(*args, **kwargs): return r return None
[ "def", "query_one", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "for", "r", "in", "self", ".", "query", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "r", "return", "None" ]
Return first document from :meth:`query`, with same parameters.
[ "Return", "first", "document", "from", ":", "meth", ":", "query", "with", "same", "parameters", "." ]
python
train
34.333333
PmagPy/PmagPy
programs/demag_gui.py
https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/demag_gui.py#L3448-L3464
def recalculate_current_specimen_interpreatations(self): """ recalculates all interpretations on all specimens for all coordinate systems. Does not display recalcuated data. """ self.initialize_CART_rot(self.s) if str(self.s) in self.pmag_results_data['specimens']: for fit in self.pmag_results_data['specimens'][self.s]: if fit.get('specimen') and 'calculation_type' in fit.get('specimen'): fit.put(self.s, 'specimen', self.get_PCA_parameters( self.s, fit, fit.tmin, fit.tmax, 'specimen', fit.get('specimen')['calculation_type'])) if len(self.Data[self.s]['zijdblock_geo']) > 0 and fit.get('geographic') and 'calculation_type' in fit.get('geographic'): fit.put(self.s, 'geographic', self.get_PCA_parameters( self.s, fit, fit.tmin, fit.tmax, 'geographic', fit.get('geographic')['calculation_type'])) if len(self.Data[self.s]['zijdblock_tilt']) > 0 and fit.get('tilt-corrected') and 'calculation_type' in fit.get('tilt-corrected'): fit.put(self.s, 'tilt-corrected', self.get_PCA_parameters(self.s, fit, fit.tmin, fit.tmax, 'tilt-corrected', fit.get('tilt-corrected')['calculation_type']))
[ "def", "recalculate_current_specimen_interpreatations", "(", "self", ")", ":", "self", ".", "initialize_CART_rot", "(", "self", ".", "s", ")", "if", "str", "(", "self", ".", "s", ")", "in", "self", ".", "pmag_results_data", "[", "'specimens'", "]", ":", "for", "fit", "in", "self", ".", "pmag_results_data", "[", "'specimens'", "]", "[", "self", ".", "s", "]", ":", "if", "fit", ".", "get", "(", "'specimen'", ")", "and", "'calculation_type'", "in", "fit", ".", "get", "(", "'specimen'", ")", ":", "fit", ".", "put", "(", "self", ".", "s", ",", "'specimen'", ",", "self", ".", "get_PCA_parameters", "(", "self", ".", "s", ",", "fit", ",", "fit", ".", "tmin", ",", "fit", ".", "tmax", ",", "'specimen'", ",", "fit", ".", "get", "(", "'specimen'", ")", "[", "'calculation_type'", "]", ")", ")", "if", "len", "(", "self", ".", "Data", "[", "self", ".", "s", "]", "[", "'zijdblock_geo'", "]", ")", ">", "0", "and", "fit", ".", "get", "(", "'geographic'", ")", "and", "'calculation_type'", "in", "fit", ".", "get", "(", "'geographic'", ")", ":", "fit", ".", "put", "(", "self", ".", "s", ",", "'geographic'", ",", "self", ".", "get_PCA_parameters", "(", "self", ".", "s", ",", "fit", ",", "fit", ".", "tmin", ",", "fit", ".", "tmax", ",", "'geographic'", ",", "fit", ".", "get", "(", "'geographic'", ")", "[", "'calculation_type'", "]", ")", ")", "if", "len", "(", "self", ".", "Data", "[", "self", ".", "s", "]", "[", "'zijdblock_tilt'", "]", ")", ">", "0", "and", "fit", ".", "get", "(", "'tilt-corrected'", ")", "and", "'calculation_type'", "in", "fit", ".", "get", "(", "'tilt-corrected'", ")", ":", "fit", ".", "put", "(", "self", ".", "s", ",", "'tilt-corrected'", ",", "self", ".", "get_PCA_parameters", "(", "self", ".", "s", ",", "fit", ",", "fit", ".", "tmin", ",", "fit", ".", "tmax", ",", "'tilt-corrected'", ",", "fit", ".", "get", "(", "'tilt-corrected'", ")", "[", "'calculation_type'", "]", ")", ")" ]
recalculates all interpretations on all specimens for all coordinate systems. Does not display recalcuated data.
[ "recalculates", "all", "interpretations", "on", "all", "specimens", "for", "all", "coordinate", "systems", ".", "Does", "not", "display", "recalcuated", "data", "." ]
python
train
80.294118
ambitioninc/django-entity
entity/sync.py
https://github.com/ambitioninc/django-entity/blob/ebc61f34313c52f4ef5819eb1da25b2ad837e80c/entity/sync.py#L169-L176
def sync_entities_watching(instance): """ Syncs entities watching changes of a model instance. """ for entity_model, entity_model_getter in entity_registry.entity_watching[instance.__class__]: model_objs = list(entity_model_getter(instance)) if model_objs: sync_entities(*model_objs)
[ "def", "sync_entities_watching", "(", "instance", ")", ":", "for", "entity_model", ",", "entity_model_getter", "in", "entity_registry", ".", "entity_watching", "[", "instance", ".", "__class__", "]", ":", "model_objs", "=", "list", "(", "entity_model_getter", "(", "instance", ")", ")", "if", "model_objs", ":", "sync_entities", "(", "*", "model_objs", ")" ]
Syncs entities watching changes of a model instance.
[ "Syncs", "entities", "watching", "changes", "of", "a", "model", "instance", "." ]
python
train
40
StackStorm/pybind
pybind/nos/v6_0_2f/rbridge_id/interface/ve/ipv6/ipv6_local_anycast_gateway/ipv6_track/__init__.py
https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/nos/v6_0_2f/rbridge_id/interface/ve/ipv6/ipv6_local_anycast_gateway/ipv6_track/__init__.py#L96-L117
def _set_ipv6_interface(self, v, load=False): """ Setter method for ipv6_interface, mapped from YANG variable /rbridge_id/interface/ve/ipv6/ipv6_local_anycast_gateway/ipv6_track/ipv6_interface (list) If this variable is read-only (config: false) in the source YANG file, then _set_ipv6_interface is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_ipv6_interface() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("ipv6_interface_type ipv6_interface_name",ipv6_interface.ipv6_interface, yang_name="ipv6-interface", rest_name="interface", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ipv6-interface-type ipv6-interface-name', extensions={u'tailf-common': {u'callpoint': u'AnycastGatewayLocalIpv6TrackInterfaceConfig', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'interface', u'cli-incomplete-command': None, u'cli-no-match-completion': None, u'cli-full-no': None}}), is_container='list', yang_name="ipv6-interface", rest_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'AnycastGatewayLocalIpv6TrackInterfaceConfig', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'interface', u'cli-incomplete-command': None, u'cli-no-match-completion': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-anycast-gateway', defining_module='brocade-anycast-gateway', yang_type='list', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """ipv6_interface must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("ipv6_interface_type ipv6_interface_name",ipv6_interface.ipv6_interface, yang_name="ipv6-interface", rest_name="interface", parent=self, is_container='list', 
user_ordered=False, path_helper=self._path_helper, yang_keys='ipv6-interface-type ipv6-interface-name', extensions={u'tailf-common': {u'callpoint': u'AnycastGatewayLocalIpv6TrackInterfaceConfig', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'interface', u'cli-incomplete-command': None, u'cli-no-match-completion': None, u'cli-full-no': None}}), is_container='list', yang_name="ipv6-interface", rest_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'AnycastGatewayLocalIpv6TrackInterfaceConfig', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'interface', u'cli-incomplete-command': None, u'cli-no-match-completion': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-anycast-gateway', defining_module='brocade-anycast-gateway', yang_type='list', is_config=True)""", }) self.__ipv6_interface = t if hasattr(self, '_set'): self._set()
[ "def", "_set_ipv6_interface", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "YANGListType", "(", "\"ipv6_interface_type ipv6_interface_name\"", ",", "ipv6_interface", ".", "ipv6_interface", ",", "yang_name", "=", "\"ipv6-interface\"", ",", "rest_name", "=", "\"interface\"", ",", "parent", "=", "self", ",", "is_container", "=", "'list'", ",", "user_ordered", "=", "False", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "yang_keys", "=", "'ipv6-interface-type ipv6-interface-name'", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'callpoint'", ":", "u'AnycastGatewayLocalIpv6TrackInterfaceConfig'", ",", "u'cli-suppress-mode'", ":", "None", ",", "u'cli-suppress-list-no'", ":", "None", ",", "u'alt-name'", ":", "u'interface'", ",", "u'cli-incomplete-command'", ":", "None", ",", "u'cli-no-match-completion'", ":", "None", ",", "u'cli-full-no'", ":", "None", "}", "}", ")", ",", "is_container", "=", "'list'", ",", "yang_name", "=", "\"ipv6-interface\"", ",", "rest_name", "=", "\"interface\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'callpoint'", ":", "u'AnycastGatewayLocalIpv6TrackInterfaceConfig'", ",", "u'cli-suppress-mode'", ":", "None", ",", "u'cli-suppress-list-no'", ":", "None", ",", "u'alt-name'", ":", "u'interface'", ",", "u'cli-incomplete-command'", ":", "None", ",", "u'cli-no-match-completion'", ":", "None", ",", "u'cli-full-no'", ":", "None", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-anycast-gateway'", ",", "defining_module", "=", "'brocade-anycast-gateway'", ",", "yang_type", "=", "'list'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", 
",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"ipv6_interface must be of a type compatible with list\"\"\"", ",", "'defined-type'", ":", "\"list\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=YANGListType(\"ipv6_interface_type ipv6_interface_name\",ipv6_interface.ipv6_interface, yang_name=\"ipv6-interface\", rest_name=\"interface\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ipv6-interface-type ipv6-interface-name', extensions={u'tailf-common': {u'callpoint': u'AnycastGatewayLocalIpv6TrackInterfaceConfig', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'interface', u'cli-incomplete-command': None, u'cli-no-match-completion': None, u'cli-full-no': None}}), is_container='list', yang_name=\"ipv6-interface\", rest_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'AnycastGatewayLocalIpv6TrackInterfaceConfig', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'interface', u'cli-incomplete-command': None, u'cli-no-match-completion': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-anycast-gateway', defining_module='brocade-anycast-gateway', yang_type='list', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__ipv6_interface", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
Setter method for ipv6_interface, mapped from YANG variable /rbridge_id/interface/ve/ipv6/ipv6_local_anycast_gateway/ipv6_track/ipv6_interface (list) If this variable is read-only (config: false) in the source YANG file, then _set_ipv6_interface is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_ipv6_interface() directly.
[ "Setter", "method", "for", "ipv6_interface", "mapped", "from", "YANG", "variable", "/", "rbridge_id", "/", "interface", "/", "ve", "/", "ipv6", "/", "ipv6_local_anycast_gateway", "/", "ipv6_track", "/", "ipv6_interface", "(", "list", ")", "If", "this", "variable", "is", "read", "-", "only", "(", "config", ":", "false", ")", "in", "the", "source", "YANG", "file", "then", "_set_ipv6_interface", "is", "considered", "as", "a", "private", "method", ".", "Backends", "looking", "to", "populate", "this", "variable", "should", "do", "so", "via", "calling", "thisObj", ".", "_set_ipv6_interface", "()", "directly", "." ]
python
train
140.636364
saltstack/salt
salt/modules/riak.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/riak.py#L52-L74
def stop(): ''' Stop Riak .. versionchanged:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' riak.stop ''' ret = {'comment': '', 'success': False} cmd = __execute_cmd('riak', 'stop') if cmd['retcode'] != 0: ret['comment'] = cmd['stderr'] else: ret['comment'] = cmd['stdout'] ret['success'] = True return ret
[ "def", "stop", "(", ")", ":", "ret", "=", "{", "'comment'", ":", "''", ",", "'success'", ":", "False", "}", "cmd", "=", "__execute_cmd", "(", "'riak'", ",", "'stop'", ")", "if", "cmd", "[", "'retcode'", "]", "!=", "0", ":", "ret", "[", "'comment'", "]", "=", "cmd", "[", "'stderr'", "]", "else", ":", "ret", "[", "'comment'", "]", "=", "cmd", "[", "'stdout'", "]", "ret", "[", "'success'", "]", "=", "True", "return", "ret" ]
Stop Riak .. versionchanged:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' riak.stop
[ "Stop", "Riak" ]
python
train
16.217391
mardix/pylot
pylot/component/views.py
https://github.com/mardix/pylot/blob/506a33a56ebdfc0925b94015e8cf98ccb16a143c/pylot/component/views.py#L1080-L1254
def user_admin_view(model, login_view="Login", template_dir=None): """ :param UserStruct: The User model structure containing other classes :param login_view: The login view interface :param template_dir: The directory containing the view pages :return: UserAdmin Doc: User Admin is a view that allows you to admin users. You must create a Pylot view called `UserAdmin` to activate it UserAdmin = app.views.user_admin(User, Login) class UserAdmin(UserAdmin, Pylot): pass The user admin create some global available vars under '__.user_admin' It's also best to add some security access on it class UserAdmin(UserAdmin, Pylot): decorators = [login_required] You can customize the user info page (::get) by creating the directory in your templates dir, and include the get.html inside of it ie: >/admin/templates/UserAdmin/get.html <div> {% include "Pylot/UserAdmin/get.html" %} <div> <div>Hello {{ __.user_admin.user.name }}<div> """ Pylot.context_(COMPONENT_USER_ADMIN=True) User = model.UserStruct.User LoginView = login_view if not template_dir: template_dir = "Pylot/UserAdmin" template_page = template_dir + "/%s.html" class UserAdmin(object): route_base = "user-admin" @classmethod def _options(cls): return { "user_role": [("Rol", "Role")], #[(role, role) for i, role in enumerate(.all_roles)], "user_status": [("Sta", "Stat")] #[(status, status) for i, status in enumerate(User.all_status)] } @classmethod def search_handler(cls, per_page=20): """ To initiate a search """ page = request.args.get("page", 1) show_deleted = True if request.args.get("show-deleted") else False name = request.args.get("name") email = request.args.get("email") users = User.all(include_deleted=show_deleted) users = users.order_by(User.name.asc()) if name: users = users.filter(User.name.contains(name)) if email: users = users.filter(User.email.contains(email)) users = users.paginate(page=page, per_page=per_page) cls.__(user_admin=dict( options=cls._options(), users=users, search_query={ "excluded_deleted": 
request.args.get("show-deleted"), "role": request.args.get("role"), "status": request.args.get("status"), "name": request.args.get("name"), "email": request.args.get("email") } )) return users @classmethod def get_user_handler(cls, id): """ Get a user """ user = User.get(id, include_deleted=True) if not user: abort(404, "User doesn't exist") cls.__(user_admin=dict(user=user, options=cls._options())) return user def index(self): self.search_handler() return self.render(view_template=template_page % "index") def get(self, id): self.get_user_handler(id) return self.render(view_template=template_page % "get") def post(self): try: id = request.form.get("id") user = User.get(id, include_deleted=True) if not user: self.error_("Can't change user info. Invalid user") return redirect(url_for("UserAdmin:index")) delete_entry = True if request.form.get("delete-entry") else False if delete_entry: user.update(status=user.STATUS_SUSPENDED) user.delete() self.success_("User DELETED Successfully!") return redirect(url_for("UserAdmin:get", id=id)) email = request.form.get("email") password = request.form.get("password") password2 = request.form.get("password2") name = request.form.get("name") role = request.form.get("user_role") status = request.form.get("user_status") upd = {} if email and email != user.email: LoginView.change_login_handler(user_context=user) if password and password2: LoginView.change_password_handler(user_context=user) if name != user.name: upd.update({"name": name}) if role and role != user.role: upd.update({"role": role}) if status and status != user.status: if user.is_deleted and status == user.STATUS_ACTIVE: user.delete(False) upd.update({"status": status}) if upd: user.update(**upd) self.success_("User's Info updated successfully!") except Exception as ex: self.error_("Error: %s " % ex.message) return redirect(url_for("UserAdmin:get", id=id)) @route("reset-password", methods=["POST"]) def reset_password(self): try: id = request.form.get("id") user = 
User.get(id) if not user: self.error_("Can't reset password. Invalid user") return redirect(url_for("User:index")) password = LoginView.reset_password_handler(user_context=user) self.success_("User's password reset successfully!") except Exception as ex: self.error_("Error: %s " % ex.message) return redirect(url_for("UserAdmin:get", id=id)) @route("create", methods=["POST"]) def create(self): try: account = LoginView.signup_handler() account.set_role(request.form.get("role", "USER")) self.success_("User created successfully!") return redirect(url_for("UserAdmin:get", id=account.id)) except Exception as ex: self.error_("Error: %s" % ex.message) return redirect(url_for("UserAdmin:index")) return UserAdmin
[ "def", "user_admin_view", "(", "model", ",", "login_view", "=", "\"Login\"", ",", "template_dir", "=", "None", ")", ":", "Pylot", ".", "context_", "(", "COMPONENT_USER_ADMIN", "=", "True", ")", "User", "=", "model", ".", "UserStruct", ".", "User", "LoginView", "=", "login_view", "if", "not", "template_dir", ":", "template_dir", "=", "\"Pylot/UserAdmin\"", "template_page", "=", "template_dir", "+", "\"/%s.html\"", "class", "UserAdmin", "(", "object", ")", ":", "route_base", "=", "\"user-admin\"", "@", "classmethod", "def", "_options", "(", "cls", ")", ":", "return", "{", "\"user_role\"", ":", "[", "(", "\"Rol\"", ",", "\"Role\"", ")", "]", ",", "#[(role, role) for i, role in enumerate(.all_roles)],", "\"user_status\"", ":", "[", "(", "\"Sta\"", ",", "\"Stat\"", ")", "]", "#[(status, status) for i, status in enumerate(User.all_status)]", "}", "@", "classmethod", "def", "search_handler", "(", "cls", ",", "per_page", "=", "20", ")", ":", "\"\"\"\n To initiate a search\n \"\"\"", "page", "=", "request", ".", "args", ".", "get", "(", "\"page\"", ",", "1", ")", "show_deleted", "=", "True", "if", "request", ".", "args", ".", "get", "(", "\"show-deleted\"", ")", "else", "False", "name", "=", "request", ".", "args", ".", "get", "(", "\"name\"", ")", "email", "=", "request", ".", "args", ".", "get", "(", "\"email\"", ")", "users", "=", "User", ".", "all", "(", "include_deleted", "=", "show_deleted", ")", "users", "=", "users", ".", "order_by", "(", "User", ".", "name", ".", "asc", "(", ")", ")", "if", "name", ":", "users", "=", "users", ".", "filter", "(", "User", ".", "name", ".", "contains", "(", "name", ")", ")", "if", "email", ":", "users", "=", "users", ".", "filter", "(", "User", ".", "email", ".", "contains", "(", "email", ")", ")", "users", "=", "users", ".", "paginate", "(", "page", "=", "page", ",", "per_page", "=", "per_page", ")", "cls", ".", "__", "(", "user_admin", "=", "dict", "(", "options", "=", "cls", ".", "_options", "(", ")", ",", "users", "=", "users", 
",", "search_query", "=", "{", "\"excluded_deleted\"", ":", "request", ".", "args", ".", "get", "(", "\"show-deleted\"", ")", ",", "\"role\"", ":", "request", ".", "args", ".", "get", "(", "\"role\"", ")", ",", "\"status\"", ":", "request", ".", "args", ".", "get", "(", "\"status\"", ")", ",", "\"name\"", ":", "request", ".", "args", ".", "get", "(", "\"name\"", ")", ",", "\"email\"", ":", "request", ".", "args", ".", "get", "(", "\"email\"", ")", "}", ")", ")", "return", "users", "@", "classmethod", "def", "get_user_handler", "(", "cls", ",", "id", ")", ":", "\"\"\"\n Get a user\n \"\"\"", "user", "=", "User", ".", "get", "(", "id", ",", "include_deleted", "=", "True", ")", "if", "not", "user", ":", "abort", "(", "404", ",", "\"User doesn't exist\"", ")", "cls", ".", "__", "(", "user_admin", "=", "dict", "(", "user", "=", "user", ",", "options", "=", "cls", ".", "_options", "(", ")", ")", ")", "return", "user", "def", "index", "(", "self", ")", ":", "self", ".", "search_handler", "(", ")", "return", "self", ".", "render", "(", "view_template", "=", "template_page", "%", "\"index\"", ")", "def", "get", "(", "self", ",", "id", ")", ":", "self", ".", "get_user_handler", "(", "id", ")", "return", "self", ".", "render", "(", "view_template", "=", "template_page", "%", "\"get\"", ")", "def", "post", "(", "self", ")", ":", "try", ":", "id", "=", "request", ".", "form", ".", "get", "(", "\"id\"", ")", "user", "=", "User", ".", "get", "(", "id", ",", "include_deleted", "=", "True", ")", "if", "not", "user", ":", "self", ".", "error_", "(", "\"Can't change user info. 
Invalid user\"", ")", "return", "redirect", "(", "url_for", "(", "\"UserAdmin:index\"", ")", ")", "delete_entry", "=", "True", "if", "request", ".", "form", ".", "get", "(", "\"delete-entry\"", ")", "else", "False", "if", "delete_entry", ":", "user", ".", "update", "(", "status", "=", "user", ".", "STATUS_SUSPENDED", ")", "user", ".", "delete", "(", ")", "self", ".", "success_", "(", "\"User DELETED Successfully!\"", ")", "return", "redirect", "(", "url_for", "(", "\"UserAdmin:get\"", ",", "id", "=", "id", ")", ")", "email", "=", "request", ".", "form", ".", "get", "(", "\"email\"", ")", "password", "=", "request", ".", "form", ".", "get", "(", "\"password\"", ")", "password2", "=", "request", ".", "form", ".", "get", "(", "\"password2\"", ")", "name", "=", "request", ".", "form", ".", "get", "(", "\"name\"", ")", "role", "=", "request", ".", "form", ".", "get", "(", "\"user_role\"", ")", "status", "=", "request", ".", "form", ".", "get", "(", "\"user_status\"", ")", "upd", "=", "{", "}", "if", "email", "and", "email", "!=", "user", ".", "email", ":", "LoginView", ".", "change_login_handler", "(", "user_context", "=", "user", ")", "if", "password", "and", "password2", ":", "LoginView", ".", "change_password_handler", "(", "user_context", "=", "user", ")", "if", "name", "!=", "user", ".", "name", ":", "upd", ".", "update", "(", "{", "\"name\"", ":", "name", "}", ")", "if", "role", "and", "role", "!=", "user", ".", "role", ":", "upd", ".", "update", "(", "{", "\"role\"", ":", "role", "}", ")", "if", "status", "and", "status", "!=", "user", ".", "status", ":", "if", "user", ".", "is_deleted", "and", "status", "==", "user", ".", "STATUS_ACTIVE", ":", "user", ".", "delete", "(", "False", ")", "upd", ".", "update", "(", "{", "\"status\"", ":", "status", "}", ")", "if", "upd", ":", "user", ".", "update", "(", "*", "*", "upd", ")", "self", ".", "success_", "(", "\"User's Info updated successfully!\"", ")", "except", "Exception", "as", "ex", ":", "self", ".", "error_", "(", 
"\"Error: %s \"", "%", "ex", ".", "message", ")", "return", "redirect", "(", "url_for", "(", "\"UserAdmin:get\"", ",", "id", "=", "id", ")", ")", "@", "route", "(", "\"reset-password\"", ",", "methods", "=", "[", "\"POST\"", "]", ")", "def", "reset_password", "(", "self", ")", ":", "try", ":", "id", "=", "request", ".", "form", ".", "get", "(", "\"id\"", ")", "user", "=", "User", ".", "get", "(", "id", ")", "if", "not", "user", ":", "self", ".", "error_", "(", "\"Can't reset password. Invalid user\"", ")", "return", "redirect", "(", "url_for", "(", "\"User:index\"", ")", ")", "password", "=", "LoginView", ".", "reset_password_handler", "(", "user_context", "=", "user", ")", "self", ".", "success_", "(", "\"User's password reset successfully!\"", ")", "except", "Exception", "as", "ex", ":", "self", ".", "error_", "(", "\"Error: %s \"", "%", "ex", ".", "message", ")", "return", "redirect", "(", "url_for", "(", "\"UserAdmin:get\"", ",", "id", "=", "id", ")", ")", "@", "route", "(", "\"create\"", ",", "methods", "=", "[", "\"POST\"", "]", ")", "def", "create", "(", "self", ")", ":", "try", ":", "account", "=", "LoginView", ".", "signup_handler", "(", ")", "account", ".", "set_role", "(", "request", ".", "form", ".", "get", "(", "\"role\"", ",", "\"USER\"", ")", ")", "self", ".", "success_", "(", "\"User created successfully!\"", ")", "return", "redirect", "(", "url_for", "(", "\"UserAdmin:get\"", ",", "id", "=", "account", ".", "id", ")", ")", "except", "Exception", "as", "ex", ":", "self", ".", "error_", "(", "\"Error: %s\"", "%", "ex", ".", "message", ")", "return", "redirect", "(", "url_for", "(", "\"UserAdmin:index\"", ")", ")", "return", "UserAdmin" ]
:param UserStruct: The User model structure containing other classes :param login_view: The login view interface :param template_dir: The directory containing the view pages :return: UserAdmin Doc: User Admin is a view that allows you to admin users. You must create a Pylot view called `UserAdmin` to activate it UserAdmin = app.views.user_admin(User, Login) class UserAdmin(UserAdmin, Pylot): pass The user admin create some global available vars under '__.user_admin' It's also best to add some security access on it class UserAdmin(UserAdmin, Pylot): decorators = [login_required] You can customize the user info page (::get) by creating the directory in your templates dir, and include the get.html inside of it ie: >/admin/templates/UserAdmin/get.html <div> {% include "Pylot/UserAdmin/get.html" %} <div> <div>Hello {{ __.user_admin.user.name }}<div>
[ ":", "param", "UserStruct", ":", "The", "User", "model", "structure", "containing", "other", "classes", ":", "param", "login_view", ":", "The", "login", "view", "interface", ":", "param", "template_dir", ":", "The", "directory", "containing", "the", "view", "pages", ":", "return", ":", "UserAdmin" ]
python
train
36.251429
CellProfiler/centrosome
centrosome/cpmorphology.py
https://github.com/CellProfiler/centrosome/blob/7bd9350a2d4ae1b215b81eabcecfe560bbb1f32a/centrosome/cpmorphology.py#L1381-L1706
def minimum_enclosing_circle(labels, indexes = None, hull_and_point_count = None): """Find the location of the minimum enclosing circle and its radius labels - a labels matrix indexes - an array giving the label indexes to be processed hull_and_point_count - convex_hull output if already done. None = calculate returns an Nx3 array organized as i,j of the center and radius Algorithm from http://www.personal.kent.edu/~rmuhamma/Compgeometry/MyCG/CG-Applets/Center/centercli.htm who calls it the Applet's Algorithm and ascribes it to Pr. Chrystal The original citation is Professor Chrystal, "On the problem to construct the minimum circle enclosing n given points in a plane", Proceedings of the Edinburgh Mathematical Society, vol 3, 1884 """ if indexes is None: if hull_and_point_count is not None: indexes = np.array(np.unique(hull_and_point_count[0][:,0]),dtype=np.int32) else: max_label = np.max(labels) indexes = np.array(list(range(1,max_label+1)),dtype=np.int32) else: indexes = np.array(indexes,dtype=np.int32) if indexes.shape[0] == 0: return np.zeros((0,2)),np.zeros((0,)) if hull_and_point_count is None: hull, point_count = convex_hull(labels, indexes) else: hull, point_count = hull_and_point_count centers = np.zeros((len(indexes),2)) radii = np.zeros((len(indexes),)) # # point_index is the index to the first point in "hull" for a label # point_index = np.zeros((indexes.shape[0],),int) point_index[1:] = np.cumsum(point_count[:-1]) ######################################################################### # # The algorithm is this: # * Choose a line S from S0 to S1 at random from the set of adjacent # S0 and S1 # * For every vertex (V) other than S, compute the angle from S0 # to V to S. If this angle is obtuse, the vertex V lies within the # minimum enclosing circle and can be ignored. # * Find the minimum angle for all V. # If the minimum angle is obtuse, stop and accept S as the diameter of # the circle. 
# * If the vertex with the minimum angle makes angles S0-S1-V and # S1-S0-V that are acute and right, then take S0, S1 and V as the # triangle within the circumscribed minimum enclosing circle. # * Otherwise, find the largest obtuse angle among S0-S1-V and # S1-S0-V (V is the vertex with the minimum angle, not all of them). # If S0-S1-V is obtuse, make V the new S1, otherwise make V the new S0 # ########################################################################## # # anti_indexes is used to transform a label # into an index in the above array # anti_indexes_per_point gives the label index of any vertex # anti_indexes=np.zeros((np.max(indexes)+1,),int) anti_indexes[indexes] = list(range(indexes.shape[0])) anti_indexes_per_point = anti_indexes[hull[:,0]] # # Start out by eliminating the degenerate cases: 0, 1 and 2 # centers[point_count==0,:]= np.NaN if np.all(point_count == 0): # Bail if there are no points in any hull to prevent # index failures below. return centers,radii centers[point_count==1,:]=hull[point_index[point_count==1],1:] radii[point_count < 2]=0 centers[point_count==2,:]=(hull[point_index[point_count==2],1:]+ hull[point_index[point_count==2]+1,1:])/2 distance = centers[point_count==2,:] - hull[point_index[point_count==2],1:] radii[point_count==2]=np.sqrt(distance[:,0]**2+distance[:,1]**2) # # Get rid of the degenerate points # keep_me = point_count > 2 # # Pick S0 as the first point in each label # and S1 as the second. # s0_idx = point_index.copy() s1_idx = s0_idx+1 # # number each of the points in a label with an index # which gives # the order in which we'll get their angles. 
We use this to pick out # points # 2 to N which are the candidate vertices to S # within_label_indexes = (np.array(list(range(hull.shape[0])),int) - point_index[anti_indexes_per_point]) while(np.any(keep_me)): ############################################################# # Label indexing for active labels ############################################################# # # labels_to_consider contains the labels of the objects which # have not been completed # labels_to_consider = indexes[keep_me] # # anti_indexes_to_consider gives the index into any vector # shaped similarly to labels_to_consider (for instance, min_angle # below) for every label in labels_to_consider. # anti_indexes_to_consider =\ np.zeros((np.max(labels_to_consider)+1,),int) anti_indexes_to_consider[labels_to_consider] = \ np.array(list(range(labels_to_consider.shape[0]))) ############################################################## # Vertex indexing for active vertexes other than S0 and S1 ############################################################## # # The vertices are hull-points with indexes of 2 or more # keep_me_vertices is a mask of the vertices to operate on # during this iteration # keep_me_vertices = np.logical_and(keep_me[anti_indexes_per_point], within_label_indexes >= 2) # # v is the vertex coordinates for each vertex considered # v = hull[keep_me_vertices,1:] # # v_labels is the label from the label matrix for each vertex # v_labels = hull[keep_me_vertices,0] # # v_indexes is the index into "hull" for each vertex (and similarly # shaped vectors such as within_label_indexes # v_indexes=np.argwhere(keep_me_vertices).flatten().astype(np.int32) # # anti_indexes_per_vertex gives the index into "indexes" and # any similarly shaped array of per-label values # (for instance s0_idx) for each vertex being considered # anti_indexes_per_vertex = anti_indexes_per_point[keep_me_vertices] # # anti_indexes_to_consider_per_vertex gives the index into any # vector shaped similarly to 
labels_to_consider for each # vertex being analyzed # anti_indexes_to_consider_per_vertex = anti_indexes_to_consider[v_labels] # # Get S0 and S1 per vertex # s0 = hull[s0_idx[keep_me],1:] s1 = hull[s1_idx[keep_me],1:] s0 = s0[anti_indexes_to_consider_per_vertex] s1 = s1[anti_indexes_to_consider_per_vertex] # # Compute the angle S0-S1-V # # the first vector of the angles is between S0 and S1 # s01 = (s0 - s1).astype(float) # # compute V-S1 and V-S0 at each of the vertices to be considered # vs0 = (v - s0).astype(float) vs1 = (v - s1).astype(float) # #` Take the dot product of s01 and vs1 divided by the length of s01 * # the length of vs1. This gives the cosine of the angle between. # dot_vs1s0 = (np.sum(s01*vs1,1) / np.sqrt(np.sum(s01**2,1)*np.sum(vs1**2,1))) angle_vs1s0 = np.abs(np.arccos(dot_vs1s0)) s10 = -s01 dot_vs0s1 = (np.sum(s10*vs0,1) / np.sqrt(np.sum(s01**2,1)*np.sum(vs0**2,1))) angle_vs0s1 = np.abs(np.arccos(dot_vs0s1)) # # S0-V-S1 is pi - the other two # angle_s0vs1 = np.pi - angle_vs1s0 - angle_vs0s1 assert np.all(angle_s0vs1 >= 0) # # Now we find the minimum angle per label # min_angle = scind.minimum(angle_s0vs1,v_labels, labels_to_consider) min_angle = fixup_scipy_ndimage_result(min_angle) min_angle_per_vertex = min_angle[anti_indexes_to_consider_per_vertex] # # Calculate the index into V of the minimum angle per label. # Use "indexes" instead of labels_to_consider so we get something # with the same shape as keep_me # min_position = scind.minimum_position(angle_s0vs1,v_labels, indexes) min_position = fixup_scipy_ndimage_result(min_position).astype(int) min_position = min_position.flatten() # # Case 1: minimum angle is obtuse or right. Accept S as the diameter. # Case 1a: there are no vertices. Accept S as the diameter. 
# vertex_counts = scind.sum(keep_me_vertices, hull[:,0], labels_to_consider) vertex_counts = fixup_scipy_ndimage_result(vertex_counts) case_1 = np.logical_or(min_angle >= np.pi / 2, vertex_counts == 0) if np.any(case_1): # convert from a boolean over indexes_to_consider to a boolean # over indexes finish_me = np.zeros((indexes.shape[0],),bool) finish_me[anti_indexes[labels_to_consider[case_1]]] = True s0_finish_me = hull[s0_idx[finish_me],1:].astype(float) s1_finish_me = hull[s1_idx[finish_me],1:].astype(float) centers[finish_me] = (s0_finish_me + s1_finish_me)/2 radii[finish_me] = np.sqrt(np.sum((s0_finish_me - s1_finish_me)**2,1))/2 keep_me[finish_me] = False # # Case 2: all angles for the minimum angle vertex are acute # or right. # Pick S0, S1 and the vertex with the # smallest angle as 3 points on the circle. If you look at the # geometry, the diameter is the length of S0-S1 divided by # the cosine of 1/2 of the angle. The center of the circle # is at the circumcenter of the triangle formed by S0, S1 and # V. 
case_2 = keep_me.copy() case_2[angle_vs1s0[min_position] > np.pi/2] = False case_2[angle_vs0s1[min_position] > np.pi/2] = False case_2[angle_s0vs1[min_position] > np.pi/2] = False if np.any(case_2): # # Wikipedia (http://en.wikipedia.org/wiki/Circumcircle#Cartesian_coordinates) # gives the following: # D = 2(S0y Vx + S1y S0x - S1y Vx - S0y S1x - S0x Vy + S1x Vy) # D = 2(S0x (S1y-Vy) + S1x(Vy-S0y) + Vx(S0y-S1y) # x = ((S0x**2+S0y**2)(S1y-Vy)+(S1x**2+S1y**2)(Vy-S0y)+(Vx**2+Vy**2)(S0y-S1y)) / D # y = ((S0x**2+S0y**2)(Vx-S1x)+(S1x**2+S1y**2)(S0y-Vy)+(Vx**2+Vy**2)(S1y-S0y)) / D # ss0 = hull[s0_idx[case_2],1:].astype(float) ss1 = hull[s1_idx[case_2],1:].astype(float) vv = v[min_position[case_2]].astype(float) Y = 0 X = 1 D = 2*(ss0[:,X] * (ss1[:,Y] - vv[:,Y]) + ss1[:,X] * (vv[:,Y] - ss0[:,Y]) + vv[:,X] * (ss0[:,Y] - ss1[:,Y])) x = (np.sum(ss0**2,1)*(ss1[:,Y] - vv[:,Y]) + np.sum(ss1**2,1)*(vv[:,Y] - ss0[:,Y]) + np.sum(vv**2,1) *(ss0[:,Y] - ss1[:,Y])) / D y = (np.sum(ss0**2,1)*(vv[:,X] - ss1[:,X]) + np.sum(ss1**2,1)*(ss0[:,X] - vv[:,X]) + np.sum(vv**2,1) *(ss1[:,X] - ss0[:,X])) / D centers[case_2,X] = x centers[case_2,Y] = y distances = ss0-centers[case_2] radii[case_2] = np.sqrt(np.sum(distances**2,1)) keep_me[case_2] = False # # Finally, for anybody who's left, for each of S0-S1-V and # S1-S0-V, for V, the vertex with the minimum angle, # find the largest obtuse angle. The vertex of this # angle (S0 or S1) is inside the enclosing circle, so take V # and either S1 or S0 as the new S. # # This involves a relabeling of within_label_indexes. 
We replace # either S0 or S1 with V and assign V either 0 or 1 # if np.any(keep_me): labels_to_consider = indexes[keep_me] indexes_to_consider = anti_indexes[labels_to_consider] # # Index into within_label_indexes for each V with the # smallest angle # v_obtuse_indexes = v_indexes[min_position[keep_me]] angle_vs0s1_to_consider = angle_vs0s1[min_position[keep_me]] angle_vs1s0_to_consider = angle_vs1s0[min_position[keep_me]] # # Do the cases where S0 is larger # s0_is_obtuse = angle_vs0s1_to_consider > np.pi/2 if np.any(s0_is_obtuse): # # The index of the obtuse S0 # v_obtuse_s0_indexes = v_obtuse_indexes[s0_is_obtuse] obtuse_s0_idx = s0_idx[indexes_to_consider[s0_is_obtuse]] # # S0 gets the within_label_index of the vertex # within_label_indexes[obtuse_s0_idx] = \ within_label_indexes[v_obtuse_s0_indexes] # # Assign V as the new S0 # s0_idx[indexes_to_consider[s0_is_obtuse]] = v_obtuse_s0_indexes within_label_indexes[v_obtuse_s0_indexes] = 0 # # Do the cases where S1 is larger # s1_is_obtuse = np.logical_not(s0_is_obtuse) if np.any(s1_is_obtuse): # # The index of the obtuse S1 # v_obtuse_s1_indexes = v_obtuse_indexes[s1_is_obtuse] obtuse_s1_idx = s1_idx[indexes_to_consider[s1_is_obtuse]] # # S1 gets V's within_label_index and goes onto the list # of considered vertices. # within_label_indexes[obtuse_s1_idx] = \ within_label_indexes[v_obtuse_s1_indexes] # # Assign V as the new S1 # s1_idx[indexes_to_consider[s1_is_obtuse]] = v_obtuse_s1_indexes within_label_indexes[v_obtuse_s1_indexes] = 1 return centers, radii
[ "def", "minimum_enclosing_circle", "(", "labels", ",", "indexes", "=", "None", ",", "hull_and_point_count", "=", "None", ")", ":", "if", "indexes", "is", "None", ":", "if", "hull_and_point_count", "is", "not", "None", ":", "indexes", "=", "np", ".", "array", "(", "np", ".", "unique", "(", "hull_and_point_count", "[", "0", "]", "[", ":", ",", "0", "]", ")", ",", "dtype", "=", "np", ".", "int32", ")", "else", ":", "max_label", "=", "np", ".", "max", "(", "labels", ")", "indexes", "=", "np", ".", "array", "(", "list", "(", "range", "(", "1", ",", "max_label", "+", "1", ")", ")", ",", "dtype", "=", "np", ".", "int32", ")", "else", ":", "indexes", "=", "np", ".", "array", "(", "indexes", ",", "dtype", "=", "np", ".", "int32", ")", "if", "indexes", ".", "shape", "[", "0", "]", "==", "0", ":", "return", "np", ".", "zeros", "(", "(", "0", ",", "2", ")", ")", ",", "np", ".", "zeros", "(", "(", "0", ",", ")", ")", "if", "hull_and_point_count", "is", "None", ":", "hull", ",", "point_count", "=", "convex_hull", "(", "labels", ",", "indexes", ")", "else", ":", "hull", ",", "point_count", "=", "hull_and_point_count", "centers", "=", "np", ".", "zeros", "(", "(", "len", "(", "indexes", ")", ",", "2", ")", ")", "radii", "=", "np", ".", "zeros", "(", "(", "len", "(", "indexes", ")", ",", ")", ")", "#", "# point_index is the index to the first point in \"hull\" for a label", "#", "point_index", "=", "np", ".", "zeros", "(", "(", "indexes", ".", "shape", "[", "0", "]", ",", ")", ",", "int", ")", "point_index", "[", "1", ":", "]", "=", "np", ".", "cumsum", "(", "point_count", "[", ":", "-", "1", "]", ")", "#########################################################################", "#", "# The algorithm is this:", "# * Choose a line S from S0 to S1 at random from the set of adjacent", "# S0 and S1", "# * For every vertex (V) other than S, compute the angle from S0", "# to V to S. 
If this angle is obtuse, the vertex V lies within the", "# minimum enclosing circle and can be ignored.", "# * Find the minimum angle for all V.", "# If the minimum angle is obtuse, stop and accept S as the diameter of ", "# the circle.", "# * If the vertex with the minimum angle makes angles S0-S1-V and", "# S1-S0-V that are acute and right, then take S0, S1 and V as the", "# triangle within the circumscribed minimum enclosing circle.", "# * Otherwise, find the largest obtuse angle among S0-S1-V and", "# S1-S0-V (V is the vertex with the minimum angle, not all of them).", "# If S0-S1-V is obtuse, make V the new S1, otherwise make V the new S0", "#", "##########################################################################", "#", "# anti_indexes is used to transform a label # into an index in the above array", "# anti_indexes_per_point gives the label index of any vertex", "#", "anti_indexes", "=", "np", ".", "zeros", "(", "(", "np", ".", "max", "(", "indexes", ")", "+", "1", ",", ")", ",", "int", ")", "anti_indexes", "[", "indexes", "]", "=", "list", "(", "range", "(", "indexes", ".", "shape", "[", "0", "]", ")", ")", "anti_indexes_per_point", "=", "anti_indexes", "[", "hull", "[", ":", ",", "0", "]", "]", "#", "# Start out by eliminating the degenerate cases: 0, 1 and 2", "#", "centers", "[", "point_count", "==", "0", ",", ":", "]", "=", "np", ".", "NaN", "if", "np", ".", "all", "(", "point_count", "==", "0", ")", ":", "# Bail if there are no points in any hull to prevent", "# index failures below.", "return", "centers", ",", "radii", "centers", "[", "point_count", "==", "1", ",", ":", "]", "=", "hull", "[", "point_index", "[", "point_count", "==", "1", "]", ",", "1", ":", "]", "radii", "[", "point_count", "<", "2", "]", "=", "0", "centers", "[", "point_count", "==", "2", ",", ":", "]", "=", "(", "hull", "[", "point_index", "[", "point_count", "==", "2", "]", ",", "1", ":", "]", "+", "hull", "[", "point_index", "[", "point_count", "==", "2", "]", "+", "1", ",", 
"1", ":", "]", ")", "/", "2", "distance", "=", "centers", "[", "point_count", "==", "2", ",", ":", "]", "-", "hull", "[", "point_index", "[", "point_count", "==", "2", "]", ",", "1", ":", "]", "radii", "[", "point_count", "==", "2", "]", "=", "np", ".", "sqrt", "(", "distance", "[", ":", ",", "0", "]", "**", "2", "+", "distance", "[", ":", ",", "1", "]", "**", "2", ")", "#", "# Get rid of the degenerate points", "#", "keep_me", "=", "point_count", ">", "2", "#", "# Pick S0 as the first point in each label", "# and S1 as the second.", "#", "s0_idx", "=", "point_index", ".", "copy", "(", ")", "s1_idx", "=", "s0_idx", "+", "1", "#", "# number each of the points in a label with an index # which gives", "# the order in which we'll get their angles. We use this to pick out", "# points # 2 to N which are the candidate vertices to S", "# ", "within_label_indexes", "=", "(", "np", ".", "array", "(", "list", "(", "range", "(", "hull", ".", "shape", "[", "0", "]", ")", ")", ",", "int", ")", "-", "point_index", "[", "anti_indexes_per_point", "]", ")", "while", "(", "np", ".", "any", "(", "keep_me", ")", ")", ":", "#############################################################", "# Label indexing for active labels", "#############################################################", "#", "# labels_to_consider contains the labels of the objects which", "# have not been completed", "#", "labels_to_consider", "=", "indexes", "[", "keep_me", "]", "#", "# anti_indexes_to_consider gives the index into any vector", "# shaped similarly to labels_to_consider (for instance, min_angle", "# below) for every label in labels_to_consider.", "#", "anti_indexes_to_consider", "=", "np", ".", "zeros", "(", "(", "np", ".", "max", "(", "labels_to_consider", ")", "+", "1", ",", ")", ",", "int", ")", "anti_indexes_to_consider", "[", "labels_to_consider", "]", "=", "np", ".", "array", "(", "list", "(", "range", "(", "labels_to_consider", ".", "shape", "[", "0", "]", ")", ")", ")", 
"##############################################################", "# Vertex indexing for active vertexes other than S0 and S1", "##############################################################", "#", "# The vertices are hull-points with indexes of 2 or more", "# keep_me_vertices is a mask of the vertices to operate on", "# during this iteration", "#", "keep_me_vertices", "=", "np", ".", "logical_and", "(", "keep_me", "[", "anti_indexes_per_point", "]", ",", "within_label_indexes", ">=", "2", ")", "#", "# v is the vertex coordinates for each vertex considered", "#", "v", "=", "hull", "[", "keep_me_vertices", ",", "1", ":", "]", "#", "# v_labels is the label from the label matrix for each vertex", "#", "v_labels", "=", "hull", "[", "keep_me_vertices", ",", "0", "]", "#", "# v_indexes is the index into \"hull\" for each vertex (and similarly", "# shaped vectors such as within_label_indexes", "#", "v_indexes", "=", "np", ".", "argwhere", "(", "keep_me_vertices", ")", ".", "flatten", "(", ")", ".", "astype", "(", "np", ".", "int32", ")", "#", "# anti_indexes_per_vertex gives the index into \"indexes\" and", "# any similarly shaped array of per-label values", "# (for instance s0_idx) for each vertex being considered", "#", "anti_indexes_per_vertex", "=", "anti_indexes_per_point", "[", "keep_me_vertices", "]", "#", "# anti_indexes_to_consider_per_vertex gives the index into any", "# vector shaped similarly to labels_to_consider for each", "# vertex being analyzed", "#", "anti_indexes_to_consider_per_vertex", "=", "anti_indexes_to_consider", "[", "v_labels", "]", "#", "# Get S0 and S1 per vertex", "#", "s0", "=", "hull", "[", "s0_idx", "[", "keep_me", "]", ",", "1", ":", "]", "s1", "=", "hull", "[", "s1_idx", "[", "keep_me", "]", ",", "1", ":", "]", "s0", "=", "s0", "[", "anti_indexes_to_consider_per_vertex", "]", "s1", "=", "s1", "[", "anti_indexes_to_consider_per_vertex", "]", "#", "# Compute the angle S0-S1-V", "#", "# the first vector of the angles is between S0 and 
S1", "#", "s01", "=", "(", "s0", "-", "s1", ")", ".", "astype", "(", "float", ")", "#", "# compute V-S1 and V-S0 at each of the vertices to be considered", "#", "vs0", "=", "(", "v", "-", "s0", ")", ".", "astype", "(", "float", ")", "vs1", "=", "(", "v", "-", "s1", ")", ".", "astype", "(", "float", ")", "#", "#` Take the dot product of s01 and vs1 divided by the length of s01 *", "# the length of vs1. This gives the cosine of the angle between.", "#", "dot_vs1s0", "=", "(", "np", ".", "sum", "(", "s01", "*", "vs1", ",", "1", ")", "/", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "s01", "**", "2", ",", "1", ")", "*", "np", ".", "sum", "(", "vs1", "**", "2", ",", "1", ")", ")", ")", "angle_vs1s0", "=", "np", ".", "abs", "(", "np", ".", "arccos", "(", "dot_vs1s0", ")", ")", "s10", "=", "-", "s01", "dot_vs0s1", "=", "(", "np", ".", "sum", "(", "s10", "*", "vs0", ",", "1", ")", "/", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "s01", "**", "2", ",", "1", ")", "*", "np", ".", "sum", "(", "vs0", "**", "2", ",", "1", ")", ")", ")", "angle_vs0s1", "=", "np", ".", "abs", "(", "np", ".", "arccos", "(", "dot_vs0s1", ")", ")", "#", "# S0-V-S1 is pi - the other two", "#", "angle_s0vs1", "=", "np", ".", "pi", "-", "angle_vs1s0", "-", "angle_vs0s1", "assert", "np", ".", "all", "(", "angle_s0vs1", ">=", "0", ")", "#", "# Now we find the minimum angle per label", "#", "min_angle", "=", "scind", ".", "minimum", "(", "angle_s0vs1", ",", "v_labels", ",", "labels_to_consider", ")", "min_angle", "=", "fixup_scipy_ndimage_result", "(", "min_angle", ")", "min_angle_per_vertex", "=", "min_angle", "[", "anti_indexes_to_consider_per_vertex", "]", "#", "# Calculate the index into V of the minimum angle per label.", "# Use \"indexes\" instead of labels_to_consider so we get something", "# with the same shape as keep_me", "#", "min_position", "=", "scind", ".", "minimum_position", "(", "angle_s0vs1", ",", "v_labels", ",", "indexes", ")", "min_position", "=", "fixup_scipy_ndimage_result", 
"(", "min_position", ")", ".", "astype", "(", "int", ")", "min_position", "=", "min_position", ".", "flatten", "(", ")", "#", "# Case 1: minimum angle is obtuse or right. Accept S as the diameter.", "# Case 1a: there are no vertices. Accept S as the diameter.", "#", "vertex_counts", "=", "scind", ".", "sum", "(", "keep_me_vertices", ",", "hull", "[", ":", ",", "0", "]", ",", "labels_to_consider", ")", "vertex_counts", "=", "fixup_scipy_ndimage_result", "(", "vertex_counts", ")", "case_1", "=", "np", ".", "logical_or", "(", "min_angle", ">=", "np", ".", "pi", "/", "2", ",", "vertex_counts", "==", "0", ")", "if", "np", ".", "any", "(", "case_1", ")", ":", "# convert from a boolean over indexes_to_consider to a boolean", "# over indexes", "finish_me", "=", "np", ".", "zeros", "(", "(", "indexes", ".", "shape", "[", "0", "]", ",", ")", ",", "bool", ")", "finish_me", "[", "anti_indexes", "[", "labels_to_consider", "[", "case_1", "]", "]", "]", "=", "True", "s0_finish_me", "=", "hull", "[", "s0_idx", "[", "finish_me", "]", ",", "1", ":", "]", ".", "astype", "(", "float", ")", "s1_finish_me", "=", "hull", "[", "s1_idx", "[", "finish_me", "]", ",", "1", ":", "]", ".", "astype", "(", "float", ")", "centers", "[", "finish_me", "]", "=", "(", "s0_finish_me", "+", "s1_finish_me", ")", "/", "2", "radii", "[", "finish_me", "]", "=", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "(", "s0_finish_me", "-", "s1_finish_me", ")", "**", "2", ",", "1", ")", ")", "/", "2", "keep_me", "[", "finish_me", "]", "=", "False", "#", "# Case 2: all angles for the minimum angle vertex are acute ", "# or right.", "# Pick S0, S1 and the vertex with the", "# smallest angle as 3 points on the circle. If you look at the", "# geometry, the diameter is the length of S0-S1 divided by", "# the cosine of 1/2 of the angle. 
The center of the circle", "# is at the circumcenter of the triangle formed by S0, S1 and", "# V.", "case_2", "=", "keep_me", ".", "copy", "(", ")", "case_2", "[", "angle_vs1s0", "[", "min_position", "]", ">", "np", ".", "pi", "/", "2", "]", "=", "False", "case_2", "[", "angle_vs0s1", "[", "min_position", "]", ">", "np", ".", "pi", "/", "2", "]", "=", "False", "case_2", "[", "angle_s0vs1", "[", "min_position", "]", ">", "np", ".", "pi", "/", "2", "]", "=", "False", "if", "np", ".", "any", "(", "case_2", ")", ":", "#", "# Wikipedia (http://en.wikipedia.org/wiki/Circumcircle#Cartesian_coordinates)", "# gives the following:", "# D = 2(S0y Vx + S1y S0x - S1y Vx - S0y S1x - S0x Vy + S1x Vy)", "# D = 2(S0x (S1y-Vy) + S1x(Vy-S0y) + Vx(S0y-S1y)", "# x = ((S0x**2+S0y**2)(S1y-Vy)+(S1x**2+S1y**2)(Vy-S0y)+(Vx**2+Vy**2)(S0y-S1y)) / D", "# y = ((S0x**2+S0y**2)(Vx-S1x)+(S1x**2+S1y**2)(S0y-Vy)+(Vx**2+Vy**2)(S1y-S0y)) / D", "#", "ss0", "=", "hull", "[", "s0_idx", "[", "case_2", "]", ",", "1", ":", "]", ".", "astype", "(", "float", ")", "ss1", "=", "hull", "[", "s1_idx", "[", "case_2", "]", ",", "1", ":", "]", ".", "astype", "(", "float", ")", "vv", "=", "v", "[", "min_position", "[", "case_2", "]", "]", ".", "astype", "(", "float", ")", "Y", "=", "0", "X", "=", "1", "D", "=", "2", "*", "(", "ss0", "[", ":", ",", "X", "]", "*", "(", "ss1", "[", ":", ",", "Y", "]", "-", "vv", "[", ":", ",", "Y", "]", ")", "+", "ss1", "[", ":", ",", "X", "]", "*", "(", "vv", "[", ":", ",", "Y", "]", "-", "ss0", "[", ":", ",", "Y", "]", ")", "+", "vv", "[", ":", ",", "X", "]", "*", "(", "ss0", "[", ":", ",", "Y", "]", "-", "ss1", "[", ":", ",", "Y", "]", ")", ")", "x", "=", "(", "np", ".", "sum", "(", "ss0", "**", "2", ",", "1", ")", "*", "(", "ss1", "[", ":", ",", "Y", "]", "-", "vv", "[", ":", ",", "Y", "]", ")", "+", "np", ".", "sum", "(", "ss1", "**", "2", ",", "1", ")", "*", "(", "vv", "[", ":", ",", "Y", "]", "-", "ss0", "[", ":", ",", "Y", "]", ")", "+", "np", ".", "sum", "(", "vv", "**", "2", 
",", "1", ")", "*", "(", "ss0", "[", ":", ",", "Y", "]", "-", "ss1", "[", ":", ",", "Y", "]", ")", ")", "/", "D", "y", "=", "(", "np", ".", "sum", "(", "ss0", "**", "2", ",", "1", ")", "*", "(", "vv", "[", ":", ",", "X", "]", "-", "ss1", "[", ":", ",", "X", "]", ")", "+", "np", ".", "sum", "(", "ss1", "**", "2", ",", "1", ")", "*", "(", "ss0", "[", ":", ",", "X", "]", "-", "vv", "[", ":", ",", "X", "]", ")", "+", "np", ".", "sum", "(", "vv", "**", "2", ",", "1", ")", "*", "(", "ss1", "[", ":", ",", "X", "]", "-", "ss0", "[", ":", ",", "X", "]", ")", ")", "/", "D", "centers", "[", "case_2", ",", "X", "]", "=", "x", "centers", "[", "case_2", ",", "Y", "]", "=", "y", "distances", "=", "ss0", "-", "centers", "[", "case_2", "]", "radii", "[", "case_2", "]", "=", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "distances", "**", "2", ",", "1", ")", ")", "keep_me", "[", "case_2", "]", "=", "False", "#", "# Finally, for anybody who's left, for each of S0-S1-V and", "# S1-S0-V, for V, the vertex with the minimum angle,", "# find the largest obtuse angle. The vertex of this", "# angle (S0 or S1) is inside the enclosing circle, so take V", "# and either S1 or S0 as the new S.", "#", "# This involves a relabeling of within_label_indexes. 
We replace", "# either S0 or S1 with V and assign V either 0 or 1", "#", "if", "np", ".", "any", "(", "keep_me", ")", ":", "labels_to_consider", "=", "indexes", "[", "keep_me", "]", "indexes_to_consider", "=", "anti_indexes", "[", "labels_to_consider", "]", "#", "# Index into within_label_indexes for each V with the", "# smallest angle", "#", "v_obtuse_indexes", "=", "v_indexes", "[", "min_position", "[", "keep_me", "]", "]", "angle_vs0s1_to_consider", "=", "angle_vs0s1", "[", "min_position", "[", "keep_me", "]", "]", "angle_vs1s0_to_consider", "=", "angle_vs1s0", "[", "min_position", "[", "keep_me", "]", "]", "#", "# Do the cases where S0 is larger", "#", "s0_is_obtuse", "=", "angle_vs0s1_to_consider", ">", "np", ".", "pi", "/", "2", "if", "np", ".", "any", "(", "s0_is_obtuse", ")", ":", "#", "# The index of the obtuse S0", "#", "v_obtuse_s0_indexes", "=", "v_obtuse_indexes", "[", "s0_is_obtuse", "]", "obtuse_s0_idx", "=", "s0_idx", "[", "indexes_to_consider", "[", "s0_is_obtuse", "]", "]", "#", "# S0 gets the within_label_index of the vertex", "#", "within_label_indexes", "[", "obtuse_s0_idx", "]", "=", "within_label_indexes", "[", "v_obtuse_s0_indexes", "]", "#", "# Assign V as the new S0", "#", "s0_idx", "[", "indexes_to_consider", "[", "s0_is_obtuse", "]", "]", "=", "v_obtuse_s0_indexes", "within_label_indexes", "[", "v_obtuse_s0_indexes", "]", "=", "0", "#", "# Do the cases where S1 is larger", "#", "s1_is_obtuse", "=", "np", ".", "logical_not", "(", "s0_is_obtuse", ")", "if", "np", ".", "any", "(", "s1_is_obtuse", ")", ":", "#", "# The index of the obtuse S1", "#", "v_obtuse_s1_indexes", "=", "v_obtuse_indexes", "[", "s1_is_obtuse", "]", "obtuse_s1_idx", "=", "s1_idx", "[", "indexes_to_consider", "[", "s1_is_obtuse", "]", "]", "#", "# S1 gets V's within_label_index and goes onto the list", "# of considered vertices.", "#", "within_label_indexes", "[", "obtuse_s1_idx", "]", "=", "within_label_indexes", "[", "v_obtuse_s1_indexes", "]", "#", "# Assign V as the 
new S1", "#", "s1_idx", "[", "indexes_to_consider", "[", "s1_is_obtuse", "]", "]", "=", "v_obtuse_s1_indexes", "within_label_indexes", "[", "v_obtuse_s1_indexes", "]", "=", "1", "return", "centers", ",", "radii" ]
Find the location of the minimum enclosing circle and its radius labels - a labels matrix indexes - an array giving the label indexes to be processed hull_and_point_count - convex_hull output if already done. None = calculate returns an Nx3 array organized as i,j of the center and radius Algorithm from http://www.personal.kent.edu/~rmuhamma/Compgeometry/MyCG/CG-Applets/Center/centercli.htm who calls it the Applet's Algorithm and ascribes it to Pr. Chrystal The original citation is Professor Chrystal, "On the problem to construct the minimum circle enclosing n given points in a plane", Proceedings of the Edinburgh Mathematical Society, vol 3, 1884
[ "Find", "the", "location", "of", "the", "minimum", "enclosing", "circle", "and", "its", "radius", "labels", "-", "a", "labels", "matrix", "indexes", "-", "an", "array", "giving", "the", "label", "indexes", "to", "be", "processed", "hull_and_point_count", "-", "convex_hull", "output", "if", "already", "done", ".", "None", "=", "calculate", "returns", "an", "Nx3", "array", "organized", "as", "i", "j", "of", "the", "center", "and", "radius", "Algorithm", "from", "http", ":", "//", "www", ".", "personal", ".", "kent", ".", "edu", "/", "~rmuhamma", "/", "Compgeometry", "/", "MyCG", "/", "CG", "-", "Applets", "/", "Center", "/", "centercli", ".", "htm", "who", "calls", "it", "the", "Applet", "s", "Algorithm", "and", "ascribes", "it", "to", "Pr", ".", "Chrystal", "The", "original", "citation", "is", "Professor", "Chrystal", "On", "the", "problem", "to", "construct", "the", "minimum", "circle", "enclosing", "n", "given", "points", "in", "a", "plane", "Proceedings", "of", "the", "Edinburgh", "Mathematical", "Society", "vol", "3", "1884" ]
python
train
43.98773
quasipedia/simpleactors
simpleactors.py
https://github.com/quasipedia/simpleactors/blob/4253da2d10b3df080b5e7b3fbee03aa6dd10db07/simpleactors.py#L84-L92
def unplug(self): '''Remove the actor's methods from the callback registry.''' if not self.__plugged: return members = set([method for _, method in inspect.getmembers(self, predicate=inspect.ismethod)]) for message in global_callbacks: global_callbacks[message] -= members self.__plugged = False
[ "def", "unplug", "(", "self", ")", ":", "if", "not", "self", ".", "__plugged", ":", "return", "members", "=", "set", "(", "[", "method", "for", "_", ",", "method", "in", "inspect", ".", "getmembers", "(", "self", ",", "predicate", "=", "inspect", ".", "ismethod", ")", "]", ")", "for", "message", "in", "global_callbacks", ":", "global_callbacks", "[", "message", "]", "-=", "members", "self", ".", "__plugged", "=", "False" ]
Remove the actor's methods from the callback registry.
[ "Remove", "the", "actor", "s", "methods", "from", "the", "callback", "registry", "." ]
python
train
41.444444
maciejkula/glove-python
glove/metrics/accuracy.py
https://github.com/maciejkula/glove-python/blob/749494290fdfd24379dcc2e244c583ee61808634/glove/metrics/accuracy.py#L10-L25
def read_analogy_file(filename): """ Read the analogy task test set from a file. """ section = None with open(filename, 'r') as questions_file: for line in questions_file: if line.startswith(':'): section = line[2:].replace('\n', '') continue else: words = line.replace('\n', '').split(' ') yield section, words
[ "def", "read_analogy_file", "(", "filename", ")", ":", "section", "=", "None", "with", "open", "(", "filename", ",", "'r'", ")", "as", "questions_file", ":", "for", "line", "in", "questions_file", ":", "if", "line", ".", "startswith", "(", "':'", ")", ":", "section", "=", "line", "[", "2", ":", "]", ".", "replace", "(", "'\\n'", ",", "''", ")", "continue", "else", ":", "words", "=", "line", ".", "replace", "(", "'\\n'", ",", "''", ")", ".", "split", "(", "' '", ")", "yield", "section", ",", "words" ]
Read the analogy task test set from a file.
[ "Read", "the", "analogy", "task", "test", "set", "from", "a", "file", "." ]
python
train
25.9375
vtkiorg/vtki
vtki/export.py
https://github.com/vtkiorg/vtki/blob/5ccad7ae6d64a03e9594c9c7474c8aab3eb22dd1/vtki/export.py#L217-L225
def dump_t_coords(dataset_dir, data_dir, dataset, root=None, compress=True): """dump vtkjs texture coordinates""" if root is None: root = {} tcoords = dataset.GetPointData().GetTCoords() if tcoords: dumped_array = dump_data_array(dataset_dir, data_dir, tcoords, {}, compress) root['pointData']['activeTCoords'] = len(root['pointData']['arrays']) root['pointData']['arrays'].append({'data': dumped_array})
[ "def", "dump_t_coords", "(", "dataset_dir", ",", "data_dir", ",", "dataset", ",", "root", "=", "None", ",", "compress", "=", "True", ")", ":", "if", "root", "is", "None", ":", "root", "=", "{", "}", "tcoords", "=", "dataset", ".", "GetPointData", "(", ")", ".", "GetTCoords", "(", ")", "if", "tcoords", ":", "dumped_array", "=", "dump_data_array", "(", "dataset_dir", ",", "data_dir", ",", "tcoords", ",", "{", "}", ",", "compress", ")", "root", "[", "'pointData'", "]", "[", "'activeTCoords'", "]", "=", "len", "(", "root", "[", "'pointData'", "]", "[", "'arrays'", "]", ")", "root", "[", "'pointData'", "]", "[", "'arrays'", "]", ".", "append", "(", "{", "'data'", ":", "dumped_array", "}", ")" ]
dump vtkjs texture coordinates
[ "dump", "vtkjs", "texture", "coordinates" ]
python
train
49.333333
rueckstiess/mtools
mtools/util/logfile.py
https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/logfile.py#L166-L172
def versions(self): """Return all version changes.""" versions = [] for v, _ in self.restarts: if len(versions) == 0 or v != versions[-1]: versions.append(v) return versions
[ "def", "versions", "(", "self", ")", ":", "versions", "=", "[", "]", "for", "v", ",", "_", "in", "self", ".", "restarts", ":", "if", "len", "(", "versions", ")", "==", "0", "or", "v", "!=", "versions", "[", "-", "1", "]", ":", "versions", ".", "append", "(", "v", ")", "return", "versions" ]
Return all version changes.
[ "Return", "all", "version", "changes", "." ]
python
train
32.428571
tonyfischetti/sake
sakelib/acts.py
https://github.com/tonyfischetti/sake/blob/b7ad20fe8e7137db99a20ac06b8da26492601b00/sakelib/acts.py#L409-L431
def get_ties(G): """ If you specify a target that shares a dependency with another target, both targets need to be updated. This is because running one will resolve the sha mismatch and sake will think that the other one doesn't have to run. This is called a "tie". This function will find such ties. """ # we are going to make a dictionary whose keys are every dependency # and whose values are a list of all targets that use that dependency. # after making the dictionary, values whose length is above one will # be called "ties" ties = [] dep_dict = {} for node in G.nodes(data=True): if 'dependencies' in node[1]: for item in node[1]['dependencies']: if item not in dep_dict: dep_dict[item] = [] dep_dict[item].append(node[0]) for item in dep_dict: if len(list(set(dep_dict[item]))) > 1: ties.append(list(set(dep_dict[item]))) return ties
[ "def", "get_ties", "(", "G", ")", ":", "# we are going to make a dictionary whose keys are every dependency", "# and whose values are a list of all targets that use that dependency.", "# after making the dictionary, values whose length is above one will", "# be called \"ties\"", "ties", "=", "[", "]", "dep_dict", "=", "{", "}", "for", "node", "in", "G", ".", "nodes", "(", "data", "=", "True", ")", ":", "if", "'dependencies'", "in", "node", "[", "1", "]", ":", "for", "item", "in", "node", "[", "1", "]", "[", "'dependencies'", "]", ":", "if", "item", "not", "in", "dep_dict", ":", "dep_dict", "[", "item", "]", "=", "[", "]", "dep_dict", "[", "item", "]", ".", "append", "(", "node", "[", "0", "]", ")", "for", "item", "in", "dep_dict", ":", "if", "len", "(", "list", "(", "set", "(", "dep_dict", "[", "item", "]", ")", ")", ")", ">", "1", ":", "ties", ".", "append", "(", "list", "(", "set", "(", "dep_dict", "[", "item", "]", ")", ")", ")", "return", "ties" ]
If you specify a target that shares a dependency with another target, both targets need to be updated. This is because running one will resolve the sha mismatch and sake will think that the other one doesn't have to run. This is called a "tie". This function will find such ties.
[ "If", "you", "specify", "a", "target", "that", "shares", "a", "dependency", "with", "another", "target", "both", "targets", "need", "to", "be", "updated", ".", "This", "is", "because", "running", "one", "will", "resolve", "the", "sha", "mismatch", "and", "sake", "will", "think", "that", "the", "other", "one", "doesn", "t", "have", "to", "run", ".", "This", "is", "called", "a", "tie", ".", "This", "function", "will", "find", "such", "ties", "." ]
python
valid
42.26087
Deisss/python-sockjsroom
sockjsroom/socketHandler.py
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/sockjsroom/socketHandler.py#L61-L65
def join(self, _id): """ Join a room """ if not SockJSRoomHandler._room.has_key(self._gcls() + _id): SockJSRoomHandler._room[self._gcls() + _id] = set() SockJSRoomHandler._room[self._gcls() + _id].add(self)
[ "def", "join", "(", "self", ",", "_id", ")", ":", "if", "not", "SockJSRoomHandler", ".", "_room", ".", "has_key", "(", "self", ".", "_gcls", "(", ")", "+", "_id", ")", ":", "SockJSRoomHandler", ".", "_room", "[", "self", ".", "_gcls", "(", ")", "+", "_id", "]", "=", "set", "(", ")", "SockJSRoomHandler", ".", "_room", "[", "self", ".", "_gcls", "(", ")", "+", "_id", "]", ".", "add", "(", "self", ")" ]
Join a room
[ "Join", "a", "room" ]
python
train
47.6
androguard/androguard
androguard/session.py
https://github.com/androguard/androguard/blob/984c0d981be2950cf0451e484f7b0d4d53bc4911/androguard/session.py#L208-L246
def addDEX(self, filename, data, dx=None): """ Add a DEX file to the Session and run analysis. :param filename: the (file)name of the DEX file :param data: binary data of the dex file :param dx: an existing Analysis Object (optional) :return: A tuple of SHA256 Hash, DalvikVMFormat Object and Analysis object """ digest = hashlib.sha256(data).hexdigest() log.debug("add DEX:%s" % digest) log.debug("Parsing format ...") d = DalvikVMFormat(data) log.debug("added DEX:%s" % digest) self.analyzed_files[filename].append(digest) self.analyzed_digest[digest] = filename self.analyzed_dex[digest] = d if dx is None: dx = Analysis() dx.add(d) dx.create_xref() # TODO: If multidex: this will called many times per dex, even if already set for d in dx.vms: # TODO: allow different decompiler here! d.set_decompiler(DecompilerDAD(d, dx)) d.set_vmanalysis(dx) self.analyzed_vms[digest] = dx if self.export_ipython: log.debug("Exporting in ipython") d.create_python_export() return digest, d, dx
[ "def", "addDEX", "(", "self", ",", "filename", ",", "data", ",", "dx", "=", "None", ")", ":", "digest", "=", "hashlib", ".", "sha256", "(", "data", ")", ".", "hexdigest", "(", ")", "log", ".", "debug", "(", "\"add DEX:%s\"", "%", "digest", ")", "log", ".", "debug", "(", "\"Parsing format ...\"", ")", "d", "=", "DalvikVMFormat", "(", "data", ")", "log", ".", "debug", "(", "\"added DEX:%s\"", "%", "digest", ")", "self", ".", "analyzed_files", "[", "filename", "]", ".", "append", "(", "digest", ")", "self", ".", "analyzed_digest", "[", "digest", "]", "=", "filename", "self", ".", "analyzed_dex", "[", "digest", "]", "=", "d", "if", "dx", "is", "None", ":", "dx", "=", "Analysis", "(", ")", "dx", ".", "add", "(", "d", ")", "dx", ".", "create_xref", "(", ")", "# TODO: If multidex: this will called many times per dex, even if already set", "for", "d", "in", "dx", ".", "vms", ":", "# TODO: allow different decompiler here!", "d", ".", "set_decompiler", "(", "DecompilerDAD", "(", "d", ",", "dx", ")", ")", "d", ".", "set_vmanalysis", "(", "dx", ")", "self", ".", "analyzed_vms", "[", "digest", "]", "=", "dx", "if", "self", ".", "export_ipython", ":", "log", ".", "debug", "(", "\"Exporting in ipython\"", ")", "d", ".", "create_python_export", "(", ")", "return", "digest", ",", "d", ",", "dx" ]
Add a DEX file to the Session and run analysis. :param filename: the (file)name of the DEX file :param data: binary data of the dex file :param dx: an existing Analysis Object (optional) :return: A tuple of SHA256 Hash, DalvikVMFormat Object and Analysis object
[ "Add", "a", "DEX", "file", "to", "the", "Session", "and", "run", "analysis", "." ]
python
train
31.025641
saltstack/salt
salt/modules/ipset.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/ipset.py#L320-L378
def add(setname=None, entry=None, family='ipv4', **kwargs): ''' Append an entry to the specified set. CLI Example: .. code-block:: bash salt '*' ipset.add setname 192.168.1.26 salt '*' ipset.add setname 192.168.0.3,AA:BB:CC:DD:EE:FF ''' if not setname: return 'Error: Set needs to be specified' if not entry: return 'Error: Entry needs to be specified' setinfo = _find_set_info(setname) if not setinfo: return 'Error: Set {0} does not exist'.format(setname) settype = setinfo['Type'] cmd = '{0}'.format(entry) if 'timeout' in kwargs: if 'timeout' not in setinfo['Header']: return 'Error: Set {0} not created with timeout support'.format(setname) if 'packets' in kwargs or 'bytes' in kwargs: if 'counters' not in setinfo['Header']: return 'Error: Set {0} not created with counters support'.format(setname) if 'comment' in kwargs: if 'comment' not in setinfo['Header']: return 'Error: Set {0} not created with comment support'.format(setname) if 'comment' not in entry: cmd = '{0} comment "{1}"'.format(cmd, kwargs['comment']) if set(['skbmark', 'skbprio', 'skbqueue']) & set(kwargs): if 'skbinfo' not in setinfo['Header']: return 'Error: Set {0} not created with skbinfo support'.format(setname) for item in _ADD_OPTIONS[settype]: if item in kwargs: cmd = '{0} {1} {2}'.format(cmd, item, kwargs[item]) current_members = _find_set_members(setname) if cmd in current_members: return 'Warn: Entry {0} already exists in set {1}'.format(cmd, setname) # Using -exist to ensure entries are updated if the comment changes cmd = '{0} add -exist {1} {2}'.format(_ipset_cmd(), setname, cmd) out = __salt__['cmd.run'](cmd, python_shell=False) if not out: return 'Success' return 'Error: {0}'.format(out)
[ "def", "add", "(", "setname", "=", "None", ",", "entry", "=", "None", ",", "family", "=", "'ipv4'", ",", "*", "*", "kwargs", ")", ":", "if", "not", "setname", ":", "return", "'Error: Set needs to be specified'", "if", "not", "entry", ":", "return", "'Error: Entry needs to be specified'", "setinfo", "=", "_find_set_info", "(", "setname", ")", "if", "not", "setinfo", ":", "return", "'Error: Set {0} does not exist'", ".", "format", "(", "setname", ")", "settype", "=", "setinfo", "[", "'Type'", "]", "cmd", "=", "'{0}'", ".", "format", "(", "entry", ")", "if", "'timeout'", "in", "kwargs", ":", "if", "'timeout'", "not", "in", "setinfo", "[", "'Header'", "]", ":", "return", "'Error: Set {0} not created with timeout support'", ".", "format", "(", "setname", ")", "if", "'packets'", "in", "kwargs", "or", "'bytes'", "in", "kwargs", ":", "if", "'counters'", "not", "in", "setinfo", "[", "'Header'", "]", ":", "return", "'Error: Set {0} not created with counters support'", ".", "format", "(", "setname", ")", "if", "'comment'", "in", "kwargs", ":", "if", "'comment'", "not", "in", "setinfo", "[", "'Header'", "]", ":", "return", "'Error: Set {0} not created with comment support'", ".", "format", "(", "setname", ")", "if", "'comment'", "not", "in", "entry", ":", "cmd", "=", "'{0} comment \"{1}\"'", ".", "format", "(", "cmd", ",", "kwargs", "[", "'comment'", "]", ")", "if", "set", "(", "[", "'skbmark'", ",", "'skbprio'", ",", "'skbqueue'", "]", ")", "&", "set", "(", "kwargs", ")", ":", "if", "'skbinfo'", "not", "in", "setinfo", "[", "'Header'", "]", ":", "return", "'Error: Set {0} not created with skbinfo support'", ".", "format", "(", "setname", ")", "for", "item", "in", "_ADD_OPTIONS", "[", "settype", "]", ":", "if", "item", "in", "kwargs", ":", "cmd", "=", "'{0} {1} {2}'", ".", "format", "(", "cmd", ",", "item", ",", "kwargs", "[", "item", "]", ")", "current_members", "=", "_find_set_members", "(", "setname", ")", "if", "cmd", "in", "current_members", ":", "return", "'Warn: Entry 
{0} already exists in set {1}'", ".", "format", "(", "cmd", ",", "setname", ")", "# Using -exist to ensure entries are updated if the comment changes", "cmd", "=", "'{0} add -exist {1} {2}'", ".", "format", "(", "_ipset_cmd", "(", ")", ",", "setname", ",", "cmd", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "if", "not", "out", ":", "return", "'Success'", "return", "'Error: {0}'", ".", "format", "(", "out", ")" ]
Append an entry to the specified set. CLI Example: .. code-block:: bash salt '*' ipset.add setname 192.168.1.26 salt '*' ipset.add setname 192.168.0.3,AA:BB:CC:DD:EE:FF
[ "Append", "an", "entry", "to", "the", "specified", "set", "." ]
python
train
32.474576
iotile/coretools
iotilesensorgraph/iotile/sg/node_descriptor.py
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/node_descriptor.py#L241-L266
def _create_binary_trigger(trigger): """Create an 8-bit binary trigger from an InputTrigger, TrueTrigger, FalseTrigger.""" ops = { 0: ">", 1: "<", 2: ">=", 3: "<=", 4: "==", 5: 'always' } op_codes = {y: x for x, y in ops.items()} source = 0 if isinstance(trigger, TrueTrigger): op_code = op_codes['always'] elif isinstance(trigger, FalseTrigger): raise ArgumentError("Cannot express a never trigger in binary descriptor", trigger=trigger) else: op_code = op_codes[trigger.comp_string] if trigger.use_count: source = 1 return (op_code << 1) | source
[ "def", "_create_binary_trigger", "(", "trigger", ")", ":", "ops", "=", "{", "0", ":", "\">\"", ",", "1", ":", "\"<\"", ",", "2", ":", "\">=\"", ",", "3", ":", "\"<=\"", ",", "4", ":", "\"==\"", ",", "5", ":", "'always'", "}", "op_codes", "=", "{", "y", ":", "x", "for", "x", ",", "y", "in", "ops", ".", "items", "(", ")", "}", "source", "=", "0", "if", "isinstance", "(", "trigger", ",", "TrueTrigger", ")", ":", "op_code", "=", "op_codes", "[", "'always'", "]", "elif", "isinstance", "(", "trigger", ",", "FalseTrigger", ")", ":", "raise", "ArgumentError", "(", "\"Cannot express a never trigger in binary descriptor\"", ",", "trigger", "=", "trigger", ")", "else", ":", "op_code", "=", "op_codes", "[", "trigger", ".", "comp_string", "]", "if", "trigger", ".", "use_count", ":", "source", "=", "1", "return", "(", "op_code", "<<", "1", ")", "|", "source" ]
Create an 8-bit binary trigger from an InputTrigger, TrueTrigger, FalseTrigger.
[ "Create", "an", "8", "-", "bit", "binary", "trigger", "from", "an", "InputTrigger", "TrueTrigger", "FalseTrigger", "." ]
python
train
25.230769
neon-jungle/wagtailnews
wagtailnews/views/editor.py
https://github.com/neon-jungle/wagtailnews/blob/4cdec7013cca276dcfc658d3c986444ba6a42a84/wagtailnews/views/editor.py#L225-L268
def build_dummy_request(newsitem): """ Construct a HttpRequest object that is, as far as possible, representative of ones that would receive this page as a response. Used for previewing / moderation and any other place where we want to display a view of this page in the admin interface without going through the regular page routing logic. """ url = newsitem.full_url if url: url_info = urlparse(url) hostname = url_info.hostname path = url_info.path port = url_info.port or 80 else: # Cannot determine a URL to this page - cobble one together based on # whatever we find in ALLOWED_HOSTS try: hostname = settings.ALLOWED_HOSTS[0] except IndexError: hostname = 'localhost' path = '/' port = 80 request = WSGIRequest({ 'REQUEST_METHOD': 'GET', 'PATH_INFO': path, 'SERVER_NAME': hostname, 'SERVER_PORT': port, 'HTTP_HOST': hostname, 'wsgi.input': StringIO(), }) # Apply middleware to the request - see http://www.mellowmorning.com/2011/04/18/mock-django-request-for-testing/ handler = BaseHandler() handler.load_middleware() # call each middleware in turn and throw away any responses that they might return if hasattr(handler, '_request_middleware'): for middleware_method in handler._request_middleware: middleware_method(request) else: handler.get_response(request) return request
[ "def", "build_dummy_request", "(", "newsitem", ")", ":", "url", "=", "newsitem", ".", "full_url", "if", "url", ":", "url_info", "=", "urlparse", "(", "url", ")", "hostname", "=", "url_info", ".", "hostname", "path", "=", "url_info", ".", "path", "port", "=", "url_info", ".", "port", "or", "80", "else", ":", "# Cannot determine a URL to this page - cobble one together based on", "# whatever we find in ALLOWED_HOSTS", "try", ":", "hostname", "=", "settings", ".", "ALLOWED_HOSTS", "[", "0", "]", "except", "IndexError", ":", "hostname", "=", "'localhost'", "path", "=", "'/'", "port", "=", "80", "request", "=", "WSGIRequest", "(", "{", "'REQUEST_METHOD'", ":", "'GET'", ",", "'PATH_INFO'", ":", "path", ",", "'SERVER_NAME'", ":", "hostname", ",", "'SERVER_PORT'", ":", "port", ",", "'HTTP_HOST'", ":", "hostname", ",", "'wsgi.input'", ":", "StringIO", "(", ")", ",", "}", ")", "# Apply middleware to the request - see http://www.mellowmorning.com/2011/04/18/mock-django-request-for-testing/", "handler", "=", "BaseHandler", "(", ")", "handler", ".", "load_middleware", "(", ")", "# call each middleware in turn and throw away any responses that they might return", "if", "hasattr", "(", "handler", ",", "'_request_middleware'", ")", ":", "for", "middleware_method", "in", "handler", ".", "_request_middleware", ":", "middleware_method", "(", "request", ")", "else", ":", "handler", ".", "get_response", "(", "request", ")", "return", "request" ]
Construct a HttpRequest object that is, as far as possible, representative of ones that would receive this page as a response. Used for previewing / moderation and any other place where we want to display a view of this page in the admin interface without going through the regular page routing logic.
[ "Construct", "a", "HttpRequest", "object", "that", "is", "as", "far", "as", "possible", "representative", "of", "ones", "that", "would", "receive", "this", "page", "as", "a", "response", ".", "Used", "for", "previewing", "/", "moderation", "and", "any", "other", "place", "where", "we", "want", "to", "display", "a", "view", "of", "this", "page", "in", "the", "admin", "interface", "without", "going", "through", "the", "regular", "page", "routing", "logic", "." ]
python
train
34.045455
eventable/vobject
docs/build/lib/vobject/icalendar.py
https://github.com/eventable/vobject/blob/498555a553155ea9b26aace93332ae79365ecb31/docs/build/lib/vobject/icalendar.py#L1929-L1951
def tzinfo_eq(tzinfo1, tzinfo2, startYear = 2000, endYear=2020): """ Compare offsets and DST transitions from startYear to endYear. """ if tzinfo1 == tzinfo2: return True elif tzinfo1 is None or tzinfo2 is None: return False def dt_test(dt): if dt is None: return True return tzinfo1.utcoffset(dt) == tzinfo2.utcoffset(dt) if not dt_test(datetime.datetime(startYear, 1, 1)): return False for year in range(startYear, endYear): for transitionTo in 'daylight', 'standard': t1=getTransition(transitionTo, year, tzinfo1) t2=getTransition(transitionTo, year, tzinfo2) if t1 != t2 or not dt_test(t1): return False return True
[ "def", "tzinfo_eq", "(", "tzinfo1", ",", "tzinfo2", ",", "startYear", "=", "2000", ",", "endYear", "=", "2020", ")", ":", "if", "tzinfo1", "==", "tzinfo2", ":", "return", "True", "elif", "tzinfo1", "is", "None", "or", "tzinfo2", "is", "None", ":", "return", "False", "def", "dt_test", "(", "dt", ")", ":", "if", "dt", "is", "None", ":", "return", "True", "return", "tzinfo1", ".", "utcoffset", "(", "dt", ")", "==", "tzinfo2", ".", "utcoffset", "(", "dt", ")", "if", "not", "dt_test", "(", "datetime", ".", "datetime", "(", "startYear", ",", "1", ",", "1", ")", ")", ":", "return", "False", "for", "year", "in", "range", "(", "startYear", ",", "endYear", ")", ":", "for", "transitionTo", "in", "'daylight'", ",", "'standard'", ":", "t1", "=", "getTransition", "(", "transitionTo", ",", "year", ",", "tzinfo1", ")", "t2", "=", "getTransition", "(", "transitionTo", ",", "year", ",", "tzinfo2", ")", "if", "t1", "!=", "t2", "or", "not", "dt_test", "(", "t1", ")", ":", "return", "False", "return", "True" ]
Compare offsets and DST transitions from startYear to endYear.
[ "Compare", "offsets", "and", "DST", "transitions", "from", "startYear", "to", "endYear", "." ]
python
train
32.478261
tonybaloney/wily
wily/operators/__init__.py
https://github.com/tonybaloney/wily/blob/bae259354a91b57d56603f0ca7403186f086a84c/wily/operators/__init__.py#L170-L188
def resolve_metric_as_tuple(metric): """ Resolve metric key to a given target. :param metric: the metric name. :type metric: ``str`` :rtype: :class:`Metric` """ if "." in metric: _, metric = metric.split(".") r = [ (operator, match) for operator, match in ALL_METRICS if match[0] == metric ] if not r or len(r) == 0: raise ValueError(f"Metric {metric} not recognised.") else: return r[0]
[ "def", "resolve_metric_as_tuple", "(", "metric", ")", ":", "if", "\".\"", "in", "metric", ":", "_", ",", "metric", "=", "metric", ".", "split", "(", "\".\"", ")", "r", "=", "[", "(", "operator", ",", "match", ")", "for", "operator", ",", "match", "in", "ALL_METRICS", "if", "match", "[", "0", "]", "==", "metric", "]", "if", "not", "r", "or", "len", "(", "r", ")", "==", "0", ":", "raise", "ValueError", "(", "f\"Metric {metric} not recognised.\"", ")", "else", ":", "return", "r", "[", "0", "]" ]
Resolve metric key to a given target. :param metric: the metric name. :type metric: ``str`` :rtype: :class:`Metric`
[ "Resolve", "metric", "key", "to", "a", "given", "target", "." ]
python
train
23.631579
saltstack/salt
salt/version.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/version.py#L707-L717
def versions_information(include_salt_cloud=False): ''' Report the versions of dependent software. ''' salt_info = list(salt_information()) lib_info = list(dependency_information(include_salt_cloud)) sys_info = list(system_information()) return {'Salt Version': dict(salt_info), 'Dependency Versions': dict(lib_info), 'System Versions': dict(sys_info)}
[ "def", "versions_information", "(", "include_salt_cloud", "=", "False", ")", ":", "salt_info", "=", "list", "(", "salt_information", "(", ")", ")", "lib_info", "=", "list", "(", "dependency_information", "(", "include_salt_cloud", ")", ")", "sys_info", "=", "list", "(", "system_information", "(", ")", ")", "return", "{", "'Salt Version'", ":", "dict", "(", "salt_info", ")", ",", "'Dependency Versions'", ":", "dict", "(", "lib_info", ")", ",", "'System Versions'", ":", "dict", "(", "sys_info", ")", "}" ]
Report the versions of dependent software.
[ "Report", "the", "versions", "of", "dependent", "software", "." ]
python
train
35.909091
Rapptz/discord.py
discord/colour.py
https://github.com/Rapptz/discord.py/blob/05d4f7f9620ef33635d6ac965b26528e09cdaf5b/discord/colour.py#L110-L113
def from_hsv(cls, h, s, v): """Constructs a :class:`Colour` from an HSV tuple.""" rgb = colorsys.hsv_to_rgb(h, s, v) return cls.from_rgb(*(int(x * 255) for x in rgb))
[ "def", "from_hsv", "(", "cls", ",", "h", ",", "s", ",", "v", ")", ":", "rgb", "=", "colorsys", ".", "hsv_to_rgb", "(", "h", ",", "s", ",", "v", ")", "return", "cls", ".", "from_rgb", "(", "*", "(", "int", "(", "x", "*", "255", ")", "for", "x", "in", "rgb", ")", ")" ]
Constructs a :class:`Colour` from an HSV tuple.
[ "Constructs", "a", ":", "class", ":", "Colour", "from", "an", "HSV", "tuple", "." ]
python
train
46.75
ynop/audiomate
audiomate/utils/naming.py
https://github.com/ynop/audiomate/blob/61727920b23a708293c3d526fa3000d4de9c6c21/audiomate/utils/naming.py#L33-L49
def generate_name(length=15, not_in=None): """ Generates a random string of lowercase letters with the given length. Parameters: length (int): Length of the string to output. not_in (list): Only return a string not in the given iterator. Returns: str: A new name thats not in the given list. """ value = ''.join(random.choice(string.ascii_lowercase) for i in range(length)) while (not_in is not None) and (value in not_in): value = ''.join(random.choice(string.ascii_lowercase) for i in range(length)) return value
[ "def", "generate_name", "(", "length", "=", "15", ",", "not_in", "=", "None", ")", ":", "value", "=", "''", ".", "join", "(", "random", ".", "choice", "(", "string", ".", "ascii_lowercase", ")", "for", "i", "in", "range", "(", "length", ")", ")", "while", "(", "not_in", "is", "not", "None", ")", "and", "(", "value", "in", "not_in", ")", ":", "value", "=", "''", ".", "join", "(", "random", ".", "choice", "(", "string", ".", "ascii_lowercase", ")", "for", "i", "in", "range", "(", "length", ")", ")", "return", "value" ]
Generates a random string of lowercase letters with the given length. Parameters: length (int): Length of the string to output. not_in (list): Only return a string not in the given iterator. Returns: str: A new name thats not in the given list.
[ "Generates", "a", "random", "string", "of", "lowercase", "letters", "with", "the", "given", "length", "." ]
python
train
33.294118
Microsoft/botbuilder-python
libraries/botbuilder-dialogs/botbuilder/dialogs/dialog.py
https://github.com/Microsoft/botbuilder-python/blob/274663dd91c811bae6ac4488915ba5880771b0a7/libraries/botbuilder-dialogs/botbuilder/dialogs/dialog.py#L33-L40
def telemetry_client(self, value: BotTelemetryClient) -> None: """ Sets the telemetry client for logging events. """ if value is None: self._telemetry_client = NullTelemetryClient() else: self._telemetry_client = value
[ "def", "telemetry_client", "(", "self", ",", "value", ":", "BotTelemetryClient", ")", "->", "None", ":", "if", "value", "is", "None", ":", "self", ".", "_telemetry_client", "=", "NullTelemetryClient", "(", ")", "else", ":", "self", ".", "_telemetry_client", "=", "value" ]
Sets the telemetry client for logging events.
[ "Sets", "the", "telemetry", "client", "for", "logging", "events", "." ]
python
test
34.375
agile4you/bottle-neck
bottle_neck/routing.py
https://github.com/agile4you/bottle-neck/blob/ebc670a4b178255473d68e9b4122ba04e38f4810/bottle_neck/routing.py#L38-L61
def wrap_callable(cls, uri, methods, callable_obj): """Wraps function-based callable_obj into a `Route` instance, else proxies a `bottle_neck.handlers.BaseHandler` subclass instance. Args: uri (str): The uri relative path. methods (tuple): A tuple of valid method strings. callable_obj (instance): The callable object. Returns: A route instance. Raises: RouteError for invalid callable object type. """ if isinstance(callable_obj, HandlerMeta): callable_obj.base_endpoint = uri callable_obj.is_valid = True return callable_obj if isinstance(callable_obj, types.FunctionType): return cls(uri=uri, methods=methods, callable_obj=callable_obj) raise RouteError("Invalid handler type.")
[ "def", "wrap_callable", "(", "cls", ",", "uri", ",", "methods", ",", "callable_obj", ")", ":", "if", "isinstance", "(", "callable_obj", ",", "HandlerMeta", ")", ":", "callable_obj", ".", "base_endpoint", "=", "uri", "callable_obj", ".", "is_valid", "=", "True", "return", "callable_obj", "if", "isinstance", "(", "callable_obj", ",", "types", ".", "FunctionType", ")", ":", "return", "cls", "(", "uri", "=", "uri", ",", "methods", "=", "methods", ",", "callable_obj", "=", "callable_obj", ")", "raise", "RouteError", "(", "\"Invalid handler type.\"", ")" ]
Wraps function-based callable_obj into a `Route` instance, else proxies a `bottle_neck.handlers.BaseHandler` subclass instance. Args: uri (str): The uri relative path. methods (tuple): A tuple of valid method strings. callable_obj (instance): The callable object. Returns: A route instance. Raises: RouteError for invalid callable object type.
[ "Wraps", "function", "-", "based", "callable_obj", "into", "a", "Route", "instance", "else", "proxies", "a", "bottle_neck", ".", "handlers", ".", "BaseHandler", "subclass", "instance", "." ]
python
train
35.166667
ultrabug/py3status
py3status/docstrings.py
https://github.com/ultrabug/py3status/blob/4c105f1b44f7384ca4f7da5f821a47e468c7dee2/py3status/docstrings.py#L10-L14
def modules_directory(): """ Get the core modules directory. """ return os.path.join(os.path.dirname(os.path.abspath(__file__)), "modules")
[ "def", "modules_directory", "(", ")", ":", "return", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", ",", "\"modules\"", ")" ]
Get the core modules directory.
[ "Get", "the", "core", "modules", "directory", "." ]
python
train
30.2
iotile/coretools
iotilebuild/iotile/build/utilities/bundled_data.py
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/utilities/bundled_data.py#L22-L65
def resource_path(relative_path=None, expect=None): """Return the absolute path to a resource in iotile-build. This method finds the path to the `config` folder inside iotile-build, appends `relative_path` to it and then checks to make sure the desired file or directory exists. You can specify expect=(None, 'file', or 'folder') for what you expect to find at the given path. Args: relative_path (str): The relative_path from the config folder to the resource in question. This path can be specified using / characters on all operating systems since it will be normalized before usage. If None is passed, the based config folder will be returned. expect (str): What the path should resolve to, which is checked before returning, raising a DataError if the check fails. You can pass None for no checking, file for checking `os.path.isfile`, or folder for checking `os.path.isdir`. Default: None Returns: str: The normalized absolute path to the resource. """ if expect not in (None, 'file', 'folder'): raise ArgumentError("Invalid expect parameter, must be None, 'file' or 'folder'", expect=expect) this_dir = os.path.dirname(__file__) _resource_path = os.path.join(this_dir, '..', 'config') if relative_path is not None: path = os.path.normpath(relative_path) _resource_path = os.path.join(_resource_path, path) if expect == 'file' and not os.path.isfile(_resource_path): raise DataError("Expected resource %s to be a file and it wasn't" % _resource_path) elif expect == 'folder' and not os.path.isdir(_resource_path): raise DataError("Expected resource %s to be a folder and it wasn't" % _resource_path) return os.path.abspath(_resource_path)
[ "def", "resource_path", "(", "relative_path", "=", "None", ",", "expect", "=", "None", ")", ":", "if", "expect", "not", "in", "(", "None", ",", "'file'", ",", "'folder'", ")", ":", "raise", "ArgumentError", "(", "\"Invalid expect parameter, must be None, 'file' or 'folder'\"", ",", "expect", "=", "expect", ")", "this_dir", "=", "os", ".", "path", ".", "dirname", "(", "__file__", ")", "_resource_path", "=", "os", ".", "path", ".", "join", "(", "this_dir", ",", "'..'", ",", "'config'", ")", "if", "relative_path", "is", "not", "None", ":", "path", "=", "os", ".", "path", ".", "normpath", "(", "relative_path", ")", "_resource_path", "=", "os", ".", "path", ".", "join", "(", "_resource_path", ",", "path", ")", "if", "expect", "==", "'file'", "and", "not", "os", ".", "path", ".", "isfile", "(", "_resource_path", ")", ":", "raise", "DataError", "(", "\"Expected resource %s to be a file and it wasn't\"", "%", "_resource_path", ")", "elif", "expect", "==", "'folder'", "and", "not", "os", ".", "path", ".", "isdir", "(", "_resource_path", ")", ":", "raise", "DataError", "(", "\"Expected resource %s to be a folder and it wasn't\"", "%", "_resource_path", ")", "return", "os", ".", "path", ".", "abspath", "(", "_resource_path", ")" ]
Return the absolute path to a resource in iotile-build. This method finds the path to the `config` folder inside iotile-build, appends `relative_path` to it and then checks to make sure the desired file or directory exists. You can specify expect=(None, 'file', or 'folder') for what you expect to find at the given path. Args: relative_path (str): The relative_path from the config folder to the resource in question. This path can be specified using / characters on all operating systems since it will be normalized before usage. If None is passed, the based config folder will be returned. expect (str): What the path should resolve to, which is checked before returning, raising a DataError if the check fails. You can pass None for no checking, file for checking `os.path.isfile`, or folder for checking `os.path.isdir`. Default: None Returns: str: The normalized absolute path to the resource.
[ "Return", "the", "absolute", "path", "to", "a", "resource", "in", "iotile", "-", "build", "." ]
python
train
42.659091
clchiou/startup
startup.py
https://github.com/clchiou/startup/blob/13cbf3ce1deffbc10d33a5f64c396a73129a5929/startup.py#L224-L236
def _get_not_annotated(func, annotations=None): """Return non-optional parameters that are not annotated.""" argspec = inspect.getfullargspec(func) args = argspec.args if argspec.defaults is not None: args = args[:-len(argspec.defaults)] if inspect.isclass(func) or inspect.ismethod(func): args = args[1:] # Strip off ``cls`` or ``self``. kwonlyargs = argspec.kwonlyargs if argspec.kwonlydefaults is not None: kwonlyargs = kwonlyargs[:-len(argspec.kwonlydefaults)] annotations = annotations or argspec.annotations return [arg for arg in args + kwonlyargs if arg not in annotations]
[ "def", "_get_not_annotated", "(", "func", ",", "annotations", "=", "None", ")", ":", "argspec", "=", "inspect", ".", "getfullargspec", "(", "func", ")", "args", "=", "argspec", ".", "args", "if", "argspec", ".", "defaults", "is", "not", "None", ":", "args", "=", "args", "[", ":", "-", "len", "(", "argspec", ".", "defaults", ")", "]", "if", "inspect", ".", "isclass", "(", "func", ")", "or", "inspect", ".", "ismethod", "(", "func", ")", ":", "args", "=", "args", "[", "1", ":", "]", "# Strip off ``cls`` or ``self``.", "kwonlyargs", "=", "argspec", ".", "kwonlyargs", "if", "argspec", ".", "kwonlydefaults", "is", "not", "None", ":", "kwonlyargs", "=", "kwonlyargs", "[", ":", "-", "len", "(", "argspec", ".", "kwonlydefaults", ")", "]", "annotations", "=", "annotations", "or", "argspec", ".", "annotations", "return", "[", "arg", "for", "arg", "in", "args", "+", "kwonlyargs", "if", "arg", "not", "in", "annotations", "]" ]
Return non-optional parameters that are not annotated.
[ "Return", "non", "-", "optional", "parameters", "that", "are", "not", "annotated", "." ]
python
train
48.461538
citruz/beacontools
beacontools/packet_types/estimote.py
https://github.com/citruz/beacontools/blob/15a83e9750d0a4393f8a36868e07f6d9458253fe/beacontools/packet_types/estimote.py#L46-L59
def parse_motion_state(val): """Convert motion state byte to seconds.""" number = val & 0b00111111 unit = (val & 0b11000000) >> 6 if unit == 1: number *= 60 # minutes elif unit == 2: number *= 60 * 60 # hours elif unit == 3 and number < 32: number *= 60 * 60 * 24 # days elif unit == 3: number -= 32 number *= 60 * 60 * 24 * 7 # weeks return number
[ "def", "parse_motion_state", "(", "val", ")", ":", "number", "=", "val", "&", "0b00111111", "unit", "=", "(", "val", "&", "0b11000000", ")", ">>", "6", "if", "unit", "==", "1", ":", "number", "*=", "60", "# minutes", "elif", "unit", "==", "2", ":", "number", "*=", "60", "*", "60", "# hours", "elif", "unit", "==", "3", "and", "number", "<", "32", ":", "number", "*=", "60", "*", "60", "*", "24", "# days", "elif", "unit", "==", "3", ":", "number", "-=", "32", "number", "*=", "60", "*", "60", "*", "24", "*", "7", "# weeks", "return", "number" ]
Convert motion state byte to seconds.
[ "Convert", "motion", "state", "byte", "to", "seconds", "." ]
python
train
32.785714
hydpy-dev/hydpy
hydpy/core/sequencetools.py
https://github.com/hydpy-dev/hydpy/blob/1bc6a82cf30786521d86b36e27900c6717d3348d/hydpy/core/sequencetools.py#L1523-L1538
def old(self): """Assess to the state value(s) at beginning of the time step, which has been processed most recently. When using *HydPy* in the normal manner. But it can be helpful for demonstration and debugging purposes. """ value = getattr(self.fastaccess_old, self.name, None) if value is None: raise RuntimeError( 'No value/values of sequence %s has/have ' 'not been defined so far.' % objecttools.elementphrase(self)) else: if self.NDIM: value = numpy.asarray(value) return value
[ "def", "old", "(", "self", ")", ":", "value", "=", "getattr", "(", "self", ".", "fastaccess_old", ",", "self", ".", "name", ",", "None", ")", "if", "value", "is", "None", ":", "raise", "RuntimeError", "(", "'No value/values of sequence %s has/have '", "'not been defined so far.'", "%", "objecttools", ".", "elementphrase", "(", "self", ")", ")", "else", ":", "if", "self", ".", "NDIM", ":", "value", "=", "numpy", ".", "asarray", "(", "value", ")", "return", "value" ]
Assess to the state value(s) at beginning of the time step, which has been processed most recently. When using *HydPy* in the normal manner. But it can be helpful for demonstration and debugging purposes.
[ "Assess", "to", "the", "state", "value", "(", "s", ")", "at", "beginning", "of", "the", "time", "step", "which", "has", "been", "processed", "most", "recently", ".", "When", "using", "*", "HydPy", "*", "in", "the", "normal", "manner", ".", "But", "it", "can", "be", "helpful", "for", "demonstration", "and", "debugging", "purposes", "." ]
python
train
39.75
bram85/topydo
topydo/lib/ListFormat.py
https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/ListFormat.py#L268-L307
def parse(self, p_todo): """ Returns fully parsed string from 'format_string' attribute with all placeholders properly substituted by content obtained from p_todo. It uses preprocessed form of 'format_string' (result of ListFormatParser._preprocess_format) stored in 'format_list' attribute. """ parsed_list = [] repl_trunc = None for substr, placeholder, getter in self.format_list: repl = getter(p_todo) if getter else '' pattern = MAIN_PATTERN.format(ph=placeholder) if placeholder == 'S': repl_trunc = repl try: if repl == '': substr = re.sub(pattern, '', substr) else: substr = re.sub(pattern, _strip_placeholder_braces, substr) substr = re.sub(r'(?<!\\)%({ph}|\[{ph}\])'.format(ph=placeholder), repl, substr) except re.error: raise ListFormatError parsed_list.append(substr) parsed_str = _unescape_percent_sign(''.join(parsed_list)) parsed_str = _remove_redundant_spaces(parsed_str) if self.one_line and len(escape_ansi(parsed_str)) >= _columns(): parsed_str = _truncate(parsed_str, repl_trunc) if re.search('.*\t', parsed_str): parsed_str = _right_align(parsed_str) return parsed_str.rstrip()
[ "def", "parse", "(", "self", ",", "p_todo", ")", ":", "parsed_list", "=", "[", "]", "repl_trunc", "=", "None", "for", "substr", ",", "placeholder", ",", "getter", "in", "self", ".", "format_list", ":", "repl", "=", "getter", "(", "p_todo", ")", "if", "getter", "else", "''", "pattern", "=", "MAIN_PATTERN", ".", "format", "(", "ph", "=", "placeholder", ")", "if", "placeholder", "==", "'S'", ":", "repl_trunc", "=", "repl", "try", ":", "if", "repl", "==", "''", ":", "substr", "=", "re", ".", "sub", "(", "pattern", ",", "''", ",", "substr", ")", "else", ":", "substr", "=", "re", ".", "sub", "(", "pattern", ",", "_strip_placeholder_braces", ",", "substr", ")", "substr", "=", "re", ".", "sub", "(", "r'(?<!\\\\)%({ph}|\\[{ph}\\])'", ".", "format", "(", "ph", "=", "placeholder", ")", ",", "repl", ",", "substr", ")", "except", "re", ".", "error", ":", "raise", "ListFormatError", "parsed_list", ".", "append", "(", "substr", ")", "parsed_str", "=", "_unescape_percent_sign", "(", "''", ".", "join", "(", "parsed_list", ")", ")", "parsed_str", "=", "_remove_redundant_spaces", "(", "parsed_str", ")", "if", "self", ".", "one_line", "and", "len", "(", "escape_ansi", "(", "parsed_str", ")", ")", ">=", "_columns", "(", ")", ":", "parsed_str", "=", "_truncate", "(", "parsed_str", ",", "repl_trunc", ")", "if", "re", ".", "search", "(", "'.*\\t'", ",", "parsed_str", ")", ":", "parsed_str", "=", "_right_align", "(", "parsed_str", ")", "return", "parsed_str", ".", "rstrip", "(", ")" ]
Returns fully parsed string from 'format_string' attribute with all placeholders properly substituted by content obtained from p_todo. It uses preprocessed form of 'format_string' (result of ListFormatParser._preprocess_format) stored in 'format_list' attribute.
[ "Returns", "fully", "parsed", "string", "from", "format_string", "attribute", "with", "all", "placeholders", "properly", "substituted", "by", "content", "obtained", "from", "p_todo", "." ]
python
train
35.225
saltstack/salt
salt/modules/consul.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/consul.py#L1364-L1403
def session_info(consul_url=None, token=None, session=None, **kwargs): ''' Information about a session :param consul_url: The Consul server URL. :param session: The ID of the session to return information about. :param dc: By default, the datacenter of the agent is queried; however, the dc can be provided using the "dc" parameter. :return: Boolean & message of success or failure. CLI Example: .. code-block:: bash salt '*' consul.session_info session='c1c4d223-91cb-3d1f-1ee8-f2af9e7b6716' ''' ret = {} if not consul_url: consul_url = _get_config() if not consul_url: log.error('No Consul URL found.') ret['message'] = 'No Consul URL found.' ret['res'] = False return ret if not session: raise SaltInvocationError('Required argument "session" is missing.') query_params = {} if 'dc' in kwargs: query_params['dc'] = kwargs['dc'] function = 'session/info/{0}'.format(session) ret = _query(consul_url=consul_url, function=function, token=token, query_params=query_params) return ret
[ "def", "session_info", "(", "consul_url", "=", "None", ",", "token", "=", "None", ",", "session", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "{", "}", "if", "not", "consul_url", ":", "consul_url", "=", "_get_config", "(", ")", "if", "not", "consul_url", ":", "log", ".", "error", "(", "'No Consul URL found.'", ")", "ret", "[", "'message'", "]", "=", "'No Consul URL found.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "if", "not", "session", ":", "raise", "SaltInvocationError", "(", "'Required argument \"session\" is missing.'", ")", "query_params", "=", "{", "}", "if", "'dc'", "in", "kwargs", ":", "query_params", "[", "'dc'", "]", "=", "kwargs", "[", "'dc'", "]", "function", "=", "'session/info/{0}'", ".", "format", "(", "session", ")", "ret", "=", "_query", "(", "consul_url", "=", "consul_url", ",", "function", "=", "function", ",", "token", "=", "token", ",", "query_params", "=", "query_params", ")", "return", "ret" ]
Information about a session :param consul_url: The Consul server URL. :param session: The ID of the session to return information about. :param dc: By default, the datacenter of the agent is queried; however, the dc can be provided using the "dc" parameter. :return: Boolean & message of success or failure. CLI Example: .. code-block:: bash salt '*' consul.session_info session='c1c4d223-91cb-3d1f-1ee8-f2af9e7b6716'
[ "Information", "about", "a", "session" ]
python
train
29.35
hazelcast/hazelcast-python-client
hazelcast/proxy/pn_counter.py
https://github.com/hazelcast/hazelcast-python-client/blob/3f6639443c23d6d036aa343f8e094f052250d2c1/hazelcast/proxy/pn_counter.py#L91-L103
def add_and_get(self, delta): """ Adds the given value to the current value and returns the updated value. :raises NoDataMemberInClusterError: if the cluster does not contain any data members. :raises UnsupportedOperationError: if the cluster version is less than 3.10. :raises ConsistencyLostError: if the session guarantees have been lost. :param delta: (int), the value to add. :return: (int), the updated value. """ return self._invoke_internal(pn_counter_add_codec, delta=delta, get_before_update=False)
[ "def", "add_and_get", "(", "self", ",", "delta", ")", ":", "return", "self", ".", "_invoke_internal", "(", "pn_counter_add_codec", ",", "delta", "=", "delta", ",", "get_before_update", "=", "False", ")" ]
Adds the given value to the current value and returns the updated value. :raises NoDataMemberInClusterError: if the cluster does not contain any data members. :raises UnsupportedOperationError: if the cluster version is less than 3.10. :raises ConsistencyLostError: if the session guarantees have been lost. :param delta: (int), the value to add. :return: (int), the updated value.
[ "Adds", "the", "given", "value", "to", "the", "current", "value", "and", "returns", "the", "updated", "value", "." ]
python
train
43.923077
lexich/yandex-disk-webdav
yandexwebdav.py
https://github.com/lexich/yandex-disk-webdav/blob/669f51f999ed14e137454b90e7d035e2ca171c75/yandexwebdav.py#L192-L202
def getHeaders(self): """ Get common headers :return: """ basicauth = base64.encodestring(b(self.user + ':' + self.password)).strip() return { "Depth": "1", "Authorization": 'Basic ' + _decode_utf8(basicauth), "Accept": "*/*" }
[ "def", "getHeaders", "(", "self", ")", ":", "basicauth", "=", "base64", ".", "encodestring", "(", "b", "(", "self", ".", "user", "+", "':'", "+", "self", ".", "password", ")", ")", ".", "strip", "(", ")", "return", "{", "\"Depth\"", ":", "\"1\"", ",", "\"Authorization\"", ":", "'Basic '", "+", "_decode_utf8", "(", "basicauth", ")", ",", "\"Accept\"", ":", "\"*/*\"", "}" ]
Get common headers :return:
[ "Get", "common", "headers", ":", "return", ":" ]
python
train
28.090909
liampauling/betfair
betfairlightweight/filters.py
https://github.com/liampauling/betfair/blob/8479392eb4849c525d78d43497c32c0bb108e977/betfairlightweight/filters.py#L163-L185
def place_instruction(order_type, selection_id, side, handicap=None, limit_order=None, limit_on_close_order=None, market_on_close_order=None, customer_order_ref=None): """ Create order instructions to place an order at exchange. :param str order_type: define type of order to place. :param int selection_id: selection on which to place order :param float handicap: handicap if placing order on asianhandicap type market :param str side: side of order :param resources.LimitOrder limit_order: if orderType is a limitOrder structure details of the order. :param resources.LimitOnCloseOrder limit_on_close_order: if orderType is a limitOnCloseOrder structure details of the order. :param resources.MarketOnCloseOrder market_on_close_order: if orderType is a marketOnCloseOrder structure details of the order. :param str customer_order_ref: an optional reference customers can set to identify instructions.. :return: orders to place. :rtype: dict """ args = locals() return { to_camel_case(k): v for k, v in args.items() if v is not None }
[ "def", "place_instruction", "(", "order_type", ",", "selection_id", ",", "side", ",", "handicap", "=", "None", ",", "limit_order", "=", "None", ",", "limit_on_close_order", "=", "None", ",", "market_on_close_order", "=", "None", ",", "customer_order_ref", "=", "None", ")", ":", "args", "=", "locals", "(", ")", "return", "{", "to_camel_case", "(", "k", ")", ":", "v", "for", "k", ",", "v", "in", "args", ".", "items", "(", ")", "if", "v", "is", "not", "None", "}" ]
Create order instructions to place an order at exchange. :param str order_type: define type of order to place. :param int selection_id: selection on which to place order :param float handicap: handicap if placing order on asianhandicap type market :param str side: side of order :param resources.LimitOrder limit_order: if orderType is a limitOrder structure details of the order. :param resources.LimitOnCloseOrder limit_on_close_order: if orderType is a limitOnCloseOrder structure details of the order. :param resources.MarketOnCloseOrder market_on_close_order: if orderType is a marketOnCloseOrder structure details of the order. :param str customer_order_ref: an optional reference customers can set to identify instructions.. :return: orders to place. :rtype: dict
[ "Create", "order", "instructions", "to", "place", "an", "order", "at", "exchange", ".", ":", "param", "str", "order_type", ":", "define", "type", "of", "order", "to", "place", ".", ":", "param", "int", "selection_id", ":", "selection", "on", "which", "to", "place", "order", ":", "param", "float", "handicap", ":", "handicap", "if", "placing", "order", "on", "asianhandicap", "type", "market", ":", "param", "str", "side", ":", "side", "of", "order", ":", "param", "resources", ".", "LimitOrder", "limit_order", ":", "if", "orderType", "is", "a", "limitOrder", "structure", "details", "of", "the", "order", ".", ":", "param", "resources", ".", "LimitOnCloseOrder", "limit_on_close_order", ":", "if", "orderType", "is", "a", "limitOnCloseOrder", "structure", "details", "of", "the", "order", ".", ":", "param", "resources", ".", "MarketOnCloseOrder", "market_on_close_order", ":", "if", "orderType", "is", "a", "marketOnCloseOrder", "structure", "details", "of", "the", "order", ".", ":", "param", "str", "customer_order_ref", ":", "an", "optional", "reference", "customers", "can", "set", "to", "identify", "instructions", ".." ]
python
train
48.565217
tomplus/kubernetes_asyncio
kubernetes_asyncio/client/api/apiextensions_v1beta1_api.py
https://github.com/tomplus/kubernetes_asyncio/blob/f9ab15317ec921409714c7afef11aeb0f579985d/kubernetes_asyncio/client/api/apiextensions_v1beta1_api.py#L715-L738
def patch_custom_resource_definition_status(self, name, body, **kwargs): # noqa: E501 """patch_custom_resource_definition_status # noqa: E501 partially update status of the specified CustomResourceDefinition # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_custom_resource_definition_status(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the CustomResourceDefinition (required) :param UNKNOWN_BASE_TYPE body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1beta1CustomResourceDefinition If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_custom_resource_definition_status_with_http_info(name, body, **kwargs) # noqa: E501 else: (data) = self.patch_custom_resource_definition_status_with_http_info(name, body, **kwargs) # noqa: E501 return data
[ "def", "patch_custom_resource_definition_status", "(", "self", ",", "name", ",", "body", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "patch_custom_resource_definition_status_with_http_info", "(", "name", ",", "body", ",", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "patch_custom_resource_definition_status_with_http_info", "(", "name", ",", "body", ",", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
patch_custom_resource_definition_status # noqa: E501 partially update status of the specified CustomResourceDefinition # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_custom_resource_definition_status(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the CustomResourceDefinition (required) :param UNKNOWN_BASE_TYPE body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1beta1CustomResourceDefinition If the method is called asynchronously, returns the request thread.
[ "patch_custom_resource_definition_status", "#", "noqa", ":", "E501" ]
python
train
62.625
ewels/MultiQC
multiqc/modules/bismark/bismark.py
https://github.com/ewels/MultiQC/blob/2037d6322b2554146a74efbf869156ad20d4c4ec/multiqc/modules/bismark/bismark.py#L186-L232
def parse_bismark_mbias(self, f): """ Parse the Bismark M-Bias plot data """ s = f['s_name'] self.bismark_mbias_data['meth']['CpG_R1'][s] = {} self.bismark_mbias_data['meth']['CHG_R1'][s] = {} self.bismark_mbias_data['meth']['CHH_R1'][s] = {} self.bismark_mbias_data['cov']['CpG_R1'][s] = {} self.bismark_mbias_data['cov']['CHG_R1'][s] = {} self.bismark_mbias_data['cov']['CHH_R1'][s] = {} self.bismark_mbias_data['meth']['CpG_R2'][s] = {} self.bismark_mbias_data['meth']['CHG_R2'][s] = {} self.bismark_mbias_data['meth']['CHH_R2'][s] = {} self.bismark_mbias_data['cov']['CpG_R2'][s] = {} self.bismark_mbias_data['cov']['CHG_R2'][s] = {} self.bismark_mbias_data['cov']['CHH_R2'][s] = {} key = None for l in f['f']: if 'context' in l: if 'CpG' in l: key = 'CpG' elif 'CHG' in l: key = 'CHG' elif 'CHH' in l: key = 'CHH' if '(R1)' in l: key += '_R1' elif '(R2)' in l: key += '_R2' else: key += '_R1' if key is not None: sections = l.split() try: pos = int(sections[0]) self.bismark_mbias_data['meth'][key][s][pos] = float(sections[3]) self.bismark_mbias_data['cov'][key][s][pos] = int(sections[4]) except (IndexError, ValueError): continue # Remove empty dicts (eg. R2 for SE data) for t in self.bismark_mbias_data: for k in self.bismark_mbias_data[t]: self.bismark_mbias_data[t][k] = { s_name: self.bismark_mbias_data[t][k][s_name] for s_name in self.bismark_mbias_data[t][k] if len(self.bismark_mbias_data[t][k][s_name]) > 0 }
[ "def", "parse_bismark_mbias", "(", "self", ",", "f", ")", ":", "s", "=", "f", "[", "'s_name'", "]", "self", ".", "bismark_mbias_data", "[", "'meth'", "]", "[", "'CpG_R1'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'meth'", "]", "[", "'CHG_R1'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'meth'", "]", "[", "'CHH_R1'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'cov'", "]", "[", "'CpG_R1'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'cov'", "]", "[", "'CHG_R1'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'cov'", "]", "[", "'CHH_R1'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'meth'", "]", "[", "'CpG_R2'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'meth'", "]", "[", "'CHG_R2'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'meth'", "]", "[", "'CHH_R2'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'cov'", "]", "[", "'CpG_R2'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'cov'", "]", "[", "'CHG_R2'", "]", "[", "s", "]", "=", "{", "}", "self", ".", "bismark_mbias_data", "[", "'cov'", "]", "[", "'CHH_R2'", "]", "[", "s", "]", "=", "{", "}", "key", "=", "None", "for", "l", "in", "f", "[", "'f'", "]", ":", "if", "'context'", "in", "l", ":", "if", "'CpG'", "in", "l", ":", "key", "=", "'CpG'", "elif", "'CHG'", "in", "l", ":", "key", "=", "'CHG'", "elif", "'CHH'", "in", "l", ":", "key", "=", "'CHH'", "if", "'(R1)'", "in", "l", ":", "key", "+=", "'_R1'", "elif", "'(R2)'", "in", "l", ":", "key", "+=", "'_R2'", "else", ":", "key", "+=", "'_R1'", "if", "key", "is", "not", "None", ":", "sections", "=", "l", ".", "split", "(", ")", "try", ":", "pos", "=", "int", "(", "sections", "[", "0", "]", ")", "self", ".", "bismark_mbias_data", "[", "'meth'", "]", "[", 
"key", "]", "[", "s", "]", "[", "pos", "]", "=", "float", "(", "sections", "[", "3", "]", ")", "self", ".", "bismark_mbias_data", "[", "'cov'", "]", "[", "key", "]", "[", "s", "]", "[", "pos", "]", "=", "int", "(", "sections", "[", "4", "]", ")", "except", "(", "IndexError", ",", "ValueError", ")", ":", "continue", "# Remove empty dicts (eg. R2 for SE data)", "for", "t", "in", "self", ".", "bismark_mbias_data", ":", "for", "k", "in", "self", ".", "bismark_mbias_data", "[", "t", "]", ":", "self", ".", "bismark_mbias_data", "[", "t", "]", "[", "k", "]", "=", "{", "s_name", ":", "self", ".", "bismark_mbias_data", "[", "t", "]", "[", "k", "]", "[", "s_name", "]", "for", "s_name", "in", "self", ".", "bismark_mbias_data", "[", "t", "]", "[", "k", "]", "if", "len", "(", "self", ".", "bismark_mbias_data", "[", "t", "]", "[", "k", "]", "[", "s_name", "]", ")", ">", "0", "}" ]
Parse the Bismark M-Bias plot data
[ "Parse", "the", "Bismark", "M", "-", "Bias", "plot", "data" ]
python
train
42.489362
googleapis/oauth2client
oauth2client/transport.py
https://github.com/googleapis/oauth2client/blob/50d20532a748f18e53f7d24ccbe6647132c979a9/oauth2client/transport.py#L204-L251
def wrap_http_for_jwt_access(credentials, http): """Prepares an HTTP object's request method for JWT access. Wraps HTTP requests with logic to catch auth failures (typically identified via a 401 status code). In the event of failure, tries to refresh the token used and then retry the original request. Args: credentials: _JWTAccessCredentials, the credentials used to identify a service account that uses JWT access tokens. http: httplib2.Http, an http object to be used to make auth requests. """ orig_request_method = http.request wrap_http_for_auth(credentials, http) # The new value of ``http.request`` set by ``wrap_http_for_auth``. authenticated_request_method = http.request # The closure that will replace 'httplib2.Http.request'. def new_request(uri, method='GET', body=None, headers=None, redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None): if 'aud' in credentials._kwargs: # Preemptively refresh token, this is not done for OAuth2 if (credentials.access_token is None or credentials.access_token_expired): credentials.refresh(None) return request(authenticated_request_method, uri, method, body, headers, redirections, connection_type) else: # If we don't have an 'aud' (audience) claim, # create a 1-time token with the uri root as the audience headers = _initialize_headers(headers) _apply_user_agent(headers, credentials.user_agent) uri_root = uri.split('?', 1)[0] token, unused_expiry = credentials._create_token({'aud': uri_root}) headers['Authorization'] = 'Bearer ' + token return request(orig_request_method, uri, method, body, clean_headers(headers), redirections, connection_type) # Replace the request method with our own closure. http.request = new_request # Set credentials as a property of the request method. http.request.credentials = credentials
[ "def", "wrap_http_for_jwt_access", "(", "credentials", ",", "http", ")", ":", "orig_request_method", "=", "http", ".", "request", "wrap_http_for_auth", "(", "credentials", ",", "http", ")", "# The new value of ``http.request`` set by ``wrap_http_for_auth``.", "authenticated_request_method", "=", "http", ".", "request", "# The closure that will replace 'httplib2.Http.request'.", "def", "new_request", "(", "uri", ",", "method", "=", "'GET'", ",", "body", "=", "None", ",", "headers", "=", "None", ",", "redirections", "=", "httplib2", ".", "DEFAULT_MAX_REDIRECTS", ",", "connection_type", "=", "None", ")", ":", "if", "'aud'", "in", "credentials", ".", "_kwargs", ":", "# Preemptively refresh token, this is not done for OAuth2", "if", "(", "credentials", ".", "access_token", "is", "None", "or", "credentials", ".", "access_token_expired", ")", ":", "credentials", ".", "refresh", "(", "None", ")", "return", "request", "(", "authenticated_request_method", ",", "uri", ",", "method", ",", "body", ",", "headers", ",", "redirections", ",", "connection_type", ")", "else", ":", "# If we don't have an 'aud' (audience) claim,", "# create a 1-time token with the uri root as the audience", "headers", "=", "_initialize_headers", "(", "headers", ")", "_apply_user_agent", "(", "headers", ",", "credentials", ".", "user_agent", ")", "uri_root", "=", "uri", ".", "split", "(", "'?'", ",", "1", ")", "[", "0", "]", "token", ",", "unused_expiry", "=", "credentials", ".", "_create_token", "(", "{", "'aud'", ":", "uri_root", "}", ")", "headers", "[", "'Authorization'", "]", "=", "'Bearer '", "+", "token", "return", "request", "(", "orig_request_method", ",", "uri", ",", "method", ",", "body", ",", "clean_headers", "(", "headers", ")", ",", "redirections", ",", "connection_type", ")", "# Replace the request method with our own closure.", "http", ".", "request", "=", "new_request", "# Set credentials as a property of the request method.", "http", ".", "request", ".", "credentials", "=", 
"credentials" ]
Prepares an HTTP object's request method for JWT access. Wraps HTTP requests with logic to catch auth failures (typically identified via a 401 status code). In the event of failure, tries to refresh the token used and then retry the original request. Args: credentials: _JWTAccessCredentials, the credentials used to identify a service account that uses JWT access tokens. http: httplib2.Http, an http object to be used to make auth requests.
[ "Prepares", "an", "HTTP", "object", "s", "request", "method", "for", "JWT", "access", "." ]
python
valid
45.75
vnmabus/dcor
dcor/distances.py
https://github.com/vnmabus/dcor/blob/b0ff1273c0a52efdabdfdadefc7ff2a49def7e8d/dcor/distances.py#L92-L149
def pairwise_distances(x, y=None, **kwargs): r""" pairwise_distances(x, y=None, *, exponent=1) Pairwise distance between points. Return the pairwise distance between points in two sets, or in the same set if only one set is passed. Parameters ---------- x: array_like An :math:`n \times m` array of :math:`n` observations in a :math:`m`-dimensional space. y: array_like An :math:`l \times m` array of :math:`l` observations in a :math:`m`-dimensional space. If None, the distances will be computed between the points in :math:`x`. exponent: float Exponent of the Euclidean distance. Returns ------- numpy ndarray A :math:`n \times l` matrix where the :math:`(i, j)`-th entry is the distance between :math:`x[i]` and :math:`y[j]`. Examples -------- >>> import numpy as np >>> import dcor >>> a = np.array([[1, 2, 3, 4], ... [5, 6, 7, 8], ... [9, 10, 11, 12], ... [13, 14, 15, 16]]) >>> b = np.array([[16, 15, 14, 13], ... [12, 11, 10, 9], ... [8, 7, 6, 5], ... [4, 3, 2, 1]]) >>> dcor.distances.pairwise_distances(a) array([[ 0., 8., 16., 24.], [ 8., 0., 8., 16.], [16., 8., 0., 8.], [24., 16., 8., 0.]]) >>> dcor.distances.pairwise_distances(a, b) array([[24.41311123, 16.61324773, 9.16515139, 4.47213595], [16.61324773, 9.16515139, 4.47213595, 9.16515139], [ 9.16515139, 4.47213595, 9.16515139, 16.61324773], [ 4.47213595, 9.16515139, 16.61324773, 24.41311123]]) """ x = _transform_to_2d(x) if y is None or y is x: return _pdist(x, **kwargs) else: y = _transform_to_2d(y) return _cdist(x, y, **kwargs)
[ "def", "pairwise_distances", "(", "x", ",", "y", "=", "None", ",", "*", "*", "kwargs", ")", ":", "x", "=", "_transform_to_2d", "(", "x", ")", "if", "y", "is", "None", "or", "y", "is", "x", ":", "return", "_pdist", "(", "x", ",", "*", "*", "kwargs", ")", "else", ":", "y", "=", "_transform_to_2d", "(", "y", ")", "return", "_cdist", "(", "x", ",", "y", ",", "*", "*", "kwargs", ")" ]
r""" pairwise_distances(x, y=None, *, exponent=1) Pairwise distance between points. Return the pairwise distance between points in two sets, or in the same set if only one set is passed. Parameters ---------- x: array_like An :math:`n \times m` array of :math:`n` observations in a :math:`m`-dimensional space. y: array_like An :math:`l \times m` array of :math:`l` observations in a :math:`m`-dimensional space. If None, the distances will be computed between the points in :math:`x`. exponent: float Exponent of the Euclidean distance. Returns ------- numpy ndarray A :math:`n \times l` matrix where the :math:`(i, j)`-th entry is the distance between :math:`x[i]` and :math:`y[j]`. Examples -------- >>> import numpy as np >>> import dcor >>> a = np.array([[1, 2, 3, 4], ... [5, 6, 7, 8], ... [9, 10, 11, 12], ... [13, 14, 15, 16]]) >>> b = np.array([[16, 15, 14, 13], ... [12, 11, 10, 9], ... [8, 7, 6, 5], ... [4, 3, 2, 1]]) >>> dcor.distances.pairwise_distances(a) array([[ 0., 8., 16., 24.], [ 8., 0., 8., 16.], [16., 8., 0., 8.], [24., 16., 8., 0.]]) >>> dcor.distances.pairwise_distances(a, b) array([[24.41311123, 16.61324773, 9.16515139, 4.47213595], [16.61324773, 9.16515139, 4.47213595, 9.16515139], [ 9.16515139, 4.47213595, 9.16515139, 16.61324773], [ 4.47213595, 9.16515139, 16.61324773, 24.41311123]])
[ "r", "pairwise_distances", "(", "x", "y", "=", "None", "*", "exponent", "=", "1", ")" ]
python
train
31.689655