Dataset schema (column: type, observed range):

repo: string (length 7 to 55)
path: string (length 4 to 223)
url: string (length 87 to 315)
code: string (length 75 to 104k)
code_tokens: list
docstring: string (length 1 to 46.9k)
docstring_tokens: list
language: string (1 distinct value)
partition: string (3 distinct values)
avg_line_len: float64 (7.91 to 980)
repo: SKA-ScienceDataProcessor/integration-prototype
path: sip/execution_control/master_controller/master_controller_healthcheck.py
url: https://github.com/SKA-ScienceDataProcessor/integration-prototype/blob/8c8006de6ad71dcd44114b0338780738079c87d4/sip/execution_control/master_controller/master_controller_healthcheck.py#L41-L64

def get_services_health(self) -> dict:
    """Get the health of all services.

    Returns:
        dict, services id and health status
    """
    # Initialise
    services_health = {}

    # Get Service IDs
    services_ids = self._get_services()
    for service_id in services_ids:
        service_name = DC.get_service_name(service_id)

        # Check if the current and actual replica levels are the same
        if DC.get_replicas(service_id) != \
                DC.get_actual_replica(service_id):
            services_health[service_name] = "Unhealthy"
        else:
            services_health[service_name] = "Healthy"
    return services_health

language: python | partition: train | avg_line_len: 29.333333
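
A hedged usage sketch (`controller` stands in for the master controller instance that exposes this method):

health = controller.get_services_health()
unhealthy = [name for name, status in health.items() if status == "Unhealthy"]
print(unhealthy)
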
repo: treyhunner/django-simple-history
path: simple_history/management/commands/populate_history.py
url: https://github.com/treyhunner/django-simple-history/blob/85758ecfe608279508a3fb5b71654d3e202eb63d/simple_history/management/commands/populate_history.py#L113-L158

def _bulk_history_create(self, model, batch_size):
    """Save a copy of all instances to the historical model.

    :param model: Model you want to bulk create
    :param batch_size: number of models to create at once.
    :return:
    """
    instances = []
    history = utils.get_history_manager_for_model(model)
    if self.verbosity >= 1:
        self.stdout.write(
            "Starting bulk creating history models for {} instances {}-{}".format(
                model, 0, batch_size
            )
        )

    iterator_kwargs = (
        {"chunk_size": batch_size} if django.VERSION >= (2, 0, 0) else {}
    )
    for index, instance in enumerate(
        model._default_manager.iterator(**iterator_kwargs)
    ):
        # Can't just pass batch_size to bulk_create as this can lead to
        # Out of Memory Errors as we load too many models into memory after
        # creating them. So we only keep batch_size worth of models in
        # `instances` and clear them after we hit batch_size
        if index % batch_size == 0:
            history.bulk_history_create(instances, batch_size=batch_size)

            instances = []

            if self.verbosity >= 1:
                self.stdout.write(
                    "Finished bulk creating history models for {} "
                    "instances {}-{}, starting next {}".format(
                        model, index - batch_size, index, batch_size
                    )
                )

        instances.append(instance)

    # create any we didn't get in the last loop
    if instances:
        history.bulk_history_create(instances, batch_size=batch_size)

language: python | partition: train | avg_line_len: 37.869565
repo: AndrewAnnex/SpiceyPy
path: spiceypy/spiceypy.py
url: https://github.com/AndrewAnnex/SpiceyPy/blob/fc20a9b9de68b58eed5b332f0c051fb343a6e335/spiceypy/spiceypy.py#L12112-L12126

def spkopa(filename):
    """
    Open an existing SPK file for subsequent write.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkopa_c.html

    :param filename: The name of an existing SPK file.
    :type filename: str
    :return: A handle attached to the SPK file opened to append.
    :rtype: int
    """
    filename = stypes.stringToCharP(filename)
    handle = ctypes.c_int()
    libspice.spkopa_c(filename, ctypes.byref(handle))
    return handle.value

language: python | partition: train | avg_line_len: 30.866667
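
A minimal usage sketch (the kernel file name is hypothetical; `spkcls`, from the same module, closes the handle):

import spiceypy

# Open an existing SPK kernel for appending new segments, then close it.
handle = spiceypy.spkopa("existing_ephemeris.bsp")
# ... write additional segments with the spkw* family of routines ...
spiceypy.spkcls(handle)
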
repo: juju/charm-helpers
path: charmhelpers/contrib/network/ufw.py
url: https://github.com/juju/charm-helpers/blob/aa785c40c3b7a8c69dbfbc7921d6b9f30142e171/charmhelpers/contrib/network/ufw.py#L238-L287

def modify_access(src, dst='any', port=None, proto=None, action='allow',
                  index=None):
    """
    Grant access to an address or subnet

    :param src: address (e.g. 192.168.1.234) or subnet (e.g. 192.168.1.0/24).
    :param dst: destination of the connection; if the machine has multiple
                IPs and only connections to one of them should be accepted,
                this field has to be set.
    :param port: destination port
    :param proto: protocol (tcp or udp)
    :param action: `allow` or `delete`
    :param index: if different from None the rule is inserted at the given
                  `index`.
    """
    if not is_enabled():
        hookenv.log('ufw is disabled, skipping modify_access()', level='WARN')
        return

    if action == 'delete':
        cmd = ['ufw', 'delete', 'allow']
    elif index is not None:
        cmd = ['ufw', 'insert', str(index), action]
    else:
        cmd = ['ufw', action]

    if src is not None:
        cmd += ['from', src]

    if dst is not None:
        cmd += ['to', dst]

    if port is not None:
        cmd += ['port', str(port)]

    if proto is not None:
        cmd += ['proto', proto]

    hookenv.log('ufw {}: {}'.format(action, ' '.join(cmd)), level='DEBUG')
    # Capture stderr as well; the original only piped stdout, so the
    # stderr value logged below was always None.
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = p.communicate()

    hookenv.log(stdout, level='INFO')

    if p.returncode != 0:
        hookenv.log(stderr, level='ERROR')
        hookenv.log('Error running: {}, exit code: {}'.format(' '.join(cmd),
                    p.returncode), level='ERROR')

language: python | partition: train | avg_line_len: 32.24
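
A hedged invocation sketch (addresses and port are illustrative):

# Allow TCP traffic to port 5432 on 10.0.0.5 from a single host,
# inserting the rule at the top of the chain.
modify_access('192.168.1.234', dst='10.0.0.5', port=5432, proto='tcp',
              action='allow', index=1)
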
repo: kumar303/mohawk
path: mohawk/receiver.py
url: https://github.com/kumar303/mohawk/blob/037be67ccf50ae704705e67add44e02737a65d21/mohawk/receiver.py#L123-L171

def respond(self,
            content=EmptyValue,
            content_type=EmptyValue,
            always_hash_content=True,
            ext=None):
    """
    Respond to the request.

    This generates the :attr:`mohawk.Receiver.response_header` attribute.

    :param content=EmptyValue: Byte string of response body that will be sent.
    :type content=EmptyValue: str

    :param content_type=EmptyValue: content-type header value for response.
    :type content_type=EmptyValue: str

    :param always_hash_content=True:
        When True, ``content`` and ``content_type`` must be provided.
        Read :ref:`skipping-content-checks` to learn more.
    :type always_hash_content=True: bool

    :param ext=None:
        An external `Hawk`_ string. If not None, this value will be signed
        so that the sender can trust it.
    :type ext=None: str

    .. _`Hawk`: https://github.com/hueniverse/hawk
    """
    log.debug('generating response header')

    resource = Resource(url=self.resource.url,
                        credentials=self.resource.credentials,
                        ext=ext,
                        app=self.parsed_header.get('app', None),
                        dlg=self.parsed_header.get('dlg', None),
                        method=self.resource.method,
                        content=content,
                        content_type=content_type,
                        always_hash_content=always_hash_content,
                        nonce=self.parsed_header['nonce'],
                        timestamp=self.parsed_header['ts'])

    mac = calculate_mac('response', resource, resource.gen_content_hash())

    self.response_header = self._make_header(resource, mac,
                                             additional_keys=['ext'])
    return self.response_header

language: python | partition: train | avg_line_len: 39.040816
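
In context, `respond()` is the second half of the receiver flow; a hedged sketch of the round trip (the credential lookup callable, request values, and URL are illustrative, not taken from this file):

from mohawk import Receiver

receiver = Receiver(lookup_credentials,
                    request_header,
                    'https://api.example.com/resource',
                    'POST',
                    content=request_body,
                    content_type='application/json')

# After handling the request, sign the response for the sender to verify.
header = receiver.respond(content=response_body,
                          content_type='application/json')
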
repo: nerdvegas/rez
path: src/rez/backport/zipfile.py
url: https://github.com/nerdvegas/rez/blob/1d3b846d53b5b5404edfe8ddb9083f9ceec8c5e7/src/rez/backport/zipfile.py#L801-L806

def printdir(self):
    """Print a table of contents for the zip file."""
    print "%-46s %19s %12s" % ("File Name", "Modified ", "Size")
    for zinfo in self.filelist:
        date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
        print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)

language: python | partition: train | avg_line_len: 54.5
repo: Esri/ArcREST
path: src/arcrest/agol/services.py
url: https://github.com/Esri/ArcREST/blob/ab240fde2b0200f61d4a5f6df033516e53f2f416/src/arcrest/agol/services.py#L2243-L2291

def addFeature(self, features, gdbVersion=None, rollbackOnFailure=True):
    """ Adds one or more features to the service
        Inputs:
            features - list of common.Feature objects or a single
                       common.Feature object, a FeatureSet object, or a
                       list of dictionary objects
            gdbVersion - Geodatabase version to apply the edits
            rollbackOnFailure - Optional parameter to specify if the edits
                                should be applied only if all submitted
                                edits succeed. If false, the server will
                                apply the edits that succeed even if some
                                of the submitted edits fail. If true, the
                                server will apply the edits only if all
                                edits succeed. The default value is true.
        Output:
            JSON message as dictionary
    """
    url = self._url + "/addFeatures"
    params = {
        "f" : "json"
    }
    if gdbVersion is not None:
        params['gdbVersion'] = gdbVersion
    if isinstance(rollbackOnFailure, bool):
        params['rollbackOnFailure'] = rollbackOnFailure
    if isinstance(features, list) and \
       len(features) > 0:
        if isinstance(features[0], Feature):
            params['features'] = json.dumps([feature.asDictionary for feature in features],
                                            default=_date_handler)
        elif isinstance(features[0], dict):
            params['features'] = json.dumps(features,
                                            default=_date_handler)
    elif isinstance(features, Feature):
        params['features'] = json.dumps([features.asDictionary],
                                        default=_date_handler)
    elif isinstance(features, FeatureSet):
        params['features'] = json.dumps([feature.asDictionary for feature in features.features],
                                        default=_date_handler)
    else:
        return None
    return self._post(url=url,
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_port=self._proxy_port,
                      proxy_url=self._proxy_url)

language: python | partition: train | avg_line_len: 50.387755
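
A hedged usage sketch; `fl` stands in for a FeatureLayer-style service object from this module, and the `Feature(json_string=...)` construction is an assumption about the common.Feature API rather than a documented call:

from arcrest.common.general import Feature

# Hypothetical single-point feature; geometry and attributes are illustrative.
feat = Feature(json_string={'geometry': {'x': -122.4, 'y': 37.8},
                            'attributes': {'name': 'sample point'}})
result = fl.addFeature(features=feat, rollbackOnFailure=True)
print(result)
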
repo: agoragames/leaderboard-python
path: leaderboard/leaderboard.py
url: https://github.com/agoragames/leaderboard-python/blob/ec309859b197a751ac0322374b36d134d8c5522f/leaderboard/leaderboard.py#L1024-L1033

def ranked_in_list(self, members, **options):
    '''
    Retrieve a page of leaders from the leaderboard for a given list of members.

    @param members [Array] Member names.
    @param options [Hash] Options to be used when retrieving the page from the leaderboard.
    @return a page of leaders from the leaderboard for a given list of members.
    '''
    return self.ranked_in_list_in(
        self.leaderboard_name, members, **options)

language: python | partition: train | avg_line_len: 46.5
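
A hedged usage sketch (member names and scores are illustrative; `Leaderboard` connects to a local Redis instance by default):

from leaderboard import Leaderboard

highscores = Leaderboard('highscores')
highscores.rank_member('alice', 680)
highscores.rank_member('bob', 410)

# Fetch leaderboard entries for just these two members.
print(highscores.ranked_in_list(['alice', 'bob']))
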
repo: Spinmob/spinmob
path: egg/_gui.py
url: https://github.com/Spinmob/spinmob/blob/f037f5df07f194bcd4a01f4d9916e57b9e8fb45a/egg/_gui.py#L2866-L2876

def _synchronize_controls(self):
    """
    Updates the gui based on button configs.
    """
    # whether the script is visible
    self.grid_script._widget.setVisible(self.button_script.get_value())

    # whether we should be able to edit it.
    if not self.combo_autoscript.get_index() == 0:
        self.script.disable()
    else:
        self.script.enable()

language: python | partition: train | avg_line_len: 37.272727
repo: xapple/fasta
path: fasta/common.py
url: https://github.com/xapple/fasta/blob/a827c3138812d555203be45187ffae1277dd0d76/fasta/common.py#L6-L10

def add_dummy_scores(iteratable, score=0):
    """Add dummy phred-quality scores (``score``, default 0) to all sequences"""
    for seq in iteratable:
        seq.letter_annotations["phred_quality"] = (score,)*len(seq)
        yield seq

language: python | partition: train | avg_line_len: 38.8
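
A hedged usage sketch with Biopython (file names are hypothetical): convert FASTA to FASTQ, a format whose writer requires per-letter "phred_quality" annotations.

from Bio import SeqIO

records = SeqIO.parse("reads.fasta", "fasta")
SeqIO.write(add_dummy_scores(records, score=40), "reads.fastq", "fastq")
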
repo: StackStorm/pybind
path: pybind/slxos/v17s_1_02/qos/__init__.py
url: https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/slxos/v17s_1_02/qos/__init__.py#L168-L189

def _set_tx_queue(self, v, load=False):
    """
    Setter method for tx_queue, mapped from YANG variable /qos/tx_queue (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_tx_queue is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_tx_queue() directly.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=tx_queue.tx_queue, is_container='container',
                         presence=False, yang_name="tx-queue",
                         rest_name="tx-queue", parent=self,
                         path_helper=self._path_helper,
                         extmethods=self._extmethods, register_paths=True,
                         extensions={u'tailf-common': {u'info': u'Configure Eegress Queue', u'callpoint': u'qos_transmit_queue', u'cli-incomplete-no': None}},
                         namespace='urn:brocade.com:mgmt:brocade-qos-mls',
                         defining_module='brocade-qos-mls',
                         yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """tx_queue must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=tx_queue.tx_queue, is_container='container', presence=False, yang_name="tx-queue", rest_name="tx-queue", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Eegress Queue', u'callpoint': u'qos_transmit_queue', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='container', is_config=True)""",
        })

    self.__tx_queue = t
    if hasattr(self, '_set'):
        self._set()

language: python | partition: train | avg_line_len: 76.727273
repo: iotile/coretools
path: scripts/release.py
url: https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/scripts/release.py#L30-L40

def send_slack_message(message):
    """Send a message to the slack channel #coretools"""

    if 'SLACK_WEB_HOOK' not in os.environ:
        raise EnvironmentError("Could not find SLACK_WEB_HOOK environment variable")

    webhook = os.environ['SLACK_WEB_HOOK']

    r = requests.post(webhook, json={'text': message, 'username': 'Release Bot'})
    if r.status_code != 200:
        raise RuntimeError("Could not post message to slack channel")

language: python | partition: train | avg_line_len: 39.363636
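
A hedged invocation sketch (the webhook URL is a placeholder, not a real endpoint):

import os

os.environ['SLACK_WEB_HOOK'] = 'https://hooks.slack.com/services/T000/B000/XXXX'
send_slack_message("coretools release finished")
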
repo: klen/pylama
path: pylama/main.py
url: https://github.com/klen/pylama/blob/f436ccc6b55b33381a295ded753e467953cf4379/pylama/main.py#L82-L101

def process_paths(options, candidates=None, error=True):
    """Process files and log errors."""
    errors = check_path(options, rootdir=CURDIR, candidates=candidates)

    if options.format in ['pycodestyle', 'pep8']:
        pattern = "%(filename)s:%(lnum)s:%(col)s: %(text)s"
    elif options.format == 'pylint':
        pattern = "%(filename)s:%(lnum)s: [%(type)s] %(text)s"
    else:  # 'parsable'
        pattern = "%(filename)s:%(lnum)s:%(col)s: [%(type)s] %(text)s"

    for er in errors:
        if options.abspath:
            er._info['filename'] = op.abspath(er.filename)
        LOGGER.warning(pattern, er._info)

    if error:
        sys.exit(int(bool(errors)))

    return errors

language: python | partition: train | avg_line_len: 33.85
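
A hedged programmatic sketch; `parse_options` is assumed importable from `pylama.main` alongside this function, `error=False` keeps the error list instead of exiting, and the target file name is hypothetical:

from pylama.main import parse_options

options = parse_options(['my_module.py'])
errors = process_paths(options, error=False)
print(len(errors), 'issues found')
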
repo: pandas-dev/pandas
path: pandas/core/arrays/datetimelike.py
url: https://github.com/pandas-dev/pandas/blob/9feb3ad92cc0397a04b665803a49299ee7aa1037/pandas/core/arrays/datetimelike.py#L986-L1007

def _add_delta_tdi(self, other):
    """
    Add a delta of a TimedeltaIndex; return the i8 result view.
    """
    if len(self) != len(other):
        raise ValueError("cannot add indices of unequal length")

    if isinstance(other, np.ndarray):
        # ndarray[timedelta64]; wrap in TimedeltaIndex for op
        from pandas import TimedeltaIndex
        other = TimedeltaIndex(other)

    self_i8 = self.asi8
    other_i8 = other.asi8
    new_values = checked_add_with_arr(self_i8, other_i8,
                                      arr_mask=self._isnan,
                                      b_mask=other._isnan)
    if self._hasnans or other._hasnans:
        mask = (self._isnan) | (other._isnan)
        new_values[mask] = iNaT
    return new_values.view('i8')

language: python | partition: train | avg_line_len: 37.5
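
The public API exercises this helper whenever a TimedeltaIndex is added to a datetime-like index; for instance:

import pandas as pd

dti = pd.date_range("2019-01-01", periods=3)
tdi = pd.to_timedelta(["1 day", "2 days", "3 days"])

# Elementwise addition; internally the i8 views are combined with
# NaT-aware checked addition, as in _add_delta_tdi above.
print(dti + tdi)
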
repo: rwl/pylon
path: pylon/solver.py
url: https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/solver.py#L726-L732

def _consfcn(self, x):
    """ Evaluates nonlinear constraints and their Jacobian for OPF.
    """
    h, g = self._gh(x)
    dh, dg = self._dgh(x)

    return h, g, dh, dg

language: python | partition: train | avg_line_len: 26.571429
repo: mbarakaja/braulio
path: braulio/cli.py
url: https://github.com/mbarakaja/braulio/blob/70ab6f0dd631ef78c4da1b39d1c6fb6f9a995d2b/braulio/cli.py#L94-L107

def bump_option_validator(ctx, param, value):
    """If a value is provided, check that it is a valid version string;
    if it is not, raise :class:`click.UsageError`.

    Return a :class:`~braulio.version.Version` object or **None**.
    """
    if value:
        try:
            value = Version(value)
        except ValueError:
            ctx.fail(f"{x_mark} {value} is not a valid version string")

    return value

language: python | partition: train | avg_line_len: 29.5
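
A sketch of how such a click callback is typically wired up (the command and option names are illustrative, not braulio's actual CLI):

import click

@click.command()
@click.option("--bump", default=None, callback=bump_option_validator)
def release(bump):
    click.echo(f"bump target: {bump}" if bump else "no explicit version given")
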
repo: HumanCellAtlas/dcp-cli
path: hca/util/fs_helper.py
url: https://github.com/HumanCellAtlas/dcp-cli/blob/cc70817bc4e50944c709eaae160de0bf7a19f0f3/hca/util/fs_helper.py#L17-L24

def get_days_since_last_modified(filename):
    """
    :param filename: Absolute file path
    :return: Number of days since filename's last modified time
    """
    now = datetime.now()
    last_modified = datetime.fromtimestamp(os.path.getmtime(filename))
    return (now - last_modified).days

language: python | partition: train | avg_line_len: 39.75
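
A hedged usage sketch (the cache path is hypothetical):

import os

CACHE_FILE = '/tmp/hca-cache.json'

# Drop the cache once it is more than 30 days old.
if os.path.exists(CACHE_FILE) and get_days_since_last_modified(CACHE_FILE) > 30:
    os.remove(CACHE_FILE)
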
repo: jobovy/galpy
path: galpy/df/quasiisothermaldf.py
url: https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/df/quasiisothermaldf.py#L577-L749

def _vmomentdensity(self, R, z, n, m, o, nsigma=None, mc=False, nmc=10000,
                    _returnmc=False, _vrs=None, _vts=None, _vzs=None,
                    _rawgausssamples=False,
                    gl=False, ngl=_DEFAULTNGL, _returngl=False, _glqeval=None,
                    _return_actions=False, _jr=None, _lz=None, _jz=None,
                    _return_freqs=False,
                    _rg=None, _kappa=None, _nu=None, _Omega=None,
                    _sigmaR1=None, _sigmaz1=None,
                    **kwargs):
    """Non-physical version of vmomentdensity, otherwise the same"""
    if isinstance(R, numpy.ndarray):
        return numpy.array([self._vmomentdensity(r, zz, n, m, o,
                                                 nsigma=nsigma, mc=mc, nmc=nmc,
                                                 gl=gl, ngl=ngl, **kwargs)
                            for r, zz in zip(R, z)])
    if isinstance(self._aA, (actionAngle.actionAngleAdiabatic,
                             actionAngle.actionAngleAdiabaticGrid)):
        if n % 2 == 1. or o % 2 == 1.:
            return 0.  # we know this must be the case
    if nsigma == None:
        nsigma = _NSIGMA
    if _sigmaR1 is None:
        sigmaR1 = self._sr*numpy.exp((self._refr-R)/self._hsr)
    else:
        sigmaR1 = _sigmaR1
    if _sigmaz1 is None:
        sigmaz1 = self._sz*numpy.exp((self._refr-R)/self._hsz)
    else:
        sigmaz1 = _sigmaz1
    thisvc = potential.vcirc(self._pot, R, use_physical=False)
    # Use the asymmetric drift equation to estimate va
    gamma = numpy.sqrt(0.5)
    va = sigmaR1**2./2./thisvc\
        *(gamma**2.-1.  # Assume close to flat rotation curve, sigphi2/sigR2 =~ 0.5
          + R*(1./self._hr+2./self._hsr))
    if math.fabs(va) > sigmaR1:
        va = 0.  # To avoid craziness near the center
    if gl:
        if ngl % 2 == 1:
            raise ValueError("ngl must be even")
        if not _glqeval is None and ngl != _glqeval.shape[0]:
            _glqeval = None
        # Use Gauss-Legendre integration for all
        if ngl == _DEFAULTNGL:
            glx, glw = self._glxdef, self._glwdef
            glx12, glw12 = self._glxdef12, self._glwdef12
        elif ngl == _DEFAULTNGL2:
            glx, glw = self._glxdef2, self._glwdef2
            glx12, glw12 = self._glxdef, self._glwdef
        else:
            glx, glw = numpy.polynomial.legendre.leggauss(ngl)
            glx12, glw12 = numpy.polynomial.legendre.leggauss(ngl//2)
        # Evaluate everywhere
        if isinstance(self._aA, (actionAngle.actionAngleAdiabatic,
                                 actionAngle.actionAngleAdiabaticGrid)):
            vRgl = nsigma*sigmaR1/2.*(glx+1.)
            vzgl = nsigma*sigmaz1/2.*(glx+1.)
            vRglw = glw
            vzglw = glw
        else:
            vRgl = nsigma*sigmaR1/2.*(glx12+1.)
            #vRgl= 1.5/2.*(glx12+1.)
            vRgl = list(vRgl)
            vRgl.extend(-nsigma*sigmaR1/2.*(glx12+1.))
            #vRgl.extend(-1.5/2.*(glx12+1.))
            vRgl = numpy.array(vRgl)
            vzgl = nsigma*sigmaz1/2.*(glx12+1.)
            #vzgl= 1.5/2.*(glx12+1.)
            vzgl = list(vzgl)
            vzgl.extend(-nsigma*sigmaz1/2.*(glx12+1.))
            #vzgl.extend(-1.5/2.*(glx12+1.))
            vzgl = numpy.array(vzgl)
            vRglw = glw12
            vRglw = list(vRglw)
            vRglw.extend(glw12)
            vRglw = numpy.array(vRglw)
            vzglw = glw12
            vzglw = list(vzglw)
            vzglw.extend(glw12)
            vzglw = numpy.array(vzglw)
        if 'vTmax' in kwargs:
            vTmax = kwargs['vTmax']
        else:
            vTmax = 1.5
        vTgl = vTmax/2.*(glx+1.)
        # Tile everything
        vTgl = numpy.tile(vTgl, (ngl, ngl, 1)).T
        vRgl = numpy.tile(numpy.reshape(vRgl, (1, ngl)).T, (ngl, 1, ngl))
        vzgl = numpy.tile(vzgl, (ngl, ngl, 1))
        vTglw = numpy.tile(glw, (ngl, ngl, 1)).T  # also tile weights
        vRglw = numpy.tile(numpy.reshape(vRglw, (1, ngl)).T, (ngl, 1, ngl))
        vzglw = numpy.tile(vzglw, (ngl, ngl, 1))
        # evaluate
        if _glqeval is None and _jr is None:
            logqeval, jr, lz, jz, rg, kappa, nu, Omega = \
                self(R+numpy.zeros(ngl*ngl*ngl),
                     vRgl.flatten(),
                     vTgl.flatten(),
                     z+numpy.zeros(ngl*ngl*ngl),
                     vzgl.flatten(),
                     log=True,
                     _return_actions=True,
                     _return_freqs=True,
                     use_physical=False)
            logqeval = numpy.reshape(logqeval, (ngl, ngl, ngl))
        elif not _jr is None and _rg is None:
            logqeval, jr, lz, jz, rg, kappa, nu, Omega = \
                self((_jr, _lz, _jz),
                     log=True,
                     _return_actions=True,
                     _return_freqs=True,
                     use_physical=False)
            logqeval = numpy.reshape(logqeval, (ngl, ngl, ngl))
        elif not _jr is None and not _rg is None:
            logqeval, jr, lz, jz, rg, kappa, nu, Omega = \
                self((_jr, _lz, _jz),
                     rg=_rg, kappa=_kappa, nu=_nu,
                     Omega=_Omega,
                     log=True,
                     _return_actions=True,
                     _return_freqs=True,
                     use_physical=False)
            logqeval = numpy.reshape(logqeval, (ngl, ngl, ngl))
        else:
            logqeval = _glqeval
        if _returngl:
            return (numpy.sum(numpy.exp(logqeval)*vRgl**n*vTgl**m*vzgl**o
                              *vTglw*vRglw*vzglw)*sigmaR1*sigmaz1*0.125*vTmax*nsigma**2,
                    logqeval)
        elif _return_actions and _return_freqs:
            return (numpy.sum(numpy.exp(logqeval)*vRgl**n*vTgl**m*vzgl**o
                              *vTglw*vRglw*vzglw)*sigmaR1*sigmaz1*0.125*vTmax*nsigma**2,
                    jr, lz, jz,
                    rg, kappa, nu, Omega)
        elif _return_actions:
            return (numpy.sum(numpy.exp(logqeval)*vRgl**n*vTgl**m*vzgl**o
                              *vTglw*vRglw*vzglw)*sigmaR1*sigmaz1*0.125*vTmax*nsigma**2,
                    jr, lz, jz)
        else:
            return numpy.sum(numpy.exp(logqeval)*vRgl**n*vTgl**m*vzgl**o
                             *vTglw*vRglw*vzglw*sigmaR1*sigmaz1*0.125*vTmax*nsigma**2)
    elif mc:
        mvT = (thisvc-va)/gamma/sigmaR1
        if _vrs is None:
            vrs = numpy.random.normal(size=nmc)
        else:
            vrs = _vrs
        if _vts is None:
            vts = numpy.random.normal(size=nmc)+mvT
        else:
            if _rawgausssamples:
                vts = _vts+mvT
            else:
                vts = _vts
        if _vzs is None:
            vzs = numpy.random.normal(size=nmc)
        else:
            vzs = _vzs
        Is = _vmomentsurfaceMCIntegrand(vzs, vrs, vts, numpy.ones(nmc)*R,
                                        numpy.ones(nmc)*z,
                                        self, sigmaR1, gamma, sigmaz1, mvT,
                                        n, m, o)
        if _returnmc:
            if _rawgausssamples:
                return (numpy.mean(Is)*sigmaR1**(2.+n+m)*gamma**(1.+m)*sigmaz1**(1.+o),
                        vrs, vts-mvT, vzs)
            else:
                return (numpy.mean(Is)*sigmaR1**(2.+n+m)*gamma**(1.+m)*sigmaz1**(1.+o),
                        vrs, vts, vzs)
        else:
            return numpy.mean(Is)*sigmaR1**(2.+n+m)*gamma**(1.+m)*sigmaz1**(1.+o)
    else:  # pragma: no cover because this is too slow; a warning is shown
        warnings.warn("Calculations using direct numerical integration using tplquad is not recommended and extremely slow; it has also not been carefully tested", galpyWarning)
        return integrate.tplquad(_vmomentsurfaceIntegrand,
                                 1./gamma*(thisvc-va)/sigmaR1-nsigma,
                                 1./gamma*(thisvc-va)/sigmaR1+nsigma,
                                 lambda x: 0., lambda x: nsigma,
                                 lambda x, y: 0., lambda x, y: nsigma,
                                 (R, z, self, sigmaR1, gamma, sigmaz1, n, m, o),
                                 **kwargs)[0]*sigmaR1**(2.+n+m)*gamma**(1.+m)*sigmaz1**(1.+o)

language: python | partition: train | avg_line_len: 51.768786
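
The public `vmomentdensity` method wraps this helper; a hedged setup sketch following the galpy documentation (all parameter values are illustrative):

from galpy.potential import MWPotential2014
from galpy.actionAngle import actionAngleAdiabatic
from galpy.df import quasiisothermaldf

aA = actionAngleAdiabatic(pot=MWPotential2014, c=True)
qdf = quasiisothermaldf(1./3., 0.2, 0.1, 1., 1., pot=MWPotential2014, aA=aA,
                        cutcounter=True)
# Zeroth velocity moment (the spatial density) at R=0.9, z=0.1,
# computed with Gauss-Legendre integration:
print(qdf.vmomentdensity(0.9, 0.1, 0, 0, 0, gl=True))
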
repo: cloudsmith-io/cloudsmith-cli
path: cloudsmith_cli/cli/commands/copy.py
url: https://github.com/cloudsmith-io/cloudsmith-cli/blob/5bc245ca5d0bfa85380be48e7c206b4c86cc6c8e/cloudsmith_cli/cli/commands/copy.py#L28-L94

def copy(
    ctx,
    opts,
    owner_repo_package,
    destination,
    skip_errors,
    wait_interval,
    no_wait_for_sync,
    sync_attempts,
):
    """
    Copy a package to another repository.

    This requires appropriate permissions for both the source
    repository/package and the destination repository.

    - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the
    REPO name where the package is stored, and the PACKAGE name (slug) of the
    package itself. All separated by a slash.

      Example: 'your-org/awesome-repo/better-pkg'.

    - DEST: Specify the DEST (destination) repository to copy the package to.
    This *must* be in the same namespace as the source repository.

      Example: 'other-repo'

    Full CLI example:

      $ cloudsmith cp your-org/awesome-repo/better-pkg other-repo
    """
    owner, source, slug = owner_repo_package

    click.echo(
        "Copying %(slug)s package from %(source)s to %(dest)s ... "
        % {
            "slug": click.style(slug, bold=True),
            "source": click.style(source, bold=True),
            "dest": click.style(destination, bold=True),
        },
        nl=False,
    )

    context_msg = "Failed to copy package!"
    with handle_api_exceptions(
        ctx, opts=opts, context_msg=context_msg, reraise_on_error=skip_errors
    ):
        with maybe_spinner(opts):
            _, new_slug = copy_package(
                owner=owner, repo=source, identifier=slug, destination=destination
            )

    click.secho("OK", fg="green")

    if no_wait_for_sync:
        return

    wait_for_package_sync(
        ctx=ctx,
        opts=opts,
        owner=owner,
        repo=destination,
        slug=new_slug,
        wait_interval=wait_interval,
        skip_errors=skip_errors,
        attempts=sync_attempts,
    )

python
train
26.38806
tbreitenfeldt/invisible_ui
invisible_ui/elements/element.py
https://github.com/tbreitenfeldt/invisible_ui/blob/1a6907bfa61bded13fa9fb83ec7778c0df84487f/invisible_ui/elements/element.py#L42-L44
def selected(self, interrupt=False): """This object has been selected.""" self.ao2.output(self.get_title(), interrupt=interrupt)
[ "def", "selected", "(", "self", ",", "interrupt", "=", "False", ")", ":", "self", ".", "ao2", ".", "output", "(", "self", ".", "get_title", "(", ")", ",", "interrupt", "=", "interrupt", ")" ]
This object has been selected.
[ "This", "object", "has", "been", "selected", "." ]
python
train
47.333333
ArchiveTeam/wpull
wpull/processor/base.py
https://github.com/ArchiveTeam/wpull/blob/ddf051aa3322479325ba20aa778cb2cb97606bf5/wpull/processor/base.py#L55-L60
def _log_error(self, request, error): '''Log exceptions during a fetch.''' _logger.error( _('Fetching ‘{url}’ encountered an error: {error}'), url=request.url, error=error )
[ "def", "_log_error", "(", "self", ",", "request", ",", "error", ")", ":", "_logger", ".", "error", "(", "_", "(", "'Fetching ‘{url}’ encountered an error: {error}'),", "", "", "url", "=", "request", ".", "url", ",", "error", "=", "error", ")" ]
Log exceptions during a fetch.
[ "Log", "exceptions", "during", "a", "fetch", "." ]
python
train
36
wbond/certvalidator
certvalidator/validate.py
https://github.com/wbond/certvalidator/blob/c62233a713bcc36963e9d82323ec8d84f8e01485/certvalidator/validate.py#L1137-L1737
def verify_crl(cert, path, validation_context, use_deltas=True, cert_description=None, end_entity_name_override=None): """ Verifies a certificate against a list of CRLs, checking to make sure the certificate has not been revoked. Uses the algorithm from https://tools.ietf.org/html/rfc5280#section-6.3 as a basis, but the implementation differs to allow CRLs from unrecorded locations. :param cert: An asn1cyrpto.x509.Certificate object to check for in the CRLs :param path: A certvalidator.path.ValidationPath object of the cert's validation path :param certificate_lists: A list of asn1crypto.crl.CertificateList objects :param validation_context: A certvalidator.context.ValidationContext object to use for caching validation information :param use_deltas: A boolean indicating if delta CRLs should be used :param cert_description: A unicode string containing a description of the certificate to be used in exception messages :param end_entity_name_override: None or a unicode string of the name to use for the end-entity certificate when including in exception messages :raises: certvalidator.errors.CRLNoMatchesError - when none of the CRLs match the certificate certvalidator.errors.CRLValidationError - when any error occurs trying to verify the CertificateList certvalidator.errors.RevokedError - when the CRL indicates the certificate has been revoked """ if not isinstance(cert, x509.Certificate): raise TypeError(pretty_message( ''' cert must be an instance of asn1crypto.x509.Certificate, not %s ''', type_name(cert) )) if not isinstance(path, ValidationPath): raise TypeError(pretty_message( ''' path must be an instance of certvalidator.path.ValidationPath, not %s ''', type_name(path) )) if not isinstance(validation_context, ValidationContext): raise TypeError(pretty_message( ''' validation_context must be an instance of certvalidator.context.ValidationContext, not %s ''', type_name(validation_context) )) if cert_description is None: cert_description = 'the certificate' if not isinstance(cert_description, str_cls): raise TypeError(pretty_message( ''' cert_description must be a unicode string, not %s ''', type_name(cert_description) )) moment = validation_context.moment certificate_registry = validation_context.certificate_registry certificate_lists = validation_context.retrieve_crls(cert) cert_issuer = path.find_issuer(cert) complete_lists_by_issuer = {} delta_lists_by_issuer = {} for certificate_list in certificate_lists: issuer_hashable = certificate_list.issuer.hashable if certificate_list.delta_crl_indicator_value is None: if issuer_hashable not in complete_lists_by_issuer: complete_lists_by_issuer[issuer_hashable] = [] complete_lists_by_issuer[issuer_hashable].append(certificate_list) else: if issuer_hashable not in delta_lists_by_issuer: delta_lists_by_issuer[issuer_hashable] = [] delta_lists_by_issuer[issuer_hashable].append(certificate_list) # In the main loop, only complete CRLs are processed, so delta CRLs are # weeded out of the todo list crls_to_process = [] for issuer_crls in complete_lists_by_issuer.values(): crls_to_process.extend(issuer_crls) total_crls = len(crls_to_process) # Build a lookup table for the Distribution point objects associated with # an issuer name hashable distribution_point_map = {} sources = [cert.crl_distribution_points] if use_deltas: sources.extend(cert.delta_crl_distribution_points) for dp_list in sources: for distribution_point in dp_list: if isinstance(distribution_point['crl_issuer'], x509.GeneralNames): dp_name_hashes = [] for general_name in 
distribution_point['crl_issuer']: if general_name.name == 'directory_name': dp_name_hashes.append(general_name.chosen.hashable) else: dp_name_hashes = [cert.issuer.hashable] for dp_name_hash in dp_name_hashes: if dp_name_hash not in distribution_point_map: distribution_point_map[dp_name_hash] = [] distribution_point_map[dp_name_hash].append(distribution_point) valid_reasons = set([ 'key_compromise', 'ca_compromise', 'affiliation_changed', 'superseded', 'cessation_of_operation', 'certificate_hold', 'privilege_withdrawn', 'aa_compromise', ]) known_extensions = set([ 'issuer_alt_name', 'crl_number', 'delta_crl_indicator', 'issuing_distribution_point', 'authority_key_identifier', 'freshest_crl', 'authority_information_access', ]) checked_reasons = set() failures = [] issuer_failures = 0 while len(crls_to_process) > 0: certificate_list = crls_to_process.pop(0) crl_idp = certificate_list.issuing_distribution_point_value delta_certificate_list = None delta_crl_idp = None interim_reasons = set() crl_issuer = None crl_issuer_name = None is_indirect = False if crl_idp and crl_idp['indirect_crl'].native: is_indirect = True crl_idp_name = crl_idp['distribution_point'] if crl_idp_name: if crl_idp_name.name == 'full_name': crl_issuer_name = crl_idp_name.chosen[0].chosen else: crl_issuer_name = cert_issuer.subject.copy().chosen.append( crl_idp_name.chosen ) elif certificate_list.authority_key_identifier: tmp_crl_issuer = certificate_registry.retrieve_by_key_identifier( certificate_list.authority_key_identifier ) crl_issuer_name = tmp_crl_issuer.subject else: failures.append(( 'CRL is marked as an indirect CRL, but provides no ' 'mechanism for locating the CRL issuer certificate', certificate_list )) continue else: crl_issuer_name = certificate_list.issuer if not crl_issuer: crl_issuer = validation_context.check_crl_issuer(certificate_list) if not crl_issuer: candidate_crl_issuers = certificate_registry.retrieve_by_name(crl_issuer_name, cert_issuer) candidates_skipped = 0 signatures_failed = 0 unauthorized_certs = 0 if not candidate_crl_issuers and crl_issuer_name != certificate_list.issuer: candidate_crl_issuers = certificate_registry.retrieve_by_name(certificate_list.issuer, cert_issuer) for candidate_crl_issuer in candidate_crl_issuers: direct_issuer = candidate_crl_issuer.subject == cert_issuer.subject # In some cases an indirect CRL issuer is a certificate issued # by the certificate issuer. However, we need to ensure that # the candidate CRL issuer is not the certificate being checked, # otherwise we may be checking an incorrect CRL and produce # incorrect results. 
indirect_issuer = candidate_crl_issuer.issuer == cert_issuer.subject indirect_issuer = indirect_issuer and candidate_crl_issuer.sha256 != cert.sha256 if not direct_issuer and not indirect_issuer and not is_indirect: candidates_skipped += 1 continue # Step f candidate_crl_issuer_path = None if validation_context: candidate_crl_issuer_path = validation_context.check_validation(candidate_crl_issuer) if candidate_crl_issuer_path is None: candidate_crl_issuer_path = path.copy().truncate_to_issuer(candidate_crl_issuer) candidate_crl_issuer_path.append(candidate_crl_issuer) try: # Pre-emptively mark a path as validated to prevent recursion if validation_context: validation_context.record_validation(candidate_crl_issuer, candidate_crl_issuer_path) temp_override = end_entity_name_override if temp_override is None and candidate_crl_issuer.sha256 != cert_issuer.sha256: temp_override = cert_description + ' CRL issuer' _validate_path( validation_context, candidate_crl_issuer_path, end_entity_name_override=temp_override ) except (PathValidationError) as e: # If the validation did not work out, clear it if validation_context: validation_context.clear_validation(candidate_crl_issuer) # We let a revoked error fall through since step k will catch # it with a correct error message if isinstance(e, RevokedError): raise raise CRLValidationError('CRL issuer certificate path could not be validated') key_usage_value = candidate_crl_issuer.key_usage_value if key_usage_value and 'crl_sign' not in key_usage_value.native: unauthorized_certs += 1 continue try: # Step g _verify_signature(certificate_list, candidate_crl_issuer) crl_issuer = candidate_crl_issuer break except (CRLValidationError): signatures_failed += 1 continue if crl_issuer is None: if candidates_skipped == len(candidate_crl_issuers): issuer_failures += 1 else: if signatures_failed == len(candidate_crl_issuers): failures.append(( 'CRL signature could not be verified', certificate_list )) elif unauthorized_certs == len(candidate_crl_issuers): failures.append(( 'The CRL issuer is not authorized to sign CRLs', certificate_list )) else: failures.append(( 'Unable to locate CRL issuer certificate', certificate_list )) continue else: validation_context.record_crl_issuer(certificate_list, crl_issuer) # Step b 1 has_dp_crl_issuer = False dp_match = False dps = cert.crl_distribution_points_value if dps: crl_issuer_general_name = x509.GeneralName( name='directory_name', value=crl_issuer.subject ) for dp in dps: if dp['crl_issuer']: has_dp_crl_issuer = True if crl_issuer_general_name in dp['crl_issuer']: dp_match = True same_issuer = crl_issuer.subject == cert_issuer.subject indirect_match = has_dp_crl_issuer and dp_match and is_indirect missing_idp = has_dp_crl_issuer and (not dp_match or not is_indirect) indirect_crl_issuer = crl_issuer.issuer == cert_issuer.subject if (not same_issuer and not indirect_match and not indirect_crl_issuer) or missing_idp: issuer_failures += 1 continue # Check to make sure the CRL is valid for the moment specified if moment < certificate_list['tbs_cert_list']['this_update'].native: failures.append(( 'CRL is from after the validation time', certificate_list )) continue if moment > certificate_list['tbs_cert_list']['next_update'].native: failures.append(( 'CRL should have been regenerated by the validation time', certificate_list )) continue # Step b 2 if crl_idp is not None: # Step b 2 i has_idp_name = False has_dp_name = False idp_dp_match = False idp_general_names = [] idp_dp_name = crl_idp['distribution_point'] if idp_dp_name: 
has_idp_name = True if idp_dp_name.name == 'full_name': for general_name in idp_dp_name.chosen: idp_general_names.append(general_name) else: inner_extended_issuer_name = crl_issuer.subject.copy() inner_extended_issuer_name.chosen.append(idp_dp_name.chosen.untag()) idp_general_names.append(x509.GeneralName( name='directory_name', value=inner_extended_issuer_name )) dps = cert.crl_distribution_points_value if dps: for dp in dps: if idp_dp_match: break dp_name = dp['distribution_point'] if dp_name: has_dp_name = True if dp_name.name == 'full_name': for general_name in dp_name.chosen: if general_name in idp_general_names: idp_dp_match = True break else: inner_extended_issuer_name = crl_issuer.subject.copy() inner_extended_issuer_name.chosen.append(dp_name.chosen.untag()) dp_extended_issuer_name = x509.GeneralName( name='directory_name', value=inner_extended_issuer_name ) if dp_extended_issuer_name in idp_general_names: idp_dp_match = True elif dp['crl_issuer']: has_dp_name = True for dp_crl_issuer_name in dp['crl_issuer']: if dp_crl_issuer_name in idp_general_names: idp_dp_match = True break else: # If there is no DP, we consider the CRL issuer name to be it has_dp_name = True general_name = x509.GeneralName( name='directory_name', value=crl_issuer_name ) if general_name in idp_general_names: idp_dp_match = True idp_dp_match_failed = has_idp_name and has_dp_name and not idp_dp_match if idp_dp_match_failed: failures.append(( pretty_message( ''' The CRL issuing distribution point extension does not share any names with the certificate CRL distribution point extension ''' ), certificate_list )) issuer_failures += 1 continue # Step b 2 ii if crl_idp['only_contains_user_certs'].native: if cert.basic_constraints_value and cert.basic_constraints_value['ca'].native: failures.append(( pretty_message( ''' CRL only contains end-entity certificates and certificate is a CA certificate ''' ), certificate_list )) continue # Step b 2 iii if crl_idp['only_contains_ca_certs'].native: if not cert.basic_constraints_value or cert.basic_constraints_value['ca'].native is False: failures.append(( pretty_message( ''' CRL only contains CA certificates and certificate is an end-entity certificate ''' ), certificate_list )) continue # Step b 2 iv if crl_idp['only_contains_attribute_certs'].native: failures.append(( 'CRL only contains attribute certificates', certificate_list )) continue # Step c if use_deltas and certificate_list.freshest_crl_value and len(certificate_list.freshest_crl_value) > 0: for candidate_delta_cl in delta_lists_by_issuer.get(crl_issuer_name.hashable, []): # Step c 1 if candidate_delta_cl.issuer != crl_issuer_name: continue # Step c 2 delta_crl_idp = candidate_delta_cl.issuing_distribution_point_value if (crl_idp is None and delta_crl_idp is not None) or (crl_idp is not None and delta_crl_idp is None): continue if crl_idp and crl_idp.native != delta_crl_idp.native: continue # Step c 3 if certificate_list.authority_key_identifier != candidate_delta_cl.authority_key_identifier: continue delta_certificate_list = candidate_delta_cl break # Step d idp_reasons = None if crl_idp and crl_idp['only_some_reasons'].native is not None: idp_reasons = crl_idp['only_some_reasons'].native reason_keys = None if idp_reasons: reason_keys = idp_reasons if reason_keys is None: interim_reasons = valid_reasons.copy() else: interim_reasons = reason_keys # Step e # We don't skip a CRL if it only contains reasons already checked since # a certificate issuer can self-issue a new cert that is used for CRLs if 
certificate_list.critical_extensions - known_extensions: failures.append(( 'One or more unrecognized critical extensions are present in ' 'the CRL', certificate_list )) continue if use_deltas and delta_certificate_list and delta_certificate_list.critical_extensions - known_extensions: failures.append(( 'One or more unrecognized critical extensions are present in ' 'the delta CRL', delta_certificate_list )) continue # Step h if use_deltas and delta_certificate_list: try: _verify_signature(delta_certificate_list, crl_issuer) except (CRLValidationError): failures.append(( 'Delta CRL signature could not be verified', certificate_list, delta_certificate_list )) continue if moment < delta_certificate_list['tbs_cert_list']['this_update'].native: failures.append(( 'Delta CRL is from after the validation time', certificate_list, delta_certificate_list )) continue if moment > delta_certificate_list['tbs_cert_list']['next_update'].native: failures.append(( 'Delta CRL is from before the validation time', certificate_list, delta_certificate_list )) continue # Step i revoked_reason = None revoked_date = None if use_deltas and delta_certificate_list: try: revoked_date, revoked_reason = _find_cert_in_list(cert, cert_issuer, delta_certificate_list, crl_issuer) except (NotImplementedError): failures.append(( 'One or more critical extensions are present in the CRL ' 'entry for the certificate', delta_certificate_list )) continue # Step j if revoked_reason is None: try: revoked_date, revoked_reason = _find_cert_in_list(cert, cert_issuer, certificate_list, crl_issuer) except (NotImplementedError): failures.append(( 'One or more critical extensions are present in the CRL ' 'entry for the certificate', certificate_list )) continue # Step k if revoked_reason and revoked_reason.native == 'remove_from_crl': revoked_reason = None revoked_date = None if revoked_reason: reason = revoked_reason.human_friendly date = revoked_date.native.strftime('%Y-%m-%d') time = revoked_date.native.strftime('%H:%M:%S') raise RevokedError(pretty_message( ''' CRL indicates %s was revoked at %s on %s, due to %s ''', cert_description, time, date, reason )) # Step l checked_reasons |= interim_reasons # CRLs should not include this value, but at least one of the examples # from the NIST test suite does checked_reasons -= set(['unused']) if checked_reasons != valid_reasons: if total_crls == issuer_failures: raise CRLNoMatchesError(pretty_message( ''' No CRLs were issued by the issuer of %s, or any indirect CRL issuer ''', cert_description )) if not failures: failures.append(( 'The available CRLs do not cover all revocation reasons', )) raise CRLValidationIndeterminateError( pretty_message( ''' Unable to determine if %s is revoked due to insufficient information from known CRLs ''', cert_description ), failures )
[ "def", "verify_crl", "(", "cert", ",", "path", ",", "validation_context", ",", "use_deltas", "=", "True", ",", "cert_description", "=", "None", ",", "end_entity_name_override", "=", "None", ")", ":", "if", "not", "isinstance", "(", "cert", ",", "x509", ".", "Certificate", ")", ":", "raise", "TypeError", "(", "pretty_message", "(", "'''\n cert must be an instance of asn1crypto.x509.Certificate, not %s\n '''", ",", "type_name", "(", "cert", ")", ")", ")", "if", "not", "isinstance", "(", "path", ",", "ValidationPath", ")", ":", "raise", "TypeError", "(", "pretty_message", "(", "'''\n path must be an instance of certvalidator.path.ValidationPath,\n not %s\n '''", ",", "type_name", "(", "path", ")", ")", ")", "if", "not", "isinstance", "(", "validation_context", ",", "ValidationContext", ")", ":", "raise", "TypeError", "(", "pretty_message", "(", "'''\n validation_context must be an instance of\n certvalidator.context.ValidationContext, not %s\n '''", ",", "type_name", "(", "validation_context", ")", ")", ")", "if", "cert_description", "is", "None", ":", "cert_description", "=", "'the certificate'", "if", "not", "isinstance", "(", "cert_description", ",", "str_cls", ")", ":", "raise", "TypeError", "(", "pretty_message", "(", "'''\n cert_description must be a unicode string, not %s\n '''", ",", "type_name", "(", "cert_description", ")", ")", ")", "moment", "=", "validation_context", ".", "moment", "certificate_registry", "=", "validation_context", ".", "certificate_registry", "certificate_lists", "=", "validation_context", ".", "retrieve_crls", "(", "cert", ")", "cert_issuer", "=", "path", ".", "find_issuer", "(", "cert", ")", "complete_lists_by_issuer", "=", "{", "}", "delta_lists_by_issuer", "=", "{", "}", "for", "certificate_list", "in", "certificate_lists", ":", "issuer_hashable", "=", "certificate_list", ".", "issuer", ".", "hashable", "if", "certificate_list", ".", "delta_crl_indicator_value", "is", "None", ":", "if", "issuer_hashable", "not", "in", "complete_lists_by_issuer", ":", "complete_lists_by_issuer", "[", "issuer_hashable", "]", "=", "[", "]", "complete_lists_by_issuer", "[", "issuer_hashable", "]", ".", "append", "(", "certificate_list", ")", "else", ":", "if", "issuer_hashable", "not", "in", "delta_lists_by_issuer", ":", "delta_lists_by_issuer", "[", "issuer_hashable", "]", "=", "[", "]", "delta_lists_by_issuer", "[", "issuer_hashable", "]", ".", "append", "(", "certificate_list", ")", "# In the main loop, only complete CRLs are processed, so delta CRLs are", "# weeded out of the todo list", "crls_to_process", "=", "[", "]", "for", "issuer_crls", "in", "complete_lists_by_issuer", ".", "values", "(", ")", ":", "crls_to_process", ".", "extend", "(", "issuer_crls", ")", "total_crls", "=", "len", "(", "crls_to_process", ")", "# Build a lookup table for the Distribution point objects associated with", "# an issuer name hashable", "distribution_point_map", "=", "{", "}", "sources", "=", "[", "cert", ".", "crl_distribution_points", "]", "if", "use_deltas", ":", "sources", ".", "extend", "(", "cert", ".", "delta_crl_distribution_points", ")", "for", "dp_list", "in", "sources", ":", "for", "distribution_point", "in", "dp_list", ":", "if", "isinstance", "(", "distribution_point", "[", "'crl_issuer'", "]", ",", "x509", ".", "GeneralNames", ")", ":", "dp_name_hashes", "=", "[", "]", "for", "general_name", "in", "distribution_point", "[", "'crl_issuer'", "]", ":", "if", "general_name", ".", "name", "==", "'directory_name'", ":", "dp_name_hashes", ".", "append", "(", "general_name", ".", 
"chosen", ".", "hashable", ")", "else", ":", "dp_name_hashes", "=", "[", "cert", ".", "issuer", ".", "hashable", "]", "for", "dp_name_hash", "in", "dp_name_hashes", ":", "if", "dp_name_hash", "not", "in", "distribution_point_map", ":", "distribution_point_map", "[", "dp_name_hash", "]", "=", "[", "]", "distribution_point_map", "[", "dp_name_hash", "]", ".", "append", "(", "distribution_point", ")", "valid_reasons", "=", "set", "(", "[", "'key_compromise'", ",", "'ca_compromise'", ",", "'affiliation_changed'", ",", "'superseded'", ",", "'cessation_of_operation'", ",", "'certificate_hold'", ",", "'privilege_withdrawn'", ",", "'aa_compromise'", ",", "]", ")", "known_extensions", "=", "set", "(", "[", "'issuer_alt_name'", ",", "'crl_number'", ",", "'delta_crl_indicator'", ",", "'issuing_distribution_point'", ",", "'authority_key_identifier'", ",", "'freshest_crl'", ",", "'authority_information_access'", ",", "]", ")", "checked_reasons", "=", "set", "(", ")", "failures", "=", "[", "]", "issuer_failures", "=", "0", "while", "len", "(", "crls_to_process", ")", ">", "0", ":", "certificate_list", "=", "crls_to_process", ".", "pop", "(", "0", ")", "crl_idp", "=", "certificate_list", ".", "issuing_distribution_point_value", "delta_certificate_list", "=", "None", "delta_crl_idp", "=", "None", "interim_reasons", "=", "set", "(", ")", "crl_issuer", "=", "None", "crl_issuer_name", "=", "None", "is_indirect", "=", "False", "if", "crl_idp", "and", "crl_idp", "[", "'indirect_crl'", "]", ".", "native", ":", "is_indirect", "=", "True", "crl_idp_name", "=", "crl_idp", "[", "'distribution_point'", "]", "if", "crl_idp_name", ":", "if", "crl_idp_name", ".", "name", "==", "'full_name'", ":", "crl_issuer_name", "=", "crl_idp_name", ".", "chosen", "[", "0", "]", ".", "chosen", "else", ":", "crl_issuer_name", "=", "cert_issuer", ".", "subject", ".", "copy", "(", ")", ".", "chosen", ".", "append", "(", "crl_idp_name", ".", "chosen", ")", "elif", "certificate_list", ".", "authority_key_identifier", ":", "tmp_crl_issuer", "=", "certificate_registry", ".", "retrieve_by_key_identifier", "(", "certificate_list", ".", "authority_key_identifier", ")", "crl_issuer_name", "=", "tmp_crl_issuer", ".", "subject", "else", ":", "failures", ".", "append", "(", "(", "'CRL is marked as an indirect CRL, but provides no '", "'mechanism for locating the CRL issuer certificate'", ",", "certificate_list", ")", ")", "continue", "else", ":", "crl_issuer_name", "=", "certificate_list", ".", "issuer", "if", "not", "crl_issuer", ":", "crl_issuer", "=", "validation_context", ".", "check_crl_issuer", "(", "certificate_list", ")", "if", "not", "crl_issuer", ":", "candidate_crl_issuers", "=", "certificate_registry", ".", "retrieve_by_name", "(", "crl_issuer_name", ",", "cert_issuer", ")", "candidates_skipped", "=", "0", "signatures_failed", "=", "0", "unauthorized_certs", "=", "0", "if", "not", "candidate_crl_issuers", "and", "crl_issuer_name", "!=", "certificate_list", ".", "issuer", ":", "candidate_crl_issuers", "=", "certificate_registry", ".", "retrieve_by_name", "(", "certificate_list", ".", "issuer", ",", "cert_issuer", ")", "for", "candidate_crl_issuer", "in", "candidate_crl_issuers", ":", "direct_issuer", "=", "candidate_crl_issuer", ".", "subject", "==", "cert_issuer", ".", "subject", "# In some cases an indirect CRL issuer is a certificate issued", "# by the certificate issuer. 
However, we need to ensure that", "# the candidate CRL issuer is not the certificate being checked,", "# otherwise we may be checking an incorrect CRL and produce", "# incorrect results.", "indirect_issuer", "=", "candidate_crl_issuer", ".", "issuer", "==", "cert_issuer", ".", "subject", "indirect_issuer", "=", "indirect_issuer", "and", "candidate_crl_issuer", ".", "sha256", "!=", "cert", ".", "sha256", "if", "not", "direct_issuer", "and", "not", "indirect_issuer", "and", "not", "is_indirect", ":", "candidates_skipped", "+=", "1", "continue", "# Step f", "candidate_crl_issuer_path", "=", "None", "if", "validation_context", ":", "candidate_crl_issuer_path", "=", "validation_context", ".", "check_validation", "(", "candidate_crl_issuer", ")", "if", "candidate_crl_issuer_path", "is", "None", ":", "candidate_crl_issuer_path", "=", "path", ".", "copy", "(", ")", ".", "truncate_to_issuer", "(", "candidate_crl_issuer", ")", "candidate_crl_issuer_path", ".", "append", "(", "candidate_crl_issuer", ")", "try", ":", "# Pre-emptively mark a path as validated to prevent recursion", "if", "validation_context", ":", "validation_context", ".", "record_validation", "(", "candidate_crl_issuer", ",", "candidate_crl_issuer_path", ")", "temp_override", "=", "end_entity_name_override", "if", "temp_override", "is", "None", "and", "candidate_crl_issuer", ".", "sha256", "!=", "cert_issuer", ".", "sha256", ":", "temp_override", "=", "cert_description", "+", "' CRL issuer'", "_validate_path", "(", "validation_context", ",", "candidate_crl_issuer_path", ",", "end_entity_name_override", "=", "temp_override", ")", "except", "(", "PathValidationError", ")", "as", "e", ":", "# If the validation did not work out, clear it", "if", "validation_context", ":", "validation_context", ".", "clear_validation", "(", "candidate_crl_issuer", ")", "# We let a revoked error fall through since step k will catch", "# it with a correct error message", "if", "isinstance", "(", "e", ",", "RevokedError", ")", ":", "raise", "raise", "CRLValidationError", "(", "'CRL issuer certificate path could not be validated'", ")", "key_usage_value", "=", "candidate_crl_issuer", ".", "key_usage_value", "if", "key_usage_value", "and", "'crl_sign'", "not", "in", "key_usage_value", ".", "native", ":", "unauthorized_certs", "+=", "1", "continue", "try", ":", "# Step g", "_verify_signature", "(", "certificate_list", ",", "candidate_crl_issuer", ")", "crl_issuer", "=", "candidate_crl_issuer", "break", "except", "(", "CRLValidationError", ")", ":", "signatures_failed", "+=", "1", "continue", "if", "crl_issuer", "is", "None", ":", "if", "candidates_skipped", "==", "len", "(", "candidate_crl_issuers", ")", ":", "issuer_failures", "+=", "1", "else", ":", "if", "signatures_failed", "==", "len", "(", "candidate_crl_issuers", ")", ":", "failures", ".", "append", "(", "(", "'CRL signature could not be verified'", ",", "certificate_list", ")", ")", "elif", "unauthorized_certs", "==", "len", "(", "candidate_crl_issuers", ")", ":", "failures", ".", "append", "(", "(", "'The CRL issuer is not authorized to sign CRLs'", ",", "certificate_list", ")", ")", "else", ":", "failures", ".", "append", "(", "(", "'Unable to locate CRL issuer certificate'", ",", "certificate_list", ")", ")", "continue", "else", ":", "validation_context", ".", "record_crl_issuer", "(", "certificate_list", ",", "crl_issuer", ")", "# Step b 1", "has_dp_crl_issuer", "=", "False", "dp_match", "=", "False", "dps", "=", "cert", ".", "crl_distribution_points_value", "if", "dps", ":", 
"crl_issuer_general_name", "=", "x509", ".", "GeneralName", "(", "name", "=", "'directory_name'", ",", "value", "=", "crl_issuer", ".", "subject", ")", "for", "dp", "in", "dps", ":", "if", "dp", "[", "'crl_issuer'", "]", ":", "has_dp_crl_issuer", "=", "True", "if", "crl_issuer_general_name", "in", "dp", "[", "'crl_issuer'", "]", ":", "dp_match", "=", "True", "same_issuer", "=", "crl_issuer", ".", "subject", "==", "cert_issuer", ".", "subject", "indirect_match", "=", "has_dp_crl_issuer", "and", "dp_match", "and", "is_indirect", "missing_idp", "=", "has_dp_crl_issuer", "and", "(", "not", "dp_match", "or", "not", "is_indirect", ")", "indirect_crl_issuer", "=", "crl_issuer", ".", "issuer", "==", "cert_issuer", ".", "subject", "if", "(", "not", "same_issuer", "and", "not", "indirect_match", "and", "not", "indirect_crl_issuer", ")", "or", "missing_idp", ":", "issuer_failures", "+=", "1", "continue", "# Check to make sure the CRL is valid for the moment specified", "if", "moment", "<", "certificate_list", "[", "'tbs_cert_list'", "]", "[", "'this_update'", "]", ".", "native", ":", "failures", ".", "append", "(", "(", "'CRL is from after the validation time'", ",", "certificate_list", ")", ")", "continue", "if", "moment", ">", "certificate_list", "[", "'tbs_cert_list'", "]", "[", "'next_update'", "]", ".", "native", ":", "failures", ".", "append", "(", "(", "'CRL should have been regenerated by the validation time'", ",", "certificate_list", ")", ")", "continue", "# Step b 2", "if", "crl_idp", "is", "not", "None", ":", "# Step b 2 i", "has_idp_name", "=", "False", "has_dp_name", "=", "False", "idp_dp_match", "=", "False", "idp_general_names", "=", "[", "]", "idp_dp_name", "=", "crl_idp", "[", "'distribution_point'", "]", "if", "idp_dp_name", ":", "has_idp_name", "=", "True", "if", "idp_dp_name", ".", "name", "==", "'full_name'", ":", "for", "general_name", "in", "idp_dp_name", ".", "chosen", ":", "idp_general_names", ".", "append", "(", "general_name", ")", "else", ":", "inner_extended_issuer_name", "=", "crl_issuer", ".", "subject", ".", "copy", "(", ")", "inner_extended_issuer_name", ".", "chosen", ".", "append", "(", "idp_dp_name", ".", "chosen", ".", "untag", "(", ")", ")", "idp_general_names", ".", "append", "(", "x509", ".", "GeneralName", "(", "name", "=", "'directory_name'", ",", "value", "=", "inner_extended_issuer_name", ")", ")", "dps", "=", "cert", ".", "crl_distribution_points_value", "if", "dps", ":", "for", "dp", "in", "dps", ":", "if", "idp_dp_match", ":", "break", "dp_name", "=", "dp", "[", "'distribution_point'", "]", "if", "dp_name", ":", "has_dp_name", "=", "True", "if", "dp_name", ".", "name", "==", "'full_name'", ":", "for", "general_name", "in", "dp_name", ".", "chosen", ":", "if", "general_name", "in", "idp_general_names", ":", "idp_dp_match", "=", "True", "break", "else", ":", "inner_extended_issuer_name", "=", "crl_issuer", ".", "subject", ".", "copy", "(", ")", "inner_extended_issuer_name", ".", "chosen", ".", "append", "(", "dp_name", ".", "chosen", ".", "untag", "(", ")", ")", "dp_extended_issuer_name", "=", "x509", ".", "GeneralName", "(", "name", "=", "'directory_name'", ",", "value", "=", "inner_extended_issuer_name", ")", "if", "dp_extended_issuer_name", "in", "idp_general_names", ":", "idp_dp_match", "=", "True", "elif", "dp", "[", "'crl_issuer'", "]", ":", "has_dp_name", "=", "True", "for", "dp_crl_issuer_name", "in", "dp", "[", "'crl_issuer'", "]", ":", "if", "dp_crl_issuer_name", "in", "idp_general_names", ":", "idp_dp_match", "=", "True", "break", "else", ":", 
"# If there is no DP, we consider the CRL issuer name to be it", "has_dp_name", "=", "True", "general_name", "=", "x509", ".", "GeneralName", "(", "name", "=", "'directory_name'", ",", "value", "=", "crl_issuer_name", ")", "if", "general_name", "in", "idp_general_names", ":", "idp_dp_match", "=", "True", "idp_dp_match_failed", "=", "has_idp_name", "and", "has_dp_name", "and", "not", "idp_dp_match", "if", "idp_dp_match_failed", ":", "failures", ".", "append", "(", "(", "pretty_message", "(", "'''\n The CRL issuing distribution point extension does not\n share any names with the certificate CRL distribution\n point extension\n '''", ")", ",", "certificate_list", ")", ")", "issuer_failures", "+=", "1", "continue", "# Step b 2 ii", "if", "crl_idp", "[", "'only_contains_user_certs'", "]", ".", "native", ":", "if", "cert", ".", "basic_constraints_value", "and", "cert", ".", "basic_constraints_value", "[", "'ca'", "]", ".", "native", ":", "failures", ".", "append", "(", "(", "pretty_message", "(", "'''\n CRL only contains end-entity certificates and\n certificate is a CA certificate\n '''", ")", ",", "certificate_list", ")", ")", "continue", "# Step b 2 iii", "if", "crl_idp", "[", "'only_contains_ca_certs'", "]", ".", "native", ":", "if", "not", "cert", ".", "basic_constraints_value", "or", "cert", ".", "basic_constraints_value", "[", "'ca'", "]", ".", "native", "is", "False", ":", "failures", ".", "append", "(", "(", "pretty_message", "(", "'''\n CRL only contains CA certificates and certificate\n is an end-entity certificate\n '''", ")", ",", "certificate_list", ")", ")", "continue", "# Step b 2 iv", "if", "crl_idp", "[", "'only_contains_attribute_certs'", "]", ".", "native", ":", "failures", ".", "append", "(", "(", "'CRL only contains attribute certificates'", ",", "certificate_list", ")", ")", "continue", "# Step c", "if", "use_deltas", "and", "certificate_list", ".", "freshest_crl_value", "and", "len", "(", "certificate_list", ".", "freshest_crl_value", ")", ">", "0", ":", "for", "candidate_delta_cl", "in", "delta_lists_by_issuer", ".", "get", "(", "crl_issuer_name", ".", "hashable", ",", "[", "]", ")", ":", "# Step c 1", "if", "candidate_delta_cl", ".", "issuer", "!=", "crl_issuer_name", ":", "continue", "# Step c 2", "delta_crl_idp", "=", "candidate_delta_cl", ".", "issuing_distribution_point_value", "if", "(", "crl_idp", "is", "None", "and", "delta_crl_idp", "is", "not", "None", ")", "or", "(", "crl_idp", "is", "not", "None", "and", "delta_crl_idp", "is", "None", ")", ":", "continue", "if", "crl_idp", "and", "crl_idp", ".", "native", "!=", "delta_crl_idp", ".", "native", ":", "continue", "# Step c 3", "if", "certificate_list", ".", "authority_key_identifier", "!=", "candidate_delta_cl", ".", "authority_key_identifier", ":", "continue", "delta_certificate_list", "=", "candidate_delta_cl", "break", "# Step d", "idp_reasons", "=", "None", "if", "crl_idp", "and", "crl_idp", "[", "'only_some_reasons'", "]", ".", "native", "is", "not", "None", ":", "idp_reasons", "=", "crl_idp", "[", "'only_some_reasons'", "]", ".", "native", "reason_keys", "=", "None", "if", "idp_reasons", ":", "reason_keys", "=", "idp_reasons", "if", "reason_keys", "is", "None", ":", "interim_reasons", "=", "valid_reasons", ".", "copy", "(", ")", "else", ":", "interim_reasons", "=", "reason_keys", "# Step e", "# We don't skip a CRL if it only contains reasons already checked since", "# a certificate issuer can self-issue a new cert that is used for CRLs", "if", "certificate_list", ".", "critical_extensions", "-", 
"known_extensions", ":", "failures", ".", "append", "(", "(", "'One or more unrecognized critical extensions are present in '", "'the CRL'", ",", "certificate_list", ")", ")", "continue", "if", "use_deltas", "and", "delta_certificate_list", "and", "delta_certificate_list", ".", "critical_extensions", "-", "known_extensions", ":", "failures", ".", "append", "(", "(", "'One or more unrecognized critical extensions are present in '", "'the delta CRL'", ",", "delta_certificate_list", ")", ")", "continue", "# Step h", "if", "use_deltas", "and", "delta_certificate_list", ":", "try", ":", "_verify_signature", "(", "delta_certificate_list", ",", "crl_issuer", ")", "except", "(", "CRLValidationError", ")", ":", "failures", ".", "append", "(", "(", "'Delta CRL signature could not be verified'", ",", "certificate_list", ",", "delta_certificate_list", ")", ")", "continue", "if", "moment", "<", "delta_certificate_list", "[", "'tbs_cert_list'", "]", "[", "'this_update'", "]", ".", "native", ":", "failures", ".", "append", "(", "(", "'Delta CRL is from after the validation time'", ",", "certificate_list", ",", "delta_certificate_list", ")", ")", "continue", "if", "moment", ">", "delta_certificate_list", "[", "'tbs_cert_list'", "]", "[", "'next_update'", "]", ".", "native", ":", "failures", ".", "append", "(", "(", "'Delta CRL is from before the validation time'", ",", "certificate_list", ",", "delta_certificate_list", ")", ")", "continue", "# Step i", "revoked_reason", "=", "None", "revoked_date", "=", "None", "if", "use_deltas", "and", "delta_certificate_list", ":", "try", ":", "revoked_date", ",", "revoked_reason", "=", "_find_cert_in_list", "(", "cert", ",", "cert_issuer", ",", "delta_certificate_list", ",", "crl_issuer", ")", "except", "(", "NotImplementedError", ")", ":", "failures", ".", "append", "(", "(", "'One or more critical extensions are present in the CRL '", "'entry for the certificate'", ",", "delta_certificate_list", ")", ")", "continue", "# Step j", "if", "revoked_reason", "is", "None", ":", "try", ":", "revoked_date", ",", "revoked_reason", "=", "_find_cert_in_list", "(", "cert", ",", "cert_issuer", ",", "certificate_list", ",", "crl_issuer", ")", "except", "(", "NotImplementedError", ")", ":", "failures", ".", "append", "(", "(", "'One or more critical extensions are present in the CRL '", "'entry for the certificate'", ",", "certificate_list", ")", ")", "continue", "# Step k", "if", "revoked_reason", "and", "revoked_reason", ".", "native", "==", "'remove_from_crl'", ":", "revoked_reason", "=", "None", "revoked_date", "=", "None", "if", "revoked_reason", ":", "reason", "=", "revoked_reason", ".", "human_friendly", "date", "=", "revoked_date", ".", "native", ".", "strftime", "(", "'%Y-%m-%d'", ")", "time", "=", "revoked_date", ".", "native", ".", "strftime", "(", "'%H:%M:%S'", ")", "raise", "RevokedError", "(", "pretty_message", "(", "'''\n CRL indicates %s was revoked at %s on %s, due to %s\n '''", ",", "cert_description", ",", "time", ",", "date", ",", "reason", ")", ")", "# Step l", "checked_reasons", "|=", "interim_reasons", "# CRLs should not include this value, but at least one of the examples", "# from the NIST test suite does", "checked_reasons", "-=", "set", "(", "[", "'unused'", "]", ")", "if", "checked_reasons", "!=", "valid_reasons", ":", "if", "total_crls", "==", "issuer_failures", ":", "raise", "CRLNoMatchesError", "(", "pretty_message", "(", "'''\n No CRLs were issued by the issuer of %s, or any indirect CRL\n issuer\n '''", ",", "cert_description", ")", ")", "if", 
"not", "failures", ":", "failures", ".", "append", "(", "(", "'The available CRLs do not cover all revocation reasons'", ",", ")", ")", "raise", "CRLValidationIndeterminateError", "(", "pretty_message", "(", "'''\n Unable to determine if %s is revoked due to insufficient\n information from known CRLs\n '''", ",", "cert_description", ")", ",", "failures", ")" ]
Verifies a certificate against a list of CRLs, checking to make sure the certificate has not been revoked. Uses the algorithm from https://tools.ietf.org/html/rfc5280#section-6.3 as a basis, but the implementation differs to allow CRLs from unrecorded locations.

:param cert: An asn1crypto.x509.Certificate object to check for in the CRLs

:param path: A certvalidator.path.ValidationPath object of the cert's validation path

:param certificate_lists: A list of asn1crypto.crl.CertificateList objects

:param validation_context: A certvalidator.context.ValidationContext object to use for caching validation information

:param use_deltas: A boolean indicating if delta CRLs should be used

:param cert_description: A unicode string containing a description of the certificate to be used in exception messages

:param end_entity_name_override: None or a unicode string of the name to use for the end-entity certificate when including in exception messages

:raises: certvalidator.errors.CRLNoMatchesError - when none of the CRLs match the certificate certvalidator.errors.CRLValidationError - when any error occurs trying to verify the CertificateList certvalidator.errors.RevokedError - when the CRL indicates the certificate has been revoked
[ "Verifies", "a", "certificate", "against", "a", "list", "of", "CRLs", "checking", "to", "make", "sure", "the", "certificate", "has", "not", "been", "revoked", ".", "Uses", "the", "algorithm", "from", "https", ":", "//", "tools", ".", "ietf", ".", "org", "/", "html", "/", "rfc5280#section", "-", "6", ".", "3", "as", "a", "basis", "but", "the", "implementation", "differs", "to", "allow", "CRLs", "from", "unrecorded", "locations", "." ]
python
train
38.34609
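verify_crl is an internal routine; in practice revocation checking is driven through certvalidator's public API. A minimal sketch, assuming hypothetical DER-encoded files end_entity.crt, intermediate.crt and crl.der:

from certvalidator import CertificateValidator, ValidationContext

# Hypothetical input files; any DER-encoded certificates and CRL would do.
with open('end_entity.crt', 'rb') as f:
    end_entity_cert = f.read()
with open('intermediate.crt', 'rb') as f:
    intermediates = [f.read()]
with open('crl.der', 'rb') as f:
    crls = [f.read()]

# Supplying CRLs up front and disabling fetching keeps validation offline;
# revocation_mode='require' makes missing CRL coverage an error.
context = ValidationContext(crls=crls, revocation_mode='require',
                            allow_fetching=False)
validator = CertificateValidator(end_entity_cert, intermediates,
                                 validation_context=context)
# Raises certvalidator.errors.RevokedError if a matching CRL lists the cert.
validator.validate_usage(set(['digital_signature']))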
newville/wxmplot
wxmplot/basepanel.py
https://github.com/newville/wxmplot/blob/8e0dc037453e5cdf18c968dc5a3d29efd761edee/wxmplot/basepanel.py#L458-L492
def zoom_motion(self, event=None): """motion event handler for zoom mode""" try: x, y = event.x, event.y except: return self.report_motion(event=event) if self.zoom_ini is None: return ini_x, ini_y, ini_xd, ini_yd = self.zoom_ini if event.xdata is not None: self.x_lastmove = event.xdata if event.ydata is not None: self.y_lastmove = event.ydata x0 = min(x, ini_x) ymax = max(y, ini_y) width = abs(x-ini_x) height = abs(y-ini_y) y0 = self.canvas.figure.bbox.height - ymax zdc = wx.ClientDC(self.canvas) zdc.SetLogicalFunction(wx.XOR) zdc.SetBrush(wx.TRANSPARENT_BRUSH) zdc.SetPen(wx.Pen('White', 2, wx.SOLID)) zdc.ResetBoundingBox() if not is_wxPhoenix: zdc.BeginDrawing() # erase previous box if self.rbbox is not None: zdc.DrawRectangle(*self.rbbox) self.rbbox = (x0, y0, width, height) zdc.DrawRectangle(*self.rbbox) if not is_wxPhoenix: zdc.EndDrawing()
[ "def", "zoom_motion", "(", "self", ",", "event", "=", "None", ")", ":", "try", ":", "x", ",", "y", "=", "event", ".", "x", ",", "event", ".", "y", "except", ":", "return", "self", ".", "report_motion", "(", "event", "=", "event", ")", "if", "self", ".", "zoom_ini", "is", "None", ":", "return", "ini_x", ",", "ini_y", ",", "ini_xd", ",", "ini_yd", "=", "self", ".", "zoom_ini", "if", "event", ".", "xdata", "is", "not", "None", ":", "self", ".", "x_lastmove", "=", "event", ".", "xdata", "if", "event", ".", "ydata", "is", "not", "None", ":", "self", ".", "y_lastmove", "=", "event", ".", "ydata", "x0", "=", "min", "(", "x", ",", "ini_x", ")", "ymax", "=", "max", "(", "y", ",", "ini_y", ")", "width", "=", "abs", "(", "x", "-", "ini_x", ")", "height", "=", "abs", "(", "y", "-", "ini_y", ")", "y0", "=", "self", ".", "canvas", ".", "figure", ".", "bbox", ".", "height", "-", "ymax", "zdc", "=", "wx", ".", "ClientDC", "(", "self", ".", "canvas", ")", "zdc", ".", "SetLogicalFunction", "(", "wx", ".", "XOR", ")", "zdc", ".", "SetBrush", "(", "wx", ".", "TRANSPARENT_BRUSH", ")", "zdc", ".", "SetPen", "(", "wx", ".", "Pen", "(", "'White'", ",", "2", ",", "wx", ".", "SOLID", ")", ")", "zdc", ".", "ResetBoundingBox", "(", ")", "if", "not", "is_wxPhoenix", ":", "zdc", ".", "BeginDrawing", "(", ")", "# erase previous box", "if", "self", ".", "rbbox", "is", "not", "None", ":", "zdc", ".", "DrawRectangle", "(", "*", "self", ".", "rbbox", ")", "self", ".", "rbbox", "=", "(", "x0", ",", "y0", ",", "width", ",", "height", ")", "zdc", ".", "DrawRectangle", "(", "*", "self", ".", "rbbox", ")", "if", "not", "is_wxPhoenix", ":", "zdc", ".", "EndDrawing", "(", ")" ]
motion event handler for zoom mode
[ "motion", "event", "handler", "for", "zoom", "mode" ]
python
train
32.142857
derpferd/little-python
littlepython/parser.py
https://github.com/derpferd/little-python/blob/3f89c74cffb6532c12c5b40843bd8ff8605638ba/littlepython/parser.py#L208-L231
def variable(self):
    """
    variable : variable

    Feature Type Array adds:
    variable : variable[expression]

    Feature Type Func adds:
    variable : variable(arg_list)
    """
    var = Var(self.cur_token)
    self.eat(TokenTypes.VAR)
    if Features.TYPE_ARRAY in self.features:
        while self.cur_token.type == TokenTypes.LBRACKET:
            self.eat(TokenTypes.LBRACKET)
            # Start passed the logical ops.
            expr = self.operator_expression(level=2)
            self.eat(TokenTypes.RBRACKET)
            var = GetArrayItem(left=var, right=expr)
    if Features.FUNC in self.features:
        if self.cur_token.type == TokenTypes.LPAREN:
            self.eat(TokenTypes.LPAREN)
            args = self.arg_list()
            self.eat(TokenTypes.RPAREN)
            var = Call(var, args)
    return var
[ "def", "variable", "(", "self", ")", ":", "var", "=", "Var", "(", "self", ".", "cur_token", ")", "self", ".", "eat", "(", "TokenTypes", ".", "VAR", ")", "if", "Features", ".", "TYPE_ARRAY", "in", "self", ".", "features", ":", "while", "self", ".", "cur_token", ".", "type", "==", "TokenTypes", ".", "LBRACKET", ":", "self", ".", "eat", "(", "TokenTypes", ".", "LBRACKET", ")", "# Start passed the logical ops.", "expr", "=", "self", ".", "operator_expression", "(", "level", "=", "2", ")", "self", ".", "eat", "(", "TokenTypes", ".", "RBRACKET", ")", "var", "=", "GetArrayItem", "(", "left", "=", "var", ",", "right", "=", "expr", ")", "if", "Features", ".", "FUNC", "in", "self", ".", "features", ":", "if", "self", ".", "cur_token", ".", "type", "==", "TokenTypes", ".", "LPAREN", ":", "self", ".", "eat", "(", "TokenTypes", ".", "LPAREN", ")", "args", "=", "self", ".", "arg_list", "(", ")", "self", ".", "eat", "(", "TokenTypes", ".", "RPAREN", ")", "var", "=", "Call", "(", "var", ",", "args", ")", "return", "var" ]
variable : variable

Feature Type Array adds:
variable : variable[expression]

Feature Type Func adds:
variable : variable(arg_list)
[ "variable", ":", "variable", "Feature", "Type", "Array", "adds", ":", "variable", ":", "variable", "[", "expression", "]", "Feature", "Type", "Func", "adds", ":", "variable", ":", "variable", "(", "arg_list", ")" ]
python
train
38.166667
klen/pylama
pylama/core.py
https://github.com/klen/pylama/blob/f436ccc6b55b33381a295ded753e467953cf4379/pylama/core.py#L186-L196
def merge_params(params, lparams): """Merge global ignore/select with linter local params.""" ignore = params.get('ignore', set()) if 'ignore' in lparams: ignore = ignore | set(lparams['ignore']) select = params.get('select', set()) if 'select' in lparams: select = select | set(lparams['select']) return ignore, select
[ "def", "merge_params", "(", "params", ",", "lparams", ")", ":", "ignore", "=", "params", ".", "get", "(", "'ignore'", ",", "set", "(", ")", ")", "if", "'ignore'", "in", "lparams", ":", "ignore", "=", "ignore", "|", "set", "(", "lparams", "[", "'ignore'", "]", ")", "select", "=", "params", ".", "get", "(", "'select'", ",", "set", "(", ")", ")", "if", "'select'", "in", "lparams", ":", "select", "=", "select", "|", "set", "(", "lparams", "[", "'select'", "]", ")", "return", "ignore", ",", "select" ]
Merge global ignore/select with linter local params.
[ "Merge", "global", "ignore", "/", "select", "with", "linter", "local", "params", "." ]
python
train
31.909091
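merge_params is a pure function, so it can be exercised directly; a minimal sketch with made-up linter codes:

from pylama.core import merge_params

params = {'ignore': {'E501'}, 'select': {'W0401'}}   # global options
lparams = {'ignore': ['C901'], 'select': ['E231']}   # per-linter options

ignore, select = merge_params(params, lparams)
# ignore == {'E501', 'C901'}; select == {'W0401', 'E231'}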
kiwiz/gkeepapi
gkeepapi/__init__.py
https://github.com/kiwiz/gkeepapi/blob/78aaae8b988b1cf616e3973f7f15d4c6d5e996cc/gkeepapi/__init__.py#L663-L679
def createNote(self, title=None, text=None):
    """Create a new managed note. Any changes to the note will be uploaded when :py:meth:`sync` is called.

    Args:
        title (str): The title of the note.
        text (str): The text of the note.

    Returns:
        gkeepapi.node.Note: The new note.
    """
    node = _node.Note()
    if title is not None:
        node.title = title
    if text is not None:
        node.text = text
    self.add(node)
    return node
[ "def", "createNote", "(", "self", ",", "title", "=", "None", ",", "text", "=", "None", ")", ":", "node", "=", "_node", ".", "Note", "(", ")", "if", "title", "is", "not", "None", ":", "node", ".", "title", "=", "title", "if", "text", "is", "not", "None", ":", "node", ".", "text", "=", "text", "self", ".", "add", "(", "node", ")", "return", "node" ]
Create a new managed note. Any changes to the note will be uploaded when :py:meth:`sync` is called.

Args:
title (str): The title of the note.
text (str): The text of the note.

Returns:
gkeepapi.node.Note: The new note.
[ "Create", "a", "new", "managed", "note", ".", "Any", "changes", "to", "the", "note", "will", "be", "uploaded", "when", ":", "py", ":", "meth", ":", "sync", "is", "called", "." ]
python
train
30.235294
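A typical flow around createNote, per the docstring: the note is created locally and only uploaded on sync. The credentials below are placeholders:

import gkeepapi

keep = gkeepapi.Keep()
keep.login('user@example.com', 'app-password')  # placeholder credentials

note = keep.createNote('Todo', 'Buy milk')  # exists locally only, for now
keep.sync()  # the change is uploaded here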
pvlib/pvlib-python
pvlib/modelchain.py
https://github.com/pvlib/pvlib-python/blob/2e844a595b820b43d1170269781fa66bd0ccc8a3/pvlib/modelchain.py#L714-L788
def complete_irradiance(self, times=None, weather=None):
    """
    Determine the missing irradiation columns. Only two of the
    following data columns (dni, ghi, dhi) are needed to calculate
    the missing data.

    This function is not safe at the moment. Results can be too high
    or negative. Please contribute and help to improve this function
    on https://github.com/pvlib/pvlib-python

    Parameters
    ----------
    times : None or DatetimeIndex, default None
        Times at which to evaluate the model. Can be None if
        attribute `times` is already set.
    weather : None or pandas.DataFrame, default None
        Table with at least two columns containing one of the
        following data sets: dni, dhi, ghi. Can be None if
        attribute `weather` is already set.

    Returns
    -------
    self

    Assigns attributes: times, weather

    Examples
    --------
    This example does not work until the parameters `my_system`,
    `my_location`, `my_datetime` and `my_weather` are defined
    properly, but it shows the basic idea of how this method can
    be used.

    >>> from pvlib.modelchain import ModelChain

    >>> # my_weather containing 'dhi' and 'ghi'.
    >>> mc = ModelChain(my_system, my_location)  # doctest: +SKIP
    >>> mc.complete_irradiance(my_datetime, my_weather)  # doctest: +SKIP
    >>> mc.run_model()  # doctest: +SKIP

    >>> # my_weather containing 'dhi', 'ghi' and 'dni'.
    >>> mc = ModelChain(my_system, my_location)  # doctest: +SKIP
    >>> mc.run_model(my_datetime, my_weather)  # doctest: +SKIP
    """
    if weather is not None:
        self.weather = weather
    if times is not None:
        self.times = times
    self.solar_position = self.location.get_solarposition(
        self.times, method=self.solar_position_method)
    icolumns = set(self.weather.columns)
    wrn_txt = ("This function is not safe at the moment.\n" +
               "Results can be too high or negative.\n" +
               "Help to improve this function on github:\n" +
               "https://github.com/pvlib/pvlib-python \n")

    if {'ghi', 'dhi'} <= icolumns and 'dni' not in icolumns:
        clearsky = self.location.get_clearsky(
            times, solar_position=self.solar_position)
        self.weather.loc[:, 'dni'] = pvlib.irradiance.dni(
            self.weather.loc[:, 'ghi'], self.weather.loc[:, 'dhi'],
            self.solar_position.zenith,
            clearsky_dni=clearsky['dni'],
            clearsky_tolerance=1.1)
    elif {'dni', 'dhi'} <= icolumns and 'ghi' not in icolumns:
        warnings.warn(wrn_txt, UserWarning)
        self.weather.loc[:, 'ghi'] = (
            self.weather.dni * tools.cosd(self.solar_position.zenith) +
            self.weather.dhi)
    elif {'dni', 'ghi'} <= icolumns and 'dhi' not in icolumns:
        warnings.warn(wrn_txt, UserWarning)
        self.weather.loc[:, 'dhi'] = (
            self.weather.ghi - self.weather.dni *
            tools.cosd(self.solar_position.zenith))

    return self
[ "def", "complete_irradiance", "(", "self", ",", "times", "=", "None", ",", "weather", "=", "None", ")", ":", "if", "weather", "is", "not", "None", ":", "self", ".", "weather", "=", "weather", "if", "times", "is", "not", "None", ":", "self", ".", "times", "=", "times", "self", ".", "solar_position", "=", "self", ".", "location", ".", "get_solarposition", "(", "self", ".", "times", ",", "method", "=", "self", ".", "solar_position_method", ")", "icolumns", "=", "set", "(", "self", ".", "weather", ".", "columns", ")", "wrn_txt", "=", "(", "\"This function is not safe at the moment.\\n\"", "+", "\"Results can be too high or negative.\\n\"", "+", "\"Help to improve this function on github:\\n\"", "+", "\"https://github.com/pvlib/pvlib-python \\n\"", ")", "if", "{", "'ghi'", ",", "'dhi'", "}", "<=", "icolumns", "and", "'dni'", "not", "in", "icolumns", ":", "clearsky", "=", "self", ".", "location", ".", "get_clearsky", "(", "times", ",", "solar_position", "=", "self", ".", "solar_position", ")", "self", ".", "weather", ".", "loc", "[", ":", ",", "'dni'", "]", "=", "pvlib", ".", "irradiance", ".", "dni", "(", "self", ".", "weather", ".", "loc", "[", ":", ",", "'ghi'", "]", ",", "self", ".", "weather", ".", "loc", "[", ":", ",", "'dhi'", "]", ",", "self", ".", "solar_position", ".", "zenith", ",", "clearsky_dni", "=", "clearsky", "[", "'dni'", "]", ",", "clearsky_tolerance", "=", "1.1", ")", "elif", "{", "'dni'", ",", "'dhi'", "}", "<=", "icolumns", "and", "'ghi'", "not", "in", "icolumns", ":", "warnings", ".", "warn", "(", "wrn_txt", ",", "UserWarning", ")", "self", ".", "weather", ".", "loc", "[", ":", ",", "'ghi'", "]", "=", "(", "self", ".", "weather", ".", "dni", "*", "tools", ".", "cosd", "(", "self", ".", "solar_position", ".", "zenith", ")", "+", "self", ".", "weather", ".", "dhi", ")", "elif", "{", "'dni'", ",", "'ghi'", "}", "<=", "icolumns", "and", "'dhi'", "not", "in", "icolumns", ":", "warnings", ".", "warn", "(", "wrn_txt", ",", "UserWarning", ")", "self", ".", "weather", ".", "loc", "[", ":", ",", "'dhi'", "]", "=", "(", "self", ".", "weather", ".", "ghi", "-", "self", ".", "weather", ".", "dni", "*", "tools", ".", "cosd", "(", "self", ".", "solar_position", ".", "zenith", ")", ")", "return", "self" ]
Determine the missing irradiation columns. Only two of the following data columns (dni, ghi, dhi) are needed to calculate the missing data.

This function is not safe at the moment. Results can be too high or negative. Please contribute and help to improve this function on https://github.com/pvlib/pvlib-python

Parameters
----------
times : None or DatetimeIndex, default None
Times at which to evaluate the model. Can be None if attribute `times` is already set.
weather : None or pandas.DataFrame, default None
Table with at least two columns containing one of the following data sets: dni, dhi, ghi. Can be None if attribute `weather` is already set.

Returns
-------
self

Assigns attributes: times, weather

Examples
--------
This example does not work until the parameters `my_system`, `my_location`, `my_datetime` and `my_weather` are defined properly, but it shows the basic idea of how this method can be used.

>>> from pvlib.modelchain import ModelChain

>>> # my_weather containing 'dhi' and 'ghi'.
>>> mc = ModelChain(my_system, my_location)  # doctest: +SKIP
>>> mc.complete_irradiance(my_datetime, my_weather)  # doctest: +SKIP
>>> mc.run_model()  # doctest: +SKIP

>>> # my_weather containing 'dhi', 'ghi' and 'dni'.
>>> mc = ModelChain(my_system, my_location)  # doctest: +SKIP
>>> mc.run_model(my_datetime, my_weather)  # doctest: +SKIP
[ "Determine", "the", "missing", "irradiation", "columns", ".", "Only", "two", "of", "the", "following", "data", "columns", "(", "dni", "ghi", "dhi", ")", "are", "needed", "to", "calculate", "the", "missing", "data", "." ]
python
train
42.48
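A more concrete variant of the doctest above, with illustrative location, times and pvwatts-style module/inverter parameters (all values are made up):

import pandas as pd
from pvlib.location import Location
from pvlib.pvsystem import PVSystem
from pvlib.modelchain import ModelChain

times = pd.date_range('2019-06-01 10:00', periods=3, freq='1H',
                      tz='US/Arizona')
# Weather with only 'ghi' and 'dhi'; complete_irradiance derives 'dni'.
weather = pd.DataFrame({'ghi': [800.0, 900.0, 950.0],
                        'dhi': [100.0, 110.0, 120.0]}, index=times)

location = Location(32.2, -110.9, tz='US/Arizona')
system = PVSystem(surface_tilt=30, surface_azimuth=180,
                  module_parameters={'pdc0': 240, 'gamma_pdc': -0.004},
                  inverter_parameters={'pdc0': 240})
mc = ModelChain(system, location, aoi_model='physical',
                spectral_model='no_loss')

mc.complete_irradiance(times, weather)
# mc.weather now carries a 'dni' column alongside 'ghi' and 'dhi'.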
sbaechler/django-multilingual-search
multilingual/elasticsearch_backend.py
https://github.com/sbaechler/django-multilingual-search/blob/485c690d865da3267b19e073e28d3e2290f36611/multilingual/elasticsearch_backend.py#L108-L142
def update(self, index, iterable, commit=True): """ Updates the index with current data. :param index: The search_indexes.Index object :param iterable: The queryset :param commit: commit to the backend. """ parler = False # setup here because self.existing_mappings are overridden. if not self.setup_complete: try: self.setup() except elasticsearch.TransportError as e: if not self.silently_fail: raise self.log.error("Failed to add documents to Elasticsearch: %s", e) return if hasattr(iterable, 'language') and hasattr(iterable.language, '__call__'): parler = True # Django-parler for language in self.languages: self.index_name = self._index_name_for_language(language) # self.log.debug('updating index for {0}'.format(language)) if parler: # workaround for django-parler for item in iterable: item.set_current_language(language) super(ElasticsearchMultilingualSearchBackend, self).update( index, iterable, commit) else: with translation.override(language): super(ElasticsearchMultilingualSearchBackend, self).update( index, iterable, commit)
[ "def", "update", "(", "self", ",", "index", ",", "iterable", ",", "commit", "=", "True", ")", ":", "parler", "=", "False", "# setup here because self.existing_mappings are overridden.", "if", "not", "self", ".", "setup_complete", ":", "try", ":", "self", ".", "setup", "(", ")", "except", "elasticsearch", ".", "TransportError", "as", "e", ":", "if", "not", "self", ".", "silently_fail", ":", "raise", "self", ".", "log", ".", "error", "(", "\"Failed to add documents to Elasticsearch: %s\"", ",", "e", ")", "return", "if", "hasattr", "(", "iterable", ",", "'language'", ")", "and", "hasattr", "(", "iterable", ".", "language", ",", "'__call__'", ")", ":", "parler", "=", "True", "# Django-parler", "for", "language", "in", "self", ".", "languages", ":", "self", ".", "index_name", "=", "self", ".", "_index_name_for_language", "(", "language", ")", "# self.log.debug('updating index for {0}'.format(language))", "if", "parler", ":", "# workaround for django-parler", "for", "item", "in", "iterable", ":", "item", ".", "set_current_language", "(", "language", ")", "super", "(", "ElasticsearchMultilingualSearchBackend", ",", "self", ")", ".", "update", "(", "index", ",", "iterable", ",", "commit", ")", "else", ":", "with", "translation", ".", "override", "(", "language", ")", ":", "super", "(", "ElasticsearchMultilingualSearchBackend", ",", "self", ")", ".", "update", "(", "index", ",", "iterable", ",", "commit", ")" ]
Updates the index with current data. :param index: The search_indexes.Index object :param iterable: The queryset :param commit: commit to the backend.
[ "Updates", "the", "index", "with", "current", "data", ".", ":", "param", "index", ":", "The", "search_indexes", ".", "Index", "object", ":", "param", "iterable", ":", "The", "queryset", ":", "param", "commit", ":", "commit", "to", "the", "backend", "." ]
python
train
40.542857
tmr232/Sark
sark/structure.py
https://github.com/tmr232/Sark/blob/bee62879c2aea553a3924d887e2b30f2a6008581/sark/structure.py#L84-L100
def get_struct(name):
    """Get a struct by its name.

    Args:
        name: The name of the struct

    Returns:
        The struct's id

    Raises:
        exceptions.SarkStructNotFound: if the struct does not exist.
    """
    sid = idc.GetStrucIdByName(name)
    if sid == idaapi.BADADDR:
        raise exceptions.SarkStructNotFound()
    return sid
[ "def", "get_struct", "(", "name", ")", ":", "sid", "=", "idc", ".", "GetStrucIdByName", "(", "name", ")", "if", "sid", "==", "idaapi", ".", "BADADDR", ":", "raise", "exceptions", ".", "SarkStructNotFound", "(", ")", "return", "sid" ]
Get a struct by its name.

Args:
    name: The name of the struct

Returns:
    The struct's id

Raises:
    exceptions.SarkStructNotFound: if the struct does not exist.
[ "Get", "a", "struct", "by", "it", "s", "name", "." ]
python
train
20.235294
rosenbrockc/fortpy
fortpy/code.py
https://github.com/rosenbrockc/fortpy/blob/1ed0757c52d549e41d9d44bdea68cb89529293a5/fortpy/code.py#L224-L231
def _add_current_codedir(self, path): """Adds the directory of the file at the specified path as a base path to find other files in. """ dirpath = self.tramp.dirname(path) if dirpath not in self.basepaths: self.basepaths.append(dirpath) self.rescan()
[ "def", "_add_current_codedir", "(", "self", ",", "path", ")", ":", "dirpath", "=", "self", ".", "tramp", ".", "dirname", "(", "path", ")", "if", "dirpath", "not", "in", "self", ".", "basepaths", ":", "self", ".", "basepaths", ".", "append", "(", "dirpath", ")", "self", ".", "rescan", "(", ")" ]
Adds the directory of the file at the specified path as a base path to find other files in.
[ "Adds", "the", "directory", "of", "the", "file", "at", "the", "specified", "path", "as", "a", "base", "path", "to", "find", "other", "files", "in", "." ]
python
train
38.375
codelv/enaml-native
src/enamlnative/android/app.py
https://github.com/codelv/enaml-native/blob/c33986e9eda468c508806e0a3e73c771401e5718/src/enamlnative/android/app.py#L246-L257
def _on_permission_result(self, code, perms, results): """ Handles a permission request result by passing it to the handler with the given code. """ #: Get the handler for this request handler = self._permission_requests.get(code, None) if handler is not None: del self._permission_requests[code] #: Invoke that handler with the permission request response handler(code, perms, results)
[ "def", "_on_permission_result", "(", "self", ",", "code", ",", "perms", ",", "results", ")", ":", "#: Get the handler for this request", "handler", "=", "self", ".", "_permission_requests", ".", "get", "(", "code", ",", "None", ")", "if", "handler", "is", "not", "None", ":", "del", "self", ".", "_permission_requests", "[", "code", "]", "#: Invoke that handler with the permission request response", "handler", "(", "code", ",", "perms", ",", "results", ")" ]
Handles a permission request result by passing it to the handler with the given code.
[ "Handles", "a", "permission", "request", "result", "by", "passing", "it", "to", "the", "handler", "with", "the", "given", "code", "." ]
python
train
39.25
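The method above is a one-shot callback registry keyed by request code. A self-contained sketch of the same pattern (class and method names are hypothetical, not enaml-native's API):

class PermissionDispatcher(object):
    """One-shot callback registry keyed by integer request code."""

    def __init__(self):
        self._requests = {}

    def register(self, code, handler):
        self._requests[code] = handler

    def on_result(self, code, perms, results):
        # Pop so each handler fires at most once, as in the method above.
        handler = self._requests.pop(code, None)
        if handler is not None:
            handler(code, perms, results)

def on_camera(code, perms, results):
    print(code, perms, results)

dispatcher = PermissionDispatcher()
dispatcher.register(7, on_camera)
dispatcher.on_result(7, ['android.permission.CAMERA'], [True])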
jasonlaska/spherecluster
spherecluster/von_mises_fisher_mixture.py
https://github.com/jasonlaska/spherecluster/blob/701b0b1909088a56e353b363b2672580d4fe9d93/spherecluster/von_mises_fisher_mixture.py#L25-L33
def _inertia_from_labels(X, centers, labels): """Compute inertia with cosine distance using known labels. """ n_examples, n_features = X.shape inertia = np.zeros((n_examples,)) for ee in range(n_examples): inertia[ee] = 1 - X[ee, :].dot(centers[int(labels[ee]), :].T) return np.sum(inertia)
[ "def", "_inertia_from_labels", "(", "X", ",", "centers", ",", "labels", ")", ":", "n_examples", ",", "n_features", "=", "X", ".", "shape", "inertia", "=", "np", ".", "zeros", "(", "(", "n_examples", ",", ")", ")", "for", "ee", "in", "range", "(", "n_examples", ")", ":", "inertia", "[", "ee", "]", "=", "1", "-", "X", "[", "ee", ",", ":", "]", ".", "dot", "(", "centers", "[", "int", "(", "labels", "[", "ee", "]", ")", ",", ":", "]", ".", "T", ")", "return", "np", ".", "sum", "(", "inertia", ")" ]
Compute inertia with cosine distance using known labels.
[ "Compute", "inertia", "with", "cosine", "distance", "using", "known", "labels", "." ]
python
train
35
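Because rows of X and the centers are unit vectors in spherical clustering, 1 - x.dot(c) is the cosine distance, and the per-row loop above can be vectorized. A sketch, assuming L2-normalized inputs:

import numpy as np

def inertia_vectorized(X, centers, labels):
    # Sum of 1 - <x_i, c_{label_i}> over all rows, without the Python loop.
    assigned = centers[np.asarray(labels, dtype=int)]
    return np.sum(1.0 - np.einsum('ij,ij->i', X, assigned))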
linode/linode_api4-python
linode_api4/linode_client.py
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L67-L98
def stackscripts(self, *filters, **kwargs): """ Returns a list of :any:`StackScripts<StackScript>`, both public and private. You may filter this query to return only :any:`StackScripts<StackScript>` that match certain criteria. You may also request only your own private :any:`StackScripts<StackScript>`:: my_stackscripts = client.linode.stackscripts(mine_only=True) :param filters: Any number of filters to apply to this query. :param mine_only: If True, returns only private StackScripts :type mine_only: bool :returns: A list of StackScripts matching the query. :rtype: PaginatedList of StackScript """ # python2 can't handle *args and a single keyword argument, so this is a workaround if 'mine_only' in kwargs: if kwargs['mine_only']: new_filter = Filter({"mine":True}) if filters: filters = [ f for f in filters ] filters[0] = filters[0] & new_filter else: filters = [new_filter] del kwargs['mine_only'] if kwargs: raise TypeError("stackscripts() got unexpected keyword argument '{}'".format(kwargs.popitem()[0])) return self.client._get_and_filter(StackScript, *filters)
[ "def", "stackscripts", "(", "self", ",", "*", "filters", ",", "*", "*", "kwargs", ")", ":", "# python2 can't handle *args and a single keyword argument, so this is a workaround", "if", "'mine_only'", "in", "kwargs", ":", "if", "kwargs", "[", "'mine_only'", "]", ":", "new_filter", "=", "Filter", "(", "{", "\"mine\"", ":", "True", "}", ")", "if", "filters", ":", "filters", "=", "[", "f", "for", "f", "in", "filters", "]", "filters", "[", "0", "]", "=", "filters", "[", "0", "]", "&", "new_filter", "else", ":", "filters", "=", "[", "new_filter", "]", "del", "kwargs", "[", "'mine_only'", "]", "if", "kwargs", ":", "raise", "TypeError", "(", "\"stackscripts() got unexpected keyword argument '{}'\"", ".", "format", "(", "kwargs", ".", "popitem", "(", ")", "[", "0", "]", ")", ")", "return", "self", ".", "client", ".", "_get_and_filter", "(", "StackScript", ",", "*", "filters", ")" ]
Returns a list of :any:`StackScripts<StackScript>`, both public and private. You may filter this query to return only :any:`StackScripts<StackScript>` that match certain criteria. You may also request only your own private :any:`StackScripts<StackScript>`:: my_stackscripts = client.linode.stackscripts(mine_only=True) :param filters: Any number of filters to apply to this query. :param mine_only: If True, returns only private StackScripts :type mine_only: bool :returns: A list of StackScripts matching the query. :rtype: PaginatedList of StackScript
[ "Returns", "a", "list", "of", ":", "any", ":", "StackScripts<StackScript", ">", "both", "public", "and", "private", ".", "You", "may", "filter", "this", "query", "to", "return", "only", ":", "any", ":", "StackScripts<StackScript", ">", "that", "match", "certain", "criteria", ".", "You", "may", "also", "request", "only", "your", "own", "private", ":", "any", ":", "StackScripts<StackScript", ">", "::" ]
python
train
41.53125
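A usage sketch of the method above; the token is a placeholder, and filtering on StackScript.label assumes that attribute is filterable in this client version:

from linode_api4 import LinodeClient, StackScript

client = LinodeClient('my-api-token')  # placeholder token

mine = client.linode.stackscripts(mine_only=True)               # private only
base = client.linode.stackscripts(StackScript.label == 'deploy-base')
for script in base:
    print(script.label)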
mattrobenolt/ec2
ec2/helpers.py
https://github.com/mattrobenolt/ec2/blob/fc1f8bce6cf76899165d9ac006371181d52439f8/ec2/helpers.py#L12-L22
def make_compare(key, value, obj): "Map a key name to a specific comparison function" if '__' not in key: # If no __ exists, default to doing an "exact" comparison key, comp = key, 'exact' else: key, comp = key.rsplit('__', 1) # Check if comp is valid if hasattr(Compare, comp): return getattr(Compare, comp)(key, value, obj) raise AttributeError("No comparison '%s'" % comp)
[ "def", "make_compare", "(", "key", ",", "value", ",", "obj", ")", ":", "if", "'__'", "not", "in", "key", ":", "# If no __ exists, default to doing an \"exact\" comparison", "key", ",", "comp", "=", "key", ",", "'exact'", "else", ":", "key", ",", "comp", "=", "key", ".", "rsplit", "(", "'__'", ",", "1", ")", "# Check if comp is valid", "if", "hasattr", "(", "Compare", ",", "comp", ")", ":", "return", "getattr", "(", "Compare", ",", "comp", ")", "(", "key", ",", "value", ",", "obj", ")", "raise", "AttributeError", "(", "\"No comparison '%s'\"", "%", "comp", ")" ]
Map a key name to a specific comparison function
[ "Map", "a", "key", "name", "to", "a", "specific", "comparison", "function" ]
python
train
38.272727
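A minimal sketch of the Compare class that make_compare dispatches to; the real ec2 helper defines more operators, these two are illustrative:

class Compare(object):
    @staticmethod
    def exact(key, value, obj):
        return getattr(obj, key, None) == value

    @staticmethod
    def startswith(key, value, obj):
        return str(getattr(obj, key, '')).startswith(value)

# 'name__startswith' splits into key='name', comp='startswith';
# a bare 'name' falls through to the 'exact' comparison.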
KrzyHonk/bpmn-python
bpmn_python/bpmn_diagram_rep.py
https://github.com/KrzyHonk/bpmn-python/blob/6e5e28e3d656dbf5bd3d85d78fe8e3f2fb462629/bpmn_python/bpmn_diagram_rep.py#L416-L435
def add_gateway_to_diagram(self, process_id, gateway_type, gateway_name="", gateway_direction="Unspecified",
                           node_id=None):
    """
    Adds a gateway element to BPMN diagram.

    :param process_id: string object. ID of parent process,
    :param gateway_type: string object. Type of gateway to be added.
    :param gateway_name: string object. Name of the gateway,
    :param gateway_direction: string object. Accepted values - "Unspecified", "Converging", "Diverging", "Mixed".
        Default value - "Unspecified",
    :param node_id: string object. ID of node. Default value - None.
    :return: a tuple, where first value is gateway ID, second a reference to created object.
    """
    gateway_id, gateway = self.add_flow_node_to_diagram(process_id, gateway_type, gateway_name, node_id)
    if not (gateway_direction in ("Unspecified", "Converging", "Diverging", "Mixed")):
        raise bpmn_exception.BpmnPythonError("Invalid value passed as gatewayDirection parameter. Value passed: "
                                             + gateway_direction)
    self.diagram_graph.node[gateway_id][consts.Consts.gateway_direction] = gateway_direction
    return gateway_id, gateway
[ "def", "add_gateway_to_diagram", "(", "self", ",", "process_id", ",", "gateway_type", ",", "gateway_name", "=", "\"\"", ",", "gateway_direction", "=", "\"Unspecified\"", ",", "node_id", "=", "None", ")", ":", "gateway_id", ",", "gateway", "=", "self", ".", "add_flow_node_to_diagram", "(", "process_id", ",", "gateway_type", ",", "gateway_name", ",", "node_id", ")", "if", "not", "(", "gateway_direction", "in", "(", "\"Unspecified\"", ",", "\"Converging\"", ",", "\"Diverging\"", ",", "\"Mixed\"", ")", ")", ":", "raise", "bpmn_exception", ".", "BpmnPythonError", "(", "\"Invalid value passed as gatewayDirection parameter. Value passed: \"", "+", "gateway_direction", ")", "self", ".", "diagram_graph", ".", "node", "[", "gateway_id", "]", "[", "consts", ".", "Consts", ".", "gateway_direction", "]", "=", "gateway_direction", "return", "gateway_id", ",", "gateway" ]
Adds a gateway element to BPMN diagram.

:param process_id: string object. ID of parent process,
:param gateway_type: string object. Type of gateway to be added.
:param gateway_name: string object. Name of the gateway,
:param gateway_direction: string object. Accepted values - "Unspecified", "Converging", "Diverging", "Mixed".
    Default value - "Unspecified",
:param node_id: string object. ID of node. Default value - None.
:return: a tuple, where first value is gateway ID, second a reference to created object.
[ "Adds", "an", "exclusiveGateway", "element", "to", "BPMN", "diagram", "." ]
python
train
63.9
Qiskit/qiskit-terra
qiskit/quantum_info/operators/channel/transformations.py
https://github.com/Qiskit/qiskit-terra/blob/d4f58d903bc96341b816f7c35df936d6421267d1/qiskit/quantum_info/operators/channel/transformations.py#L115-L124
def _to_operator(rep, data, input_dim, output_dim): """Transform a QuantumChannel to the Operator representation.""" if rep == 'Operator': return data if rep == 'Stinespring': return _stinespring_to_operator(data, input_dim, output_dim) # Convert via Kraus representation if rep != 'Kraus': data = _to_kraus(rep, data, input_dim, output_dim) return _kraus_to_operator(data, input_dim, output_dim)
[ "def", "_to_operator", "(", "rep", ",", "data", ",", "input_dim", ",", "output_dim", ")", ":", "if", "rep", "==", "'Operator'", ":", "return", "data", "if", "rep", "==", "'Stinespring'", ":", "return", "_stinespring_to_operator", "(", "data", ",", "input_dim", ",", "output_dim", ")", "# Convert via Kraus representation", "if", "rep", "!=", "'Kraus'", ":", "data", "=", "_to_kraus", "(", "rep", ",", "data", ",", "input_dim", ",", "output_dim", ")", "return", "_kraus_to_operator", "(", "data", ",", "input_dim", ",", "output_dim", ")" ]
Transform a QuantumChannel to the Operator representation.
[ "Transform", "a", "QuantumChannel", "to", "the", "Operator", "representation", "." ]
python
test
43.5
MolSSI-BSE/basis_set_exchange
basis_set_exchange/cli/check.py
https://github.com/MolSSI-BSE/basis_set_exchange/blob/e79110aaeb65f392ed5032420322dee3336948f7/basis_set_exchange/cli/check.py#L71-L84
def _cli_check_basis(name, data_dir): '''Checks that a basis set exists and if not, raises a helpful exception''' if name is None: return None name = misc.transform_basis_name(name) metadata = api.get_metadata(data_dir) if not name in metadata: errstr = "Basis set '" + name + "' does not exist.\n" errstr += "For a complete list of basis sets, use the 'bse list-basis-sets' command" raise RuntimeError(errstr) return name
[ "def", "_cli_check_basis", "(", "name", ",", "data_dir", ")", ":", "if", "name", "is", "None", ":", "return", "None", "name", "=", "misc", ".", "transform_basis_name", "(", "name", ")", "metadata", "=", "api", ".", "get_metadata", "(", "data_dir", ")", "if", "not", "name", "in", "metadata", ":", "errstr", "=", "\"Basis set '\"", "+", "name", "+", "\"' does not exist.\\n\"", "errstr", "+=", "\"For a complete list of basis sets, use the 'bse list-basis-sets' command\"", "raise", "RuntimeError", "(", "errstr", ")", "return", "name" ]
Checks that a basis set exists and if not, raises a helpful exception
[ "Checks", "that", "a", "basis", "set", "exists", "and", "if", "not", "raises", "a", "helpful", "exception" ]
python
train
33.428571
ChristianTremblay/BAC0
BAC0/core/functions/discoverPoints.py
https://github.com/ChristianTremblay/BAC0/blob/8d95b065ea068524a08f5b0c34322ebeeba95d06/BAC0/core/functions/discoverPoints.py#L28-L139
def discoverPoints(bacnetapp, address, devID):
    """
    Discover the BACnet points in a BACnet device.

    :param bacnetapp: The app itself so we can call read
    :param address: address of the device as a string (ex. '2:5')
    :param devID: device ID of the bacnet device as a string (ex. '1001')

    :returns: a tuple with deviceName, pss, objList, df

    * *deviceName* : name of the device
    * *pss* : protocol services supported
    * *objList* : list of bacnet object (ex. analogInput, 1)
    * *df* : is a dataFrame containing pointType, pointAddress, pointName, description
    presentValue and units

    If pandas can't be found, df will be a simple array
    """
    pss = bacnetapp.read(
        "{} device {} protocolServicesSupported".format(address, devID)
    )
    deviceName = bacnetapp.read("{} device {} objectName".format(address, devID))
    # print('Device {}- building points list'.format(deviceName))
    objList = bacnetapp.read("{} device {} objectList".format(address, devID))
    newLine = []
    result = []
    points = []

    for pointType, pointAddr in objList:

        if "binary" in pointType:  # BI/BO/BV
            newLine = [pointType, pointAddr]
            infos = bacnetapp.readMultiple(
                "{} {} {} objectName description presentValue inactiveText activeText".format(
                    address, pointType, pointAddr
                )
            )

            newLine.extend(infos[:-2])
            newLine.extend([infos[-2:]])
            newPoint = BooleanPoint(
                pointType=newLine[0],
                pointAddress=newLine[1],
                pointName=newLine[2],
                description=newLine[3],
                presentValue=newLine[4],
                units_state=newLine[5],
            )

        elif "multiState" in pointType:  # MI/MV/MO
            newLine = [pointType, pointAddr]
            newLine.extend(
                bacnetapp.readMultiple(
                    "{} {} {} objectName description presentValue stateText".format(
                        address, pointType, pointAddr
                    )
                )
            )
            newPoint = EnumPoint(
                pointType=newLine[0],
                pointAddress=newLine[1],
                pointName=newLine[2],
                description=newLine[3],
                presentValue=newLine[4],
                units_state=newLine[5],
            )

        elif "analog" in pointType:  # AI/AO/AV
            newLine = [pointType, pointAddr]
            newLine.extend(
                bacnetapp.readMultiple(
                    "{} {} {} objectName description presentValue units".format(
                        address, pointType, pointAddr
                    )
                )
            )
            newPoint = NumericPoint(
                pointType=newLine[0],
                pointAddress=newLine[1],
                pointName=newLine[2],
                description=newLine[3],
                presentValue=newLine[4],
                units_state=newLine[5],
            )

        else:
            continue  # skip

        result.append(newLine)
        points.append(newPoint)

    if _PANDA:
        df = pd.DataFrame(
            result,
            columns=[
                "pointType",
                "pointAddress",
                "pointName",
                "description",
                "presentValue",
                "units_state",
            ],
        ).set_index(["pointName"])
    else:
        df = result
    # print('Ready!')
    return (deviceName, pss, objList, df, points)
[ "def", "discoverPoints", "(", "bacnetapp", ",", "address", ",", "devID", ")", ":", "pss", "=", "bacnetapp", ".", "read", "(", "\"{} device {} protocolServicesSupported\"", ".", "format", "(", "address", ",", "devID", ")", ")", "deviceName", "=", "bacnetapp", ".", "read", "(", "\"{} device {} objectName\"", ".", "format", "(", "address", ",", "devID", ")", ")", "# print('Device {}- building points list'.format(deviceName))", "objList", "=", "bacnetapp", ".", "read", "(", "\"{} device {] objectList\"", ".", "format", "(", "address", ",", "devID", ")", ")", "newLine", "=", "[", "]", "result", "=", "[", "]", "points", "=", "[", "]", "for", "pointType", ",", "pointAddr", "in", "objList", ":", "if", "\"binary\"", "in", "pointType", ":", "# BI/BO/BV", "newLine", "=", "[", "pointType", ",", "pointAddr", "]", "infos", "=", "bacnetapp", ".", "readMultiple", "(", "\"{} {} {} objectName description presentValue inactiveText activeText\"", ".", "format", "(", "address", ",", "pointType", ",", "pointAddr", ")", ")", "newLine", ".", "extend", "(", "infos", "[", ":", "-", "2", "]", ")", "newLine", ".", "extend", "(", "[", "infos", "[", "-", "2", ":", "]", "]", ")", "newPoint", "=", "BooleanPoint", "(", "pointType", "=", "newLine", "[", "0", "]", ",", "pointAddress", "=", "newLine", "[", "1", "]", ",", "pointName", "=", "newLine", "[", "2", "]", ",", "description", "=", "newLine", "[", "3", "]", ",", "presentValue", "=", "newLine", "[", "4", "]", ",", "units_state", "=", "newLine", "[", "5", "]", ",", ")", "elif", "\"multiState\"", "in", "pointType", ":", "# MI/MV/MO", "newLine", "=", "[", "pointType", ",", "pointAddr", "]", "newLine", ".", "extend", "(", "bacnetapp", ".", "readMultiple", "(", "\"{} {} {} objectName description presentValue stateText\"", ".", "format", "(", "address", ",", "pointType", ",", "pointAddr", ")", ")", ")", "newPoint", "=", "EnumPoint", "(", "pointType", "=", "newLine", "[", "0", "]", ",", "pointAddress", "=", "newLine", "[", "1", "]", ",", "pointName", "=", "newLine", "[", "2", "]", ",", "description", "=", "newLine", "[", "3", "]", ",", "presentValue", "=", "newLine", "[", "4", "]", ",", "units_state", "=", "newLine", "[", "5", "]", ",", ")", "elif", "\"analog\"", "in", "pointType", ":", "# AI/AO/AV", "newLine", "=", "[", "pointType", ",", "pointAddr", "]", "newLine", ".", "extend", "(", "bacnetapp", ".", "readMultiple", "(", "\"{} {} {} objectName description presentValue units\"", ".", "format", "(", "address", ",", "pointType", ",", "pointAddr", ")", ")", ")", "newPoint", "=", "NumericPoint", "(", "pointType", "=", "newLine", "[", "0", "]", ",", "pointAddress", "=", "newLine", "[", "1", "]", ",", "pointName", "=", "newLine", "[", "2", "]", ",", "description", "=", "newLine", "[", "3", "]", ",", "presentValue", "=", "newLine", "[", "4", "]", ",", "units_state", "=", "newLine", "[", "5", "]", ",", ")", "else", ":", "continue", "# skip", "result", ".", "append", "(", "newLine", ")", "points", ".", "append", "(", "newPoint", ")", "if", "_PANDA", ":", "df", "=", "pd", ".", "DataFrame", "(", "result", ",", "columns", "=", "[", "\"pointType\"", ",", "\"pointAddress\"", ",", "\"pointName\"", ",", "\"description\"", ",", "\"presentValue\"", ",", "\"units_state\"", ",", "]", ",", ")", ".", "set_index", "(", "[", "\"pointName\"", "]", ")", "else", ":", "df", "=", "result", "# print('Ready!')", "return", "(", "deviceName", ",", "pss", ",", "objList", ",", "df", ",", "points", ")" ]
Discover the BACnet points in a BACnet device.

:param bacnetapp: The app itself so we can call read
:param address: address of the device as a string (ex. '2:5')
:param devID: device ID of the bacnet device as a string (ex. '1001')

:returns: a tuple with deviceName, pss, objList, df

* *deviceName* : name of the device
* *pss* : protocol services supported
* *objList* : list of bacnet object (ex. analogInput, 1)
* *df* : is a dataFrame containing pointType, pointAddress, pointName, description
presentValue and units

If pandas can't be found, df will be a simple array
[ "Discover", "the", "BACnet", "points", "in", "a", "BACnet", "device", "." ]
python
train
31.428571
vkruoso/receita-tools
receita/tools/get.py
https://github.com/vkruoso/receita-tools/blob/fd62a252c76541c9feac6470b9048b31348ffe86/receita/tools/get.py#L88-L125
def valid(self, cnpj): """Check if a CNPJ is valid. We should avoid sending invalid CNPJ to the web service as we know it is going to be a waste of bandwidth. Assumes CNPJ is a string. """ if len(cnpj) != 14: return False tam = 12 nums = cnpj[:tam] digs = cnpj[tam:] tot = 0 pos = tam-7 for i in range(tam, 0, -1): tot = tot + int(nums[tam-i])*pos pos = pos - 1 if pos < 2: pos = 9 res = 0 if tot % 11 < 2 else 11 - (tot % 11) if res != int(digs[0]): return False tam = tam + 1 nums = cnpj[:tam] tot = 0 pos = tam-7 for i in range(tam, 0, -1): tot = tot + int(nums[tam-i])*pos pos = pos - 1 if pos < 2: pos = 9 res = 0 if tot % 11 < 2 else 11 - (tot % 11) if res != int(digs[1]): return False return True
[ "def", "valid", "(", "self", ",", "cnpj", ")", ":", "if", "len", "(", "cnpj", ")", "!=", "14", ":", "return", "False", "tam", "=", "12", "nums", "=", "cnpj", "[", ":", "tam", "]", "digs", "=", "cnpj", "[", "tam", ":", "]", "tot", "=", "0", "pos", "=", "tam", "-", "7", "for", "i", "in", "range", "(", "tam", ",", "0", ",", "-", "1", ")", ":", "tot", "=", "tot", "+", "int", "(", "nums", "[", "tam", "-", "i", "]", ")", "*", "pos", "pos", "=", "pos", "-", "1", "if", "pos", "<", "2", ":", "pos", "=", "9", "res", "=", "0", "if", "tot", "%", "11", "<", "2", "else", "11", "-", "(", "tot", "%", "11", ")", "if", "res", "!=", "int", "(", "digs", "[", "0", "]", ")", ":", "return", "False", "tam", "=", "tam", "+", "1", "nums", "=", "cnpj", "[", ":", "tam", "]", "tot", "=", "0", "pos", "=", "tam", "-", "7", "for", "i", "in", "range", "(", "tam", ",", "0", ",", "-", "1", ")", ":", "tot", "=", "tot", "+", "int", "(", "nums", "[", "tam", "-", "i", "]", ")", "*", "pos", "pos", "=", "pos", "-", "1", "if", "pos", "<", "2", ":", "pos", "=", "9", "res", "=", "0", "if", "tot", "%", "11", "<", "2", "else", "11", "-", "(", "tot", "%", "11", ")", "if", "res", "!=", "int", "(", "digs", "[", "1", "]", ")", ":", "return", "False", "return", "True" ]
Check if a CNPJ is valid. We should avoid sending invalid CNPJ to the web service as we know it is going to be a waste of bandwidth. Assumes CNPJ is a string.
[ "Check", "if", "a", "CNPJ", "is", "valid", "." ]
python
train
25.789474
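The two passes above are the same mod-11 check with weights 2..9 cycling from the rightmost digit. A compact, equivalent sketch with an illustrative base number:

def cnpj_check_digit(digits):
    # Mod-11 check digit; weights 2,3,...,9 repeat right-to-left.
    weights = range(2, 10)
    total = sum(int(d) * weights[i % 8] for i, d in enumerate(reversed(digits)))
    rest = total % 11
    return 0 if rest < 2 else 11 - rest

base = '112223330001'                     # illustrative 12-digit base
d1 = cnpj_check_digit(base)               # first verification digit
d2 = cnpj_check_digit(base + str(d1))     # second, over 13 digits
print(base + str(d1) + str(d2))           # prints 11222333000181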
MLAB-project/pymlab
src/pymlab/sensors/clkgen.py
https://github.com/MLAB-project/pymlab/blob/d18d858ae83b203defcf2aead0dbd11b3c444658/src/pymlab/sensors/clkgen.py#L102-L139
def set_freq(self, fout, freq):
    """
    Sets new output frequency; required parameters are real current
    frequency at output and new required frequency.
    """
    hsdiv_tuple = (4, 5, 6, 7, 9, 11)    # possible dividers
    n1div_tuple = (1,) + tuple(range(2, 129, 2))    #
    fdco_min = 5670.0    # set maximum as minimum
    hsdiv = self.get_hs_div()    # read current dividers
    n1div = self.get_n1_div()    #

    if abs((freq - fout) * 1e6 / fout) > 3500:    # Large change of frequency
        fdco = fout * hsdiv * n1div    # calculate high frequency oscillator
        fxtal = fdco / self.get_rfreq()    # should be fxtal = 114.285
        for hsdiv_iter in hsdiv_tuple:    # find dividers with minimal power consumption
            for n1div_iter in n1div_tuple:
                fdco_new = freq * hsdiv_iter * n1div_iter
                if (fdco_new >= 4850) and (fdco_new <= 5670):
                    if (fdco_new <= fdco_min):
                        fdco_min = fdco_new
                        hsdiv = hsdiv_iter
                        n1div = n1div_iter
        rfreq = fdco_min / fxtal

        self.freeze_dco()    # write registers
        self.set_hs_div(hsdiv)
        self.set_n1_div(n1div)
        self.set_rfreq(rfreq)
        self.unfreeze_dco()
        self.new_freq()

    else:    # Small change of frequency
        rfreq = self.get_rfreq() * (freq / fout)
        self.freeze_m()    # write registers
        self.set_rfreq(rfreq)
        self.unfreeze_m()
[ "def", "set_freq", "(", "self", ",", "fout", ",", "freq", ")", ":", "hsdiv_tuple", "=", "(", "4", ",", "5", ",", "6", ",", "7", ",", "9", ",", "11", ")", "# possible dividers", "n1div_tuple", "=", "(", "1", ",", ")", "+", "tuple", "(", "range", "(", "2", ",", "129", ",", "2", ")", ")", "#", "fdco_min", "=", "5670.0", "# set maximum as minimum", "hsdiv", "=", "self", ".", "get_hs_div", "(", ")", "# read curent dividers", "n1div", "=", "self", ".", "get_n1_div", "(", ")", "#", "if", "abs", "(", "(", "freq", "-", "fout", ")", "*", "1e6", "/", "fout", ")", ">", "3500", ":", "# Large change of frequency ", "fdco", "=", "fout", "*", "hsdiv", "*", "n1div", "# calculate high frequency oscillator", "fxtal", "=", "fdco", "/", "self", ".", "get_rfreq", "(", ")", "# should be fxtal = 114.285 ", "for", "hsdiv_iter", "in", "hsdiv_tuple", ":", "# find dividers with minimal power consumption", "for", "n1div_iter", "in", "n1div_tuple", ":", "fdco_new", "=", "freq", "*", "hsdiv_iter", "*", "n1div_iter", "if", "(", "fdco_new", ">=", "4850", ")", "and", "(", "fdco_new", "<=", "5670", ")", ":", "if", "(", "fdco_new", "<=", "fdco_min", ")", ":", "fdco_min", "=", "fdco_new", "hsdiv", "=", "hsdiv_iter", "n1div", "=", "n1div_iter", "rfreq", "=", "fdco_min", "/", "fxtal", "self", ".", "freeze_dco", "(", ")", "# write registers", "self", ".", "set_hs_div", "(", "hsdiv", ")", "self", ".", "set_n1_div", "(", "n1div", ")", "self", ".", "set_rfreq", "(", "rfreq", ")", "self", ".", "unfreeze_dco", "(", ")", "self", ".", "new_freq", "(", ")", "else", ":", "# Small change of frequency", "rfreq", "=", "self", ".", "get_rfreq", "(", ")", "*", "(", "freq", "/", "fout", ")", "self", ".", "freeze_m", "(", ")", "# write registers ", "self", ".", "set_rfreq", "(", "rfreq", ")", "self", ".", "unfreeze_m", "(", ")" ]
Sets new output frequency; required parameters are real current frequency at output and new required frequency.
[ "Sets", "new", "output", "frequency", "required", "parameters", "are", "real", "current", "frequency", "at", "output", "and", "new", "required", "frequency", "." ]
python
train
43.763158
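The divider search above keeps the DCO frequency as low as possible inside the 4850-5670 MHz window to minimize power. A standalone sketch of the same search plus a worked example; the 114.285 MHz crystal value comes from the comment in the code:

HSDIV = (4, 5, 6, 7, 9, 11)
N1DIV = (1,) + tuple(range(2, 129, 2))

def pick_dividers(freq_mhz, fdco_lo=4850.0, fdco_hi=5670.0):
    best = None
    for hs in HSDIV:
        for n1 in N1DIV:
            fdco = freq_mhz * hs * n1
            if fdco_lo <= fdco <= fdco_hi and (best is None or fdco <= best[0]):
                best = (fdco, hs, n1)  # lowest DCO frequency -> lowest power
    return best

# For a 125 MHz output: fdco = 125 * 5 * 8 = 5000 MHz, inside the window,
# so rfreq = 5000 / 114.285 ~= 43.75 with the nominal crystal.
print(pick_dividers(125.0))  # (5000.0, 5, 8)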
iotile/coretools
iotilegateway/iotilegateway/main.py
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/main.py#L54-L107
def main(argv=None, loop=SharedLoop, max_time=None): """Main entry point for iotile-gateway.""" should_raise = argv is not None if argv is None: argv = sys.argv[1:] parser = build_parser() cmd_args = parser.parse_args(argv) configure_logging(cmd_args.verbose) logger = logging.getLogger(__name__) try: args = {} if cmd_args.config is not None: try: with open(cmd_args.config, "r") as conf: args = json.load(conf) except IOError as exc: raise ScriptError("Could not open config file %s due to %s" % (cmd_args.config, str(exc)), 2) except ValueError as exc: raise ScriptError("Could not parse JSON from config file %s due to %s" % (cmd_args.config, str(exc)), 3) except TypeError as exc: raise ScriptError("You must pass the path to a json config file", 4) logger.critical("Starting gateway") gateway = IOTileGateway(args, loop=loop) loop.run_coroutine(gateway.start()) logger.critical("Gateway running") # Run forever until we receive a ctrl-c # (allow quitting early after max_time seconds for testing) loop.wait_for_interrupt(max_time=max_time) loop.run_coroutine(gateway.stop()) except ScriptError as exc: if should_raise: raise exc logger.fatal("Quitting due to error: %s", exc.msg) return exc.code except Exception as exc: # pylint: disable=W0703 if should_raise: raise exc logger.exception("Fatal error running gateway") return 1 return 0
[ "def", "main", "(", "argv", "=", "None", ",", "loop", "=", "SharedLoop", ",", "max_time", "=", "None", ")", ":", "should_raise", "=", "argv", "is", "not", "None", "if", "argv", "is", "None", ":", "argv", "=", "sys", ".", "argv", "[", "1", ":", "]", "parser", "=", "build_parser", "(", ")", "cmd_args", "=", "parser", ".", "parse_args", "(", "argv", ")", "configure_logging", "(", "cmd_args", ".", "verbose", ")", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "try", ":", "args", "=", "{", "}", "if", "cmd_args", ".", "config", "is", "not", "None", ":", "try", ":", "with", "open", "(", "cmd_args", ".", "config", ",", "\"r\"", ")", "as", "conf", ":", "args", "=", "json", ".", "load", "(", "conf", ")", "except", "IOError", "as", "exc", ":", "raise", "ScriptError", "(", "\"Could not open config file %s due to %s\"", "%", "(", "cmd_args", ".", "config", ",", "str", "(", "exc", ")", ")", ",", "2", ")", "except", "ValueError", "as", "exc", ":", "raise", "ScriptError", "(", "\"Could not parse JSON from config file %s due to %s\"", "%", "(", "cmd_args", ".", "config", ",", "str", "(", "exc", ")", ")", ",", "3", ")", "except", "TypeError", "as", "exc", ":", "raise", "ScriptError", "(", "\"You must pass the path to a json config file\"", ",", "4", ")", "logger", ".", "critical", "(", "\"Starting gateway\"", ")", "gateway", "=", "IOTileGateway", "(", "args", ",", "loop", "=", "loop", ")", "loop", ".", "run_coroutine", "(", "gateway", ".", "start", "(", ")", ")", "logger", ".", "critical", "(", "\"Gateway running\"", ")", "# Run forever until we receive a ctrl-c", "# (allow quitting early after max_time seconds for testing)", "loop", ".", "wait_for_interrupt", "(", "max_time", "=", "max_time", ")", "loop", ".", "run_coroutine", "(", "gateway", ".", "stop", "(", ")", ")", "except", "ScriptError", "as", "exc", ":", "if", "should_raise", ":", "raise", "exc", "logger", ".", "fatal", "(", "\"Quitting due to error: %s\"", ",", "exc", ".", "msg", ")", "return", "exc", ".", "code", "except", "Exception", "as", "exc", ":", "# pylint: disable=W0703", "if", "should_raise", ":", "raise", "exc", "logger", ".", "exception", "(", "\"Fatal error running gateway\"", ")", "return", "1", "return", "0" ]
Main entry point for iotile-gateway.
[ "Main", "entry", "point", "for", "iotile", "-", "gateway", "." ]
python
train
31.611111
IdentityPython/pysaml2
src/saml2/cryptography/asymmetric.py
https://github.com/IdentityPython/pysaml2/blob/d3aa78eeb7d37c12688f783cb4db1c7263a14ad6/src/saml2/cryptography/asymmetric.py#L16-L20
def key_sign(rsakey, message, digest): """Sign the given message with the RSA key.""" padding = _asymmetric.padding.PKCS1v15() signature = rsakey.sign(message, padding, digest) return signature
[ "def", "key_sign", "(", "rsakey", ",", "message", ",", "digest", ")", ":", "padding", "=", "_asymmetric", ".", "padding", ".", "PKCS1v15", "(", ")", "signature", "=", "rsakey", ".", "sign", "(", "message", ",", "padding", ",", "digest", ")", "return", "signature" ]
Sign the given message with the RSA key.
[ "Sign", "the", "given", "message", "with", "the", "RSA", "key", "." ]
python
train
41
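A usage sketch with the cryptography library this wrapper builds on; key size and message are illustrative, and key_sign is the function above:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
message = b'assertion bytes'
signature = key_sign(key, message, hashes.SHA256())

# Verification mirrors the call on the public half; it raises
# InvalidSignature if the message or signature was tampered with.
key.public_key().verify(signature, message, padding.PKCS1v15(), hashes.SHA256())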
Alignak-monitoring/alignak
alignak/scheduler.py
https://github.com/Alignak-monitoring/alignak/blob/f3c145207e83159b799d3714e4241399c7740a64/alignak/scheduler.py#L600-L607
def hook_point(self, hook_name):
    """Generic function to call modules methods if such method is available

    :param hook_name: function name to call
    :type hook_name: str
    :return: None
    """
    self.my_daemon.hook_point(hook_name=hook_name, handle=self)
[ "def", "hook_point", "(", "self", ",", "hook_name", ")", ":", "self", ".", "my_daemon", ".", "hook_point", "(", "hook_name", "=", "hook_name", ",", "handle", "=", "self", ")" ]
Generic function to call modules methods if such method is available

:param hook_name: function name to call
:type hook_name: str
:return: None
[ "Generic", "function", "to", "call", "modules", "methods", "if", "such", "method", "is", "avalaible" ]
python
train
35.5
roclark/sportsreference
sportsreference/mlb/schedule.py
https://github.com/roclark/sportsreference/blob/ea0bae432be76450e137671d2998eb38f962dffd/sportsreference/mlb/schedule.py#L172-L179
def datetime(self): """ Returns a datetime object of the month, day, year, and time the game was played. """ date_string = '%s %s' % (self._date, self._year) date_string = re.sub(r' \(\d+\)', '', date_string) return datetime.strptime(date_string, '%A, %b %d %Y')
[ "def", "datetime", "(", "self", ")", ":", "date_string", "=", "'%s %s'", "%", "(", "self", ".", "_date", ",", "self", ".", "_year", ")", "date_string", "=", "re", ".", "sub", "(", "r' \\(\\d+\\)'", ",", "''", ",", "date_string", ")", "return", "datetime", ".", "strptime", "(", "date_string", ",", "'%A, %b %d %Y'", ")" ]
Returns a datetime object of the month, day, year, and time the game was played.
[ "Returns", "a", "datetime", "object", "of", "the", "month", "day", "year", "and", "time", "the", "game", "was", "played", "." ]
python
train
38.875
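The re.sub strips the doubleheader marker that sports-reference appends to dates before the string is parsed; a worked example with an illustrative date string:

import re
from datetime import datetime

date_string = 'Wednesday, Apr 4 (2) 2018'   # '(2)' = game two of a doubleheader
date_string = re.sub(r' \(\d+\)', '', date_string)
print(datetime.strptime(date_string, '%A, %b %d %Y'))  # 2018-04-04 00:00:00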
paramiko/paramiko
paramiko/channel.py
https://github.com/paramiko/paramiko/blob/cf7d49d66f3b1fbc8b0853518a54050182b3b5eb/paramiko/channel.py#L233-L257
def exec_command(self, command): """ Execute a command on the server. If the server allows it, the channel will then be directly connected to the stdin, stdout, and stderr of the command being executed. When the command finishes executing, the channel will be closed and can't be reused. You must open a new channel if you wish to execute another command. :param str command: a shell command to execute. :raises: `.SSHException` -- if the request was rejected or the channel was closed """ m = Message() m.add_byte(cMSG_CHANNEL_REQUEST) m.add_int(self.remote_chanid) m.add_string("exec") m.add_boolean(True) m.add_string(command) self._event_pending() self.transport._send_user_message(m) self._wait_for_event()
[ "def", "exec_command", "(", "self", ",", "command", ")", ":", "m", "=", "Message", "(", ")", "m", ".", "add_byte", "(", "cMSG_CHANNEL_REQUEST", ")", "m", ".", "add_int", "(", "self", ".", "remote_chanid", ")", "m", ".", "add_string", "(", "\"exec\"", ")", "m", ".", "add_boolean", "(", "True", ")", "m", ".", "add_string", "(", "command", ")", "self", ".", "_event_pending", "(", ")", "self", ".", "transport", ".", "_send_user_message", "(", "m", ")", "self", ".", "_wait_for_event", "(", ")" ]
Execute a command on the server. If the server allows it, the channel will then be directly connected to the stdin, stdout, and stderr of the command being executed. When the command finishes executing, the channel will be closed and can't be reused. You must open a new channel if you wish to execute another command. :param str command: a shell command to execute. :raises: `.SSHException` -- if the request was rejected or the channel was closed
[ "Execute", "a", "command", "on", "the", "server", ".", "If", "the", "server", "allows", "it", "the", "channel", "will", "then", "be", "directly", "connected", "to", "the", "stdin", "stdout", "and", "stderr", "of", "the", "command", "being", "executed", "." ]
python
train
34.72
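A usage sketch; host, user and command are placeholders, and authentication is assumed to come from an agent or default keys. Note the constraint from the docstring: one exec per channel, so a new session is opened for each command:

import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect('example.com', username='deploy')

channel = client.get_transport().open_session()
channel.exec_command('uname -a')        # channel is now tied to this command
print(channel.makefile('r').read().decode())
channel.close()
client.close()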
jjgomera/iapws
iapws/iapws97.py
https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/iapws97.py#L3854-L3925
def _Bound_Ph(P, h):
    """Region definition for inputs P and h

    Parameters
    ----------
    P : float
        Pressure, [MPa]
    h : float
        Specific enthalpy, [kJ/kg]

    Returns
    -------
    region : float
        IAPWS-97 region code

    References
    ----------
    Wagner, W; Kretzschmar, H-J: International Steam Tables: Properties of
    Water and Steam Based on the Industrial Formulation IAPWS-IF97; Springer,
    2008; doi: 10.1007/978-3-540-74234-0. Fig. 2.5
    """
    region = None
    if Pmin <= P <= Ps_623:
        h14 = _Region1(_TSat_P(P), P)["h"]
        h24 = _Region2(_TSat_P(P), P)["h"]
        h25 = _Region2(1073.15, P)["h"]
        hmin = _Region1(273.15, P)["h"]
        hmax = _Region5(2273.15, P)["h"]
        if hmin <= h <= h14:
            region = 1
        elif h14 < h < h24:
            region = 4
        elif h24 <= h <= h25:
            region = 2
        elif h25 < h <= hmax:
            region = 5
    elif Ps_623 < P < Pc:
        hmin = _Region1(273.15, P)["h"]
        h13 = _Region1(623.15, P)["h"]
        h32 = _Region2(_t_P(P), P)["h"]
        h25 = _Region2(1073.15, P)["h"]
        hmax = _Region5(2273.15, P)["h"]
        if hmin <= h <= h13:
            region = 1
        elif h13 < h < h32:
            try:
                p34 = _PSat_h(h)
            except NotImplementedError:
                p34 = Pc
            if P < p34:
                region = 4
            else:
                region = 3
        elif h32 <= h <= h25:
            region = 2
        elif h25 < h <= hmax:
            region = 5
    elif Pc <= P <= 100:
        hmin = _Region1(273.15, P)["h"]
        h13 = _Region1(623.15, P)["h"]
        h32 = _Region2(_t_P(P), P)["h"]
        h25 = _Region2(1073.15, P)["h"]
        hmax = _Region5(2273.15, P)["h"]
        if hmin <= h <= h13:
            region = 1
        elif h13 < h < h32:
            region = 3
        elif h32 <= h <= h25:
            region = 2
        elif P <= 50 and h25 <= h <= hmax:
            region = 5
    return region
[ "def", "_Bound_Ph", "(", "P", ",", "h", ")", ":", "region", "=", "None", "if", "Pmin", "<=", "P", "<=", "Ps_623", ":", "h14", "=", "_Region1", "(", "_TSat_P", "(", "P", ")", ",", "P", ")", "[", "\"h\"", "]", "h24", "=", "_Region2", "(", "_TSat_P", "(", "P", ")", ",", "P", ")", "[", "\"h\"", "]", "h25", "=", "_Region2", "(", "1073.15", ",", "P", ")", "[", "\"h\"", "]", "hmin", "=", "_Region1", "(", "273.15", ",", "P", ")", "[", "\"h\"", "]", "hmax", "=", "_Region5", "(", "2273.15", ",", "P", ")", "[", "\"h\"", "]", "if", "hmin", "<=", "h", "<=", "h14", ":", "region", "=", "1", "elif", "h14", "<", "h", "<", "h24", ":", "region", "=", "4", "elif", "h24", "<=", "h", "<=", "h25", ":", "region", "=", "2", "elif", "h25", "<", "h", "<=", "hmax", ":", "region", "=", "5", "elif", "Ps_623", "<", "P", "<", "Pc", ":", "hmin", "=", "_Region1", "(", "273.15", ",", "P", ")", "[", "\"h\"", "]", "h13", "=", "_Region1", "(", "623.15", ",", "P", ")", "[", "\"h\"", "]", "h32", "=", "_Region2", "(", "_t_P", "(", "P", ")", ",", "P", ")", "[", "\"h\"", "]", "h25", "=", "_Region2", "(", "1073.15", ",", "P", ")", "[", "\"h\"", "]", "hmax", "=", "_Region5", "(", "2273.15", ",", "P", ")", "[", "\"h\"", "]", "if", "hmin", "<=", "h", "<=", "h13", ":", "region", "=", "1", "elif", "h13", "<", "h", "<", "h32", ":", "try", ":", "p34", "=", "_PSat_h", "(", "h", ")", "except", "NotImplementedError", ":", "p34", "=", "Pc", "if", "P", "<", "p34", ":", "region", "=", "4", "else", ":", "region", "=", "3", "elif", "h32", "<=", "h", "<=", "h25", ":", "region", "=", "2", "elif", "h25", "<", "h", "<=", "hmax", ":", "region", "=", "5", "elif", "Pc", "<=", "P", "<=", "100", ":", "hmin", "=", "_Region1", "(", "273.15", ",", "P", ")", "[", "\"h\"", "]", "h13", "=", "_Region1", "(", "623.15", ",", "P", ")", "[", "\"h\"", "]", "h32", "=", "_Region2", "(", "_t_P", "(", "P", ")", ",", "P", ")", "[", "\"h\"", "]", "h25", "=", "_Region2", "(", "1073.15", ",", "P", ")", "[", "\"h\"", "]", "hmax", "=", "_Region5", "(", "2273.15", ",", "P", ")", "[", "\"h\"", "]", "if", "hmin", "<=", "h", "<=", "h13", ":", "region", "=", "1", "elif", "h13", "<", "h", "<", "h32", ":", "region", "=", "3", "elif", "h32", "<=", "h", "<=", "h25", ":", "region", "=", "2", "elif", "P", "<=", "50", "and", "h25", "<=", "h", "<=", "hmax", ":", "region", "=", "5", "return", "region" ]
Region definition for inputs P and h

Parameters
----------
P : float
    Pressure, [MPa]
h : float
    Specific enthalpy, [kJ/kg]

Returns
-------
region : float
    IAPWS-97 region code

References
----------
Wagner, W; Kretzschmar, H-J: International Steam Tables: Properties of
Water and Steam Based on the Industrial Formulation IAPWS-IF97; Springer,
2008; doi: 10.1007/978-3-540-74234-0. Fig. 2.5
[ "Region", "definition", "for", "input", "P", "y", "h" ]
python
train
27.416667
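A usage sketch of the public class that routes through this bound check; pressure in MPa and enthalpy in kJ/kg, values illustrative:

from iapws import IAPWS97

steam = IAPWS97(P=1.0, h=1500.0)   # lands between h14 and h24 -> region 4
print(steam.T, steam.x)            # saturation temperature [K], vapor quality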
xray7224/PyPump
pypump/models/feed.py
https://github.com/xray7224/PyPump/blob/f921f691c39fe021f4fd124b6bc91718c9e49b4a/pypump/models/feed.py#L537-L560
def create(self, display_name, content=None): """ Create a new user list :class:`collection <pypump.models.collection.Collection>`. :param display_name: List title. :param content: (optional) List description. Example: >>> pump.me.lists.create(display_name='Friends', content='List of friends') >>> myfriends = pump.me.lists['Friends'] >>> print(myfriends) Friends """ activity = { "verb": "create", "object": { "objectType": "collection", "objectTypes": [self.membertype], "displayName": display_name, "content": content } } if self._post_activity(activity, unserialize=False): return self[display_name]
[ "def", "create", "(", "self", ",", "display_name", ",", "content", "=", "None", ")", ":", "activity", "=", "{", "\"verb\"", ":", "\"create\"", ",", "\"object\"", ":", "{", "\"objectType\"", ":", "\"collection\"", ",", "\"objectTypes\"", ":", "[", "self", ".", "membertype", "]", ",", "\"displayName\"", ":", "display_name", ",", "\"content\"", ":", "content", "}", "}", "if", "self", ".", "_post_activity", "(", "activity", ",", "unserialize", "=", "False", ")", ":", "return", "self", "[", "display_name", "]" ]
Create a new user list :class:`collection <pypump.models.collection.Collection>`. :param display_name: List title. :param content: (optional) List description. Example: >>> pump.me.lists.create(display_name='Friends', content='List of friends') >>> myfriends = pump.me.lists['Friends'] >>> print(myfriends) Friends
[ "Create", "a", "new", "user", "list", ":", "class", ":", "collection", "<pypump", ".", "models", ".", "collection", ".", "Collection", ">", "." ]
python
train
33.666667
pkkid/python-plexapi
plexapi/library.py
https://github.com/pkkid/python-plexapi/blob/9efbde96441c2bfbf410eacfb46e811e108e8bbc/plexapi/library.py#L31-L44
def sections(self): """ Returns a list of all media sections in this library. Library sections may be any of :class:`~plexapi.library.MovieSection`, :class:`~plexapi.library.ShowSection`, :class:`~plexapi.library.MusicSection`, :class:`~plexapi.library.PhotoSection`. """ key = '/library/sections' sections = [] for elem in self._server.query(key): for cls in (MovieSection, ShowSection, MusicSection, PhotoSection): if elem.attrib.get('type') == cls.TYPE: section = cls(self._server, elem, key) self._sectionsByID[section.key] = section sections.append(section) return sections
[ "def", "sections", "(", "self", ")", ":", "key", "=", "'/library/sections'", "sections", "=", "[", "]", "for", "elem", "in", "self", ".", "_server", ".", "query", "(", "key", ")", ":", "for", "cls", "in", "(", "MovieSection", ",", "ShowSection", ",", "MusicSection", ",", "PhotoSection", ")", ":", "if", "elem", ".", "attrib", ".", "get", "(", "'type'", ")", "==", "cls", ".", "TYPE", ":", "section", "=", "cls", "(", "self", ".", "_server", ",", "elem", ",", "key", ")", "self", ".", "_sectionsByID", "[", "section", ".", "key", "]", "=", "section", "sections", ".", "append", "(", "section", ")", "return", "sections" ]
Returns a list of all media sections in this library. Library sections may be any of :class:`~plexapi.library.MovieSection`, :class:`~plexapi.library.ShowSection`, :class:`~plexapi.library.MusicSection`, :class:`~plexapi.library.PhotoSection`.
[ "Returns", "a", "list", "of", "all", "media", "sections", "in", "this", "library", ".", "Library", "sections", "may", "be", "any", "of", ":", "class", ":", "~plexapi", ".", "library", ".", "MovieSection", ":", "class", ":", "~plexapi", ".", "library", ".", "ShowSection", ":", "class", ":", "~plexapi", ".", "library", ".", "MusicSection", ":", "class", ":", "~plexapi", ".", "library", ".", "PhotoSection", "." ]
python
train
51.785714
sdispater/poetry
poetry/utils/env.py
https://github.com/sdispater/poetry/blob/2d27acd76c165dd49f11934520a7973de7a3762a/poetry/utils/env.py#L391-L399
def _bin(self, bin): # type: (str) -> str """ Return path to the given executable. """ bin_path = (self._bin_dir / bin).with_suffix(".exe" if self._is_windows else "") if not bin_path.exists(): return bin return str(bin_path)
[ "def", "_bin", "(", "self", ",", "bin", ")", ":", "# type: (str) -> str", "bin_path", "=", "(", "self", ".", "_bin_dir", "/", "bin", ")", ".", "with_suffix", "(", "\".exe\"", "if", "self", ".", "_is_windows", "else", "\"\"", ")", "if", "not", "bin_path", ".", "exists", "(", ")", ":", "return", "bin", "return", "str", "(", "bin_path", ")" ]
Return path to the given executable.
[ "Return", "path", "to", "the", "given", "executable", "." ]
python
train
31
pydata/xarray
xarray/core/accessors.py
https://github.com/pydata/xarray/blob/6d93a95d05bdbfc33fff24064f67d29dd891ab58/xarray/core/accessors.py#L87-L110
def _round_field(values, name, freq): """Indirectly access pandas rounding functions by wrapping data as a Series and calling through `.dt` attribute. Parameters ---------- values : np.ndarray or dask.array-like Array-like container of datetime-like values name : str (ceil, floor, round) Name of rounding function freq : a freq string indicating the rounding resolution Returns ------- rounded timestamps : same type as values Array-like of datetime fields accessed for each element in values """ if isinstance(values, dask_array_type): from dask.array import map_blocks return map_blocks(_round_series, values, name, freq=freq, dtype=np.datetime64) else: return _round_series(values, name, freq)
[ "def", "_round_field", "(", "values", ",", "name", ",", "freq", ")", ":", "if", "isinstance", "(", "values", ",", "dask_array_type", ")", ":", "from", "dask", ".", "array", "import", "map_blocks", "return", "map_blocks", "(", "_round_series", ",", "values", ",", "name", ",", "freq", "=", "freq", ",", "dtype", "=", "np", ".", "datetime64", ")", "else", ":", "return", "_round_series", "(", "values", ",", "name", ",", "freq", ")" ]
Indirectly access pandas rounding functions by wrapping data as a Series and calling through `.dt` attribute. Parameters ---------- values : np.ndarray or dask.array-like Array-like container of datetime-like values name : str (ceil, floor, round) Name of rounding function freq : a freq string indicating the rounding resolution Returns ------- rounded timestamps : same type as values Array-like of datetime fields accessed for each element in values
[ "Indirectly", "access", "pandas", "rounding", "functions", "by", "wrapping", "data", "as", "a", "Series", "and", "calling", "through", ".", "dt", "attribute", "." ]
python
train
33.458333
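The public entry point for this helper is the datetime accessor; a short example with illustrative timestamps:

import pandas as pd
import xarray as xr

times = xr.DataArray(
    pd.date_range('2000-01-01 10:17', periods=3, freq='37min'), dims='time')
print(times.dt.floor('H'))   # each timestamp rounded down to the hour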
lrq3000/pyFileFixity
pyFileFixity/lib/profilers/visual/pympler/classtracker.py
https://github.com/lrq3000/pyFileFixity/blob/fd5ef23bb13835faf1e3baa773619b86a1cc9bdf/pyFileFixity/lib/profilers/visual/pympler/classtracker.py#L334-L338
def _track_modify(self, cls, name, detail, keep, trace): """ Modify settings of a tracked class """ self._observers[cls].modify(name, detail, keep, trace)
[ "def", "_track_modify", "(", "self", ",", "cls", ",", "name", ",", "detail", ",", "keep", ",", "trace", ")", ":", "self", ".", "_observers", "[", "cls", "]", ".", "modify", "(", "name", ",", "detail", ",", "keep", ",", "trace", ")" ]
Modify settings of a tracked class
[ "Modify", "settings", "of", "a", "tracked", "class" ]
python
train
36.4
gwastro/pycbc
pycbc/tmpltbank/em_progenitors.py
https://github.com/gwastro/pycbc/blob/7a64cdd104d263f1b6ea0b01e6841837d05a4cb3/pycbc/tmpltbank/em_progenitors.py#L199-L229
def pos_branch(incl, chi):
    """
    Determines the effective [as defined in Stone, Loeb, Berger, PRD 87,
    084053 (2013)] aligned dimensionless spin parameter of a NS-BH binary
    with tilted BH spin.  This means finding the root chi_eff of
    ISCO_eq_chi_first(chi_eff, PG_ISSO_solver(chi,incl)).
    The result returned by this function belongs to the branch of the
    greater solutions, i.e. the greater of the two possible solutions is
    returned.

    Parameters
    -----------
    incl: float
        the inclination angle between the BH spin and the orbital angular
        momentum in radians
    chi: float
        the BH dimensionless spin parameter

    Returns
    ----------
    chi_eff: float
        the (greater) effective dimensionless spin parameter solution
    """
    if incl == 0:
        chi_eff = chi
    else:
        rISSO = PG_ISSO_solver(chi, incl)
        # args must be a tuple, hence the trailing comma
        chi_eff = scipy.optimize.fsolve(ISCO_eq_chi_first, 1.0, args=(rISSO,))

    return chi_eff
[ "def", "pos_branch", "(", "incl", ",", "chi", ")", ":", "if", "incl", "==", "0", ":", "chi_eff", "=", "chi", "else", ":", "rISSO", "=", "PG_ISSO_solver", "(", "chi", ",", "incl", ")", "chi_eff", "=", "scipy", ".", "optimize", ".", "fsolve", "(", "ISCO_eq_chi_first", ",", "1.0", ",", "args", "=", "(", "rISSO", ")", ")", "return", "chi_eff" ]
Determines the effective [as defined in Stone, Loeb, Berger, PRD 87, 084053 (2013)] aligned dimensionless spin parameter of a NS-BH binary with tilted BH spin. This means finding the root chi_eff of ISCO_eq_chi_first(chi_eff, PG_ISSO_solver(chi,incl)). The result returned by this function belongs to the branch of the greater solutions, i.e. the greater of the two possible solutions is returned. Parameters ----------- incl: float the inclination angle between the BH spin and the orbital angular momentum in radians chi: float the BH dimensionless spin parameter Returns ---------- chi_eff: float the (greater) effective dimensionless spin parameter solution
[ "Determines", "the", "effective", "[", "as", "defined", "in", "Stone", "Loeb", "Berger", "PRD", "87", "084053", "(", "2013", ")", "]", "aligned", "dimensionless", "spin", "parameter", "of", "a", "NS", "-", "BH", "binary", "with", "tilted", "BH", "spin", ".", "This", "means", "finding", "the", "root", "chi_eff", "of", "ISCO_eq_chi_first", "(", "chi_eff", "PG_ISSO_solver", "(", "chi", "incl", "))", ".", "The", "result", "returned", "by", "this", "function", "belongs", "to", "the", "branch", "of", "the", "greater", "solutions", "i", ".", "e", ".", "the", "greater", "of", "the", "two", "possible", "solutions", "is", "returned", "." ]
python
train
30.870968
OSSOS/MOP
src/ossos/core/ossos/downloads/cutouts/source.py
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/downloads/cutouts/source.py#L298-L328
def get_observed_magnitude(self, centroid=True):
    # NOTE: this import is only here so that we don't load up IRAF
    # unnecessarily (ex: for candidates processing).
    """
    Get the magnitude at the current pixel x/y location.

    :return: Table
    """
    max_count = float(self.astrom_header.get("MAXCOUNT", 30000))
    (x, y, hdulist_index) = self.pixel_coord
    tmp_file = self._hdu_on_disk(hdulist_index)
    try:
        from ossos import daophot
        phot = daophot.phot_mag(tmp_file,
                                x, y,
                                aperture=self.apcor.aperture,
                                sky=self.apcor.sky,
                                swidth=self.apcor.swidth,
                                apcor=self.apcor.apcor,
                                zmag=self.zmag,
                                maxcount=max_count,
                                extno=1,
                                centroid=centroid)
        if not self.apcor.valid:
            phot['PIER'][0] = 1
        return phot
    except Exception as ex:
        print(ex)
        raise ex
    finally:
        self.close()
[ "def", "get_observed_magnitude", "(", "self", ",", "centroid", "=", "True", ")", ":", "# NOTE: this import is only here so that we don't load up IRAF", "# unnecessarily (ex: for candidates processing).", "max_count", "=", "float", "(", "self", ".", "astrom_header", ".", "get", "(", "\"MAXCOUNT\"", ",", "30000", ")", ")", "(", "x", ",", "y", ",", "hdulist_index", ")", "=", "self", ".", "pixel_coord", "tmp_file", "=", "self", ".", "_hdu_on_disk", "(", "hdulist_index", ")", "try", ":", "from", "ossos", "import", "daophot", "phot", "=", "daophot", ".", "phot_mag", "(", "tmp_file", ",", "x", ",", "y", ",", "aperture", "=", "self", ".", "apcor", ".", "aperture", ",", "sky", "=", "self", ".", "apcor", ".", "sky", ",", "swidth", "=", "self", ".", "apcor", ".", "swidth", ",", "apcor", "=", "self", ".", "apcor", ".", "apcor", ",", "zmag", "=", "self", ".", "zmag", ",", "maxcount", "=", "max_count", ",", "extno", "=", "1", ",", "centroid", "=", "centroid", ")", "if", "not", "self", ".", "apcor", ".", "valid", ":", "phot", "[", "'PIER'", "]", "[", "0", "]", "=", "1", "return", "phot", "except", "Exception", "as", "ex", ":", "print", "ex", "raise", "ex", "finally", ":", "self", ".", "close", "(", ")" ]
Get the magnitude at the current pixel x/y location. :return: Table
[ "Get", "the", "magnitude", "at", "the", "current", "pixel", "x", "/", "y", "location", "." ]
python
train
38.516129
nyergler/hieroglyph
src/hieroglyph/builder.py
https://github.com/nyergler/hieroglyph/blob/1ef062fad5060006566f8d6bd3b5a231ac7e0488/src/hieroglyph/builder.py#L145-L167
def post_process_images(self, doctree): """Pick the best candidate for all image URIs.""" super(AbstractSlideBuilder, self).post_process_images(doctree) # figure out where this doctree is in relation to the srcdir relative_base = ( ['..'] * doctree.attributes.get('source')[len(self.srcdir) + 1:].count('/') ) for node in doctree.traverse(nodes.image): if node.get('candidates') is None: node['candidates'] = ('*',) # fix up images with absolute paths if node['uri'].startswith(self.outdir): node['uri'] = '/'.join( relative_base + [ node['uri'][len(self.outdir) + 1:] ] )
[ "def", "post_process_images", "(", "self", ",", "doctree", ")", ":", "super", "(", "AbstractSlideBuilder", ",", "self", ")", ".", "post_process_images", "(", "doctree", ")", "# figure out where this doctree is in relation to the srcdir", "relative_base", "=", "(", "[", "'..'", "]", "*", "doctree", ".", "attributes", ".", "get", "(", "'source'", ")", "[", "len", "(", "self", ".", "srcdir", ")", "+", "1", ":", "]", ".", "count", "(", "'/'", ")", ")", "for", "node", "in", "doctree", ".", "traverse", "(", "nodes", ".", "image", ")", ":", "if", "node", ".", "get", "(", "'candidates'", ")", "is", "None", ":", "node", "[", "'candidates'", "]", "=", "(", "'*'", ",", ")", "# fix up images with absolute paths", "if", "node", "[", "'uri'", "]", ".", "startswith", "(", "self", ".", "outdir", ")", ":", "node", "[", "'uri'", "]", "=", "'/'", ".", "join", "(", "relative_base", "+", "[", "node", "[", "'uri'", "]", "[", "len", "(", "self", ".", "outdir", ")", "+", "1", ":", "]", "]", ")" ]
Pick the best candidate for all image URIs.
[ "Pick", "the", "best", "candidate", "for", "all", "image", "URIs", "." ]
python
train
33.695652
NiklasRosenstein-Python/nr-deprecated
nr/stream.py
https://github.com/NiklasRosenstein-Python/nr-deprecated/blob/f9f8b89ea1b084841a8ab65784eaf68852686b2a/nr/stream.py#L70-L85
def unique(cls, iterable, key=None): """ Yields unique items from *iterable* whilst preserving the original order. """ if key is None: key = lambda x: x def generator(): seen = set() seen_add = seen.add for item in iterable: key_val = key(item) if key_val not in seen: seen_add(key_val) yield item return cls(generator())
[ "def", "unique", "(", "cls", ",", "iterable", ",", "key", "=", "None", ")", ":", "if", "key", "is", "None", ":", "key", "=", "lambda", "x", ":", "x", "def", "generator", "(", ")", ":", "seen", "=", "set", "(", ")", "seen_add", "=", "seen", ".", "add", "for", "item", "in", "iterable", ":", "key_val", "=", "key", "(", "item", ")", "if", "key_val", "not", "in", "seen", ":", "seen_add", "(", "key_val", ")", "yield", "item", "return", "cls", "(", "generator", "(", ")", ")" ]
Yields unique items from *iterable* whilst preserving the original order.
[ "Yields", "unique", "items", "from", "*", "iterable", "*", "whilst", "preserving", "the", "original", "order", "." ]
python
train
24.4375
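The same order-preserving dedup as a plain generator, with a key example; a standalone sketch without the Stream wrapper:

def unique(iterable, key=lambda x: x):
    seen = set()
    for item in iterable:
        k = key(item)
        if k not in seen:
            seen.add(k)
            yield item

print(list(unique(['Apple', 'apple', 'Banana'], key=str.lower)))
# -> ['Apple', 'Banana']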
whiteclover/dbpy
db/query/insert.py
https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/query/insert.py#L36-L47
def values(self, values):
    """The values for insert; it can be a dict row or a list/tuple row.
    """
    if isinstance(values, dict):
        l = []
        for column in self._columns:
            l.append(values[column])
        self._values.append(tuple(l))
    else:
        self._values.append(values)
    return self
[ "def", "values", "(", "self", ",", "values", ")", ":", "if", "isinstance", "(", "values", ",", "dict", ")", ":", "l", "=", "[", "]", "for", "column", "in", "self", ".", "_columns", ":", "l", ".", "append", "(", "values", "[", "column", "]", ")", "self", ".", "_values", ".", "append", "(", "tuple", "(", "l", ")", ")", "else", ":", "self", ".", "_values", ".", "append", "(", "values", ")", "return", "self" ]
The values for insert; it can be a dict row or a list/tuple row.
[ "The", "values", "for", "insert", "it", "can", "be", "a", "dict", "row", "or", "list", "tuple", "row", "." ]
python
train
30.666667
aquatix/python-utilkit
utilkit/fileutil.py
https://github.com/aquatix/python-utilkit/blob/1b4a4175381d2175592208619315f399610f915c/utilkit/fileutil.py#L72-L88
def list_files(dirname, extension=None): """ List all files in directory `dirname`, option to filter on file extension """ f = [] for (dirpath, dirnames, filenames) in os.walk(dirname): f.extend(filenames) break if extension is not None: # Filter on extension filtered = [] for filename in f: fn, ext = os.path.splitext(filename) if ext.lower() == '.' + extension.lower(): filtered.append(filename) f = filtered return f
[ "def", "list_files", "(", "dirname", ",", "extension", "=", "None", ")", ":", "f", "=", "[", "]", "for", "(", "dirpath", ",", "dirnames", ",", "filenames", ")", "in", "os", ".", "walk", "(", "dirname", ")", ":", "f", ".", "extend", "(", "filenames", ")", "break", "if", "extension", "is", "not", "None", ":", "# Filter on extension", "filtered", "=", "[", "]", "for", "filename", "in", "f", ":", "fn", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "if", "ext", ".", "lower", "(", ")", "==", "'.'", "+", "extension", ".", "lower", "(", ")", ":", "filtered", ".", "append", "(", "filename", ")", "f", "=", "filtered", "return", "f" ]
List all files in directory `dirname`, option to filter on file extension
[ "List", "all", "files", "in", "directory", "dirname", "option", "to", "filter", "on", "file", "extension" ]
python
train
30.588235
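The same walk-once-and-break idea in isolation; this is a sketch under the assumption that only the top level of `dirname` should be listed:

```python
import os

# The early return keeps only the first os.walk() yield, i.e. the top
# level of dirname; the extension filter is case-insensitive.
def list_top_level(dirname, extension=None):
    for _dirpath, _dirnames, filenames in os.walk(dirname):
        if extension is None:
            return filenames
        suffix = '.' + extension.lower()
        return [f for f in filenames
                if os.path.splitext(f)[1].lower() == suffix]
    return []

print(list_top_level('.', 'py'))
```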
sanger-pathogens/circlator
circlator/merge.py
https://github.com/sanger-pathogens/circlator/blob/a4befb8c9dbbcd4b3ad1899a95aa3e689d58b638/circlator/merge.py#L158-L164
def _get_hit_nearest_ref_end(self, hits): '''Returns the hit nearest to the end of the ref sequence from the input list of hits''' nearest_to_end = hits[0] for hit in hits[1:]: if hit.ref_coords().end > nearest_to_end.ref_coords().end: nearest_to_end = hit return nearest_to_end
[ "def", "_get_hit_nearest_ref_end", "(", "self", ",", "hits", ")", ":", "nearest_to_end", "=", "hits", "[", "0", "]", "for", "hit", "in", "hits", "[", "1", ":", "]", ":", "if", "hit", ".", "ref_coords", "(", ")", ".", "end", ">", "nearest_to_end", ".", "ref_coords", "(", ")", ".", "end", ":", "nearest_to_end", "=", "hit", "return", "nearest_to_end" ]
Returns the hit nearest to the end of the ref sequence from the input list of hits
[ "Returns", "the", "hit", "nearest", "to", "the", "end", "of", "the", "ref", "sequence", "from", "the", "input", "list", "of", "hits" ]
python
train
47.428571
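The linear scan above is equivalent to a `max()` over the reference end coordinate; the namedtuples below are stand-ins for the real hit objects, whose `ref_coords()` API is assumed:

```python
from collections import namedtuple

Coords = namedtuple('Coords', 'start end')
Hit = namedtuple('Hit', 'name coords')

hits = [Hit('a', Coords(0, 100)), Hit('b', Coords(50, 400)),
        Hit('c', Coords(10, 250))]
# First-seen maximum wins in both the loop and max(), so they agree:
nearest = max(hits, key=lambda h: h.coords.end)
print(nearest.name)  # 'b'
```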
jaredLunde/vital-tools
vital/debug/__init__.py
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L947-L951
def randset(self): """ -> a #set of random integers """ return { self._map_type(int) for x in range(self.random.randint(3, 10))}
[ "def", "randset", "(", "self", ")", ":", "return", "{", "self", ".", "_map_type", "(", "int", ")", "for", "x", "in", "range", "(", "self", ".", "random", ".", "randint", "(", "3", ",", "10", ")", ")", "}" ]
-> a #set of random integers
[ "-", ">", "a", "#set", "of", "random", "integers" ]
python
train
32.8
openpaperwork/paperwork-backend
paperwork_backend/shell.py
https://github.com/openpaperwork/paperwork-backend/blob/114b831e94e039e68b339751fd18250877abad76/paperwork_backend/shell.py#L327-L401
def cmd_guess_labels(*args): """ Arguments: <document id> [-- [--apply]] Guess the labels that should be set on the document. Example: paperwork-shell guess_labels -- 20161207_1144_00_8 --apply Possible JSON replies: -- { "status": "error", "exception": "yyy", "reason": "xxxx", "args": "(xxxx, )" } -- { "status": "ok", "docid": "xxxx", "current_labels": ["label_a", "label_b"], "guessed_labels": ["label_b", "label_c"], "applied": "yes", } """ args = list(args) apply_labels = False if "--apply" in args: apply_labels = True args.remove("--apply") docid = args[0] dsearch = get_docsearch() doc = dsearch.get(docid) if doc is None: raise Exception( "Document {} not found. Cannot guess labels".format( docid ) ) verbose("Current labels: {}".format( ", ".join([label.name for label in doc.labels]) )) guessed = dsearch.guess_labels(doc) verbose("Guessed labels: {}".format( ", ".join([label.name for label in guessed]) )) r = { 'docid': doc.docid, 'current_labels': [label.name for label in doc.labels], 'guessed_labels': [label.name for label in guessed], 'applied': "yes" if apply_labels else "no", } changed = False if apply_labels: for label in guessed: if label not in doc.labels: dsearch.add_label(doc, label, update_index=False) changed = True for label in doc.labels: if label not in guessed: dsearch.remove_label(doc, label, update_index=False) changed = True if changed: index_updater = dsearch.get_index_updater(optimize=False) index_updater.upd_doc(doc) index_updater.commit() verbose("Document {} updated".format(docid)) elif apply_labels: verbose("Document {} unchanged".format(docid)) reply(r)
[ "def", "cmd_guess_labels", "(", "*", "args", ")", ":", "args", "=", "list", "(", "args", ")", "apply_labels", "=", "False", "if", "\"--apply\"", "in", "args", ":", "apply_labels", "=", "True", "args", ".", "remove", "(", "\"--apply\"", ")", "docid", "=", "args", "[", "0", "]", "dsearch", "=", "get_docsearch", "(", ")", "doc", "=", "dsearch", ".", "get", "(", "docid", ")", "if", "doc", "is", "None", ":", "raise", "Exception", "(", "\"Document {} not found. Cannot guess labels\"", ".", "format", "(", "docid", ")", ")", "verbose", "(", "\"Current labels: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "[", "label", ".", "name", "for", "label", "in", "doc", ".", "labels", "]", ")", ")", ")", "guessed", "=", "dsearch", ".", "guess_labels", "(", "doc", ")", "verbose", "(", "\"Guessed labels: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "[", "label", ".", "name", "for", "label", "in", "guessed", "]", ")", ")", ")", "r", "=", "{", "'docid'", ":", "doc", ".", "docid", ",", "'current_labels'", ":", "[", "label", ".", "name", "for", "label", "in", "doc", ".", "labels", "]", ",", "'guessed_labels'", ":", "[", "label", ".", "name", "for", "label", "in", "guessed", "]", ",", "'applied'", ":", "\"yes\"", "if", "apply_labels", "else", "\"no\"", ",", "}", "changed", "=", "False", "if", "apply_labels", ":", "for", "label", "in", "guessed", ":", "if", "label", "not", "in", "doc", ".", "labels", ":", "dsearch", ".", "add_label", "(", "doc", ",", "label", ",", "update_index", "=", "False", ")", "changed", "=", "True", "for", "label", "in", "doc", ".", "labels", ":", "if", "label", "not", "in", "guessed", ":", "dsearch", ".", "remove_label", "(", "doc", ",", "label", ",", "update_index", "=", "False", ")", "changed", "=", "True", "if", "changed", ":", "index_updater", "=", "dsearch", ".", "get_index_updater", "(", "optimize", "=", "False", ")", "index_updater", ".", "upd_doc", "(", "doc", ")", "index_updater", ".", "commit", "(", ")", "verbose", "(", "\"Document {} updated\"", ".", "format", "(", "docid", ")", ")", "elif", "apply_labels", ":", "verbose", "(", "\"Document {} unchanged\"", ".", "format", "(", "docid", ")", ")", "reply", "(", "r", ")" ]
Arguments: <document id> [-- [--apply]] Guess the labels that should be set on the document. Example: paperwork-shell guess_labels -- 20161207_1144_00_8 --apply Possible JSON replies: -- { "status": "error", "exception": "yyy", "reason": "xxxx", "args": "(xxxx, )" } -- { "status": "ok", "docid": "xxxx", "current_labels": ["label_a", "label_b"], "guessed_labels": ["label_b", "label_c"], "applied": "yes", }
[ "Arguments", ":", "<document", "id", ">", "[", "--", "[", "--", "apply", "]]" ]
python
train
27.2
poppy-project/pypot
pypot/vrep/remoteApiBindings/vrep.py
https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/remoteApiBindings/vrep.py#L520-L527
def simxAuxiliaryConsolePrint(clientID, consoleHandle, txt, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' if (sys.version_info[0] == 3) and (type(txt) is str): txt=txt.encode('utf-8') return c_AuxiliaryConsolePrint(clientID, consoleHandle, txt, operationMode)
[ "def", "simxAuxiliaryConsolePrint", "(", "clientID", ",", "consoleHandle", ",", "txt", ",", "operationMode", ")", ":", "if", "(", "sys", ".", "version_info", "[", "0", "]", "==", "3", ")", "and", "(", "type", "(", "txt", ")", "is", "str", ")", ":", "txt", "=", "txt", ".", "encode", "(", "'utf-8'", ")", "return", "c_AuxiliaryConsolePrint", "(", "clientID", ",", "consoleHandle", ",", "txt", ",", "operationMode", ")" ]
Please have a look at the function description/documentation in the V-REP user manual
[ "Please", "have", "a", "look", "at", "the", "function", "description", "/", "documentation", "in", "the", "V", "-", "REP", "user", "manual" ]
python
train
43.125
LPgenerator/django-db-mailer
dbmail/providers/sendinblue/mail.py
https://github.com/LPgenerator/django-db-mailer/blob/217a73c21ba5c6b68738f74b2c55a6dd2c1afe35/dbmail/providers/sendinblue/mail.py#L26-L32
def email_address_to_list(email_address): """Convert an email address to a list.""" realname, address = email.utils.parseaddr(email_address) return ( [address, realname] if realname and address else [email_address, email_address] )
[ "def", "email_address_to_list", "(", "email_address", ")", ":", "realname", ",", "address", "=", "email", ".", "utils", ".", "parseaddr", "(", "email_address", ")", "return", "(", "[", "address", ",", "realname", "]", "if", "realname", "and", "address", "else", "[", "email_address", ",", "email_address", "]", ")" ]
Convert an email address to a list.
[ "Convert", "an", "email", "address", "to", "a", "list", "." ]
python
train
36.714286
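How `email.utils.parseaddr` drives the two branches (standard-library behavior, easy to verify):

```python
import email.utils

# "Real Name <addr>" splits into both parts; a bare address yields an
# empty realname, which triggers the [email_address, email_address] fallback.
print(email.utils.parseaddr('Jane Doe <jane@example.com>'))
# ('Jane Doe', 'jane@example.com')  -> ['jane@example.com', 'Jane Doe']
print(email.utils.parseaddr('jane@example.com'))
# ('', 'jane@example.com')          -> ['jane@example.com', 'jane@example.com']
```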
stevearc/dql
dql/util.py
https://github.com/stevearc/dql/blob/e9d3aa22873076dae5ebd02e35318aa996b1e56a/dql/util.py#L113-L147
def eval_interval(interval): """ Evaluate an interval expression """ kwargs = { "years": 0, "months": 0, "weeks": 0, "days": 0, "hours": 0, "minutes": 0, "seconds": 0, "microseconds": 0, } for section in interval[1:]: name = section.getName() if name == "year": kwargs["years"] += int(section[0]) elif name == "month": kwargs["months"] += int(section[0]) elif name == "week": kwargs["weeks"] += int(section[0]) elif name == "day": kwargs["days"] += int(section[0]) elif name == "hour": kwargs["hours"] += int(section[0]) elif name == "minute": kwargs["minutes"] += int(section[0]) elif name == "second": kwargs["seconds"] += int(section[0]) elif name == "millisecond": kwargs["microseconds"] += 1000 * int(section[0]) elif name == "microsecond": kwargs["microseconds"] += int(section[0]) else: raise SyntaxError("Unrecognized interval type %r: %s" % (name, section)) return relativedelta(**kwargs)
[ "def", "eval_interval", "(", "interval", ")", ":", "kwargs", "=", "{", "\"years\"", ":", "0", ",", "\"months\"", ":", "0", ",", "\"weeks\"", ":", "0", ",", "\"days\"", ":", "0", ",", "\"hours\"", ":", "0", ",", "\"minutes\"", ":", "0", ",", "\"seconds\"", ":", "0", ",", "\"microseconds\"", ":", "0", ",", "}", "for", "section", "in", "interval", "[", "1", ":", "]", ":", "name", "=", "section", ".", "getName", "(", ")", "if", "name", "==", "\"year\"", ":", "kwargs", "[", "\"years\"", "]", "+=", "int", "(", "section", "[", "0", "]", ")", "elif", "name", "==", "\"month\"", ":", "kwargs", "[", "\"months\"", "]", "+=", "int", "(", "section", "[", "0", "]", ")", "elif", "name", "==", "\"week\"", ":", "kwargs", "[", "\"weeks\"", "]", "+=", "int", "(", "section", "[", "0", "]", ")", "elif", "name", "==", "\"day\"", ":", "kwargs", "[", "\"days\"", "]", "+=", "int", "(", "section", "[", "0", "]", ")", "elif", "name", "==", "\"hour\"", ":", "kwargs", "[", "\"hours\"", "]", "+=", "int", "(", "section", "[", "0", "]", ")", "elif", "name", "==", "\"minute\"", ":", "kwargs", "[", "\"minutes\"", "]", "+=", "int", "(", "section", "[", "0", "]", ")", "elif", "name", "==", "\"second\"", ":", "kwargs", "[", "\"seconds\"", "]", "+=", "int", "(", "section", "[", "0", "]", ")", "elif", "name", "==", "\"millisecond\"", ":", "kwargs", "[", "\"microseconds\"", "]", "+=", "1000", "*", "int", "(", "section", "[", "0", "]", ")", "elif", "name", "==", "\"microsecond\"", ":", "kwargs", "[", "\"microseconds\"", "]", "+=", "int", "(", "section", "[", "0", "]", ")", "else", ":", "raise", "SyntaxError", "(", "\"Unrecognized interval type %r: %s\"", "%", "(", "name", ",", "section", ")", ")", "return", "relativedelta", "(", "*", "*", "kwargs", ")" ]
Evaluate an interval expression
[ "Evaluate", "an", "interval", "expression" ]
python
train
33
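What `eval_interval` ultimately builds is a single `relativedelta`; a small sketch with invented quantities, noting that milliseconds are folded in as `microseconds * 1000`:

```python
from datetime import datetime
from dateutil.relativedelta import relativedelta

# 3 days, 2 hours, 250 milliseconds (entered as microseconds * 1000).
delta = relativedelta(days=3, hours=2, microseconds=1000 * 250)
print(datetime(2024, 1, 1) + delta)  # 2024-01-04 02:00:00.250000
```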
tmr232/Sark
sark/qt.py
https://github.com/tmr232/Sark/blob/bee62879c2aea553a3924d887e2b30f2a6008581/sark/qt.py#L109-L118
def add_menu(self, name): """Add a top-level menu. The menu manager only allows one menu of the same name. However, it does not make sure that there are no pre-existing menus of that name. """ if name in self._menus: raise exceptions.MenuAlreadyExists("Menu name {!r} already exists.".format(name)) menu = self._menu.addMenu(name) self._menus[name] = menu
[ "def", "add_menu", "(", "self", ",", "name", ")", ":", "if", "name", "in", "self", ".", "_menus", ":", "raise", "exceptions", ".", "MenuAlreadyExists", "(", "\"Menu name {!r} already exists.\"", ".", "format", "(", "name", ")", ")", "menu", "=", "self", ".", "_menu", ".", "addMenu", "(", "name", ")", "self", ".", "_menus", "[", "name", "]", "=", "menu" ]
Add a top-level menu. The menu manager only allows one menu of the same name. However, it does not make sure that there are no pre-existing menus of that name.
[ "Add", "a", "top", "-", "level", "menu", "." ]
python
train
41.5
stefanfoulis/django-class-based-auth-views
class_based_auth_views/views.py
https://github.com/stefanfoulis/django-class-based-auth-views/blob/9998e2b8c1e5714c33a774a23c1a07d7a5928597/class_based_auth_views/views.py#L42-L49
def form_valid(self, form): """ The user has provided valid credentials (this was checked in AuthenticationForm.is_valid()). So now we can check the test cookie stuff and log him in. """ self.check_and_delete_test_cookie() login(self.request, form.get_user()) return super(LoginView, self).form_valid(form)
[ "def", "form_valid", "(", "self", ",", "form", ")", ":", "self", ".", "check_and_delete_test_cookie", "(", ")", "login", "(", "self", ".", "request", ",", "form", ".", "get_user", "(", ")", ")", "return", "super", "(", "LoginView", ",", "self", ")", ".", "form_valid", "(", "form", ")" ]
The user has provided valid credentials (this was checked in AuthenticationForm.is_valid()). So now we can check the test cookie stuff and log him in.
[ "The", "user", "has", "provided", "valid", "credentials", "(", "this", "was", "checked", "in", "AuthenticationForm", ".", "is_valid", "()", ")", ".", "So", "now", "we", "can", "check", "the", "test", "cookie", "stuff", "and", "log", "him", "in", "." ]
python
train
44.375
lappis-unb/salic-ml
src/salicml/metrics/finance/to_verify_funds.py
https://github.com/lappis-unb/salic-ml/blob/1b3ebc4f8067740999897ccffd9892dc94482a93/src/salicml/metrics/finance/to_verify_funds.py#L8-L19
def raised_funds_by_project(df): """ Raised funds organized by project. """ df['CaptacaoReal'] = df['CaptacaoReal'].apply( pd.to_numeric ) return ( df[['Pronac', 'CaptacaoReal']] .groupby(['Pronac']) .sum() )
[ "def", "raised_funds_by_project", "(", "df", ")", ":", "df", "[", "'CaptacaoReal'", "]", "=", "df", "[", "'CaptacaoReal'", "]", ".", "apply", "(", "pd", ".", "to_numeric", ")", "return", "(", "df", "[", "[", "'Pronac'", ",", "'CaptacaoReal'", "]", "]", ".", "groupby", "(", "[", "'Pronac'", "]", ")", ".", "sum", "(", ")", ")" ]
Raised funds organized by project.
[ "Raised", "funds", "organized", "by", "project", "." ]
python
train
21.416667
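A toy reproduction with invented values: `CaptacaoReal` arrives as strings, hence the `pd.to_numeric` pass before the per-`Pronac` groupby-sum:

```python
import pandas as pd

df = pd.DataFrame({'Pronac': ['A1', 'A1', 'B2'],
                   'CaptacaoReal': ['100.0', '50.0', '70.0']})
df['CaptacaoReal'] = df['CaptacaoReal'].apply(pd.to_numeric)
print(df[['Pronac', 'CaptacaoReal']].groupby(['Pronac']).sum())
#         CaptacaoReal
# Pronac
# A1             150.0
# B2              70.0
```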
bpsmith/tia
tia/bbg/bbg_com.py
https://github.com/bpsmith/tia/blob/a7043b6383e557aeea8fc7112bbffd6e36a230e9/tia/bbg/bbg_com.py#L419-L428
def on_event(self, evt, is_final): """ this is invoked in response to COM PumpWaitingMessages - different thread """ for msg in XmlHelper.message_iter(evt): # Single security element in historical request node = msg.GetElement('securityData') if node.HasElement('securityError'): secid = XmlHelper.get_child_value(node, 'security') self.security_errors.append(XmlHelper.as_security_error(node.GetElement('securityError'), secid)) else: self.on_security_data_node(node)
[ "def", "on_event", "(", "self", ",", "evt", ",", "is_final", ")", ":", "for", "msg", "in", "XmlHelper", ".", "message_iter", "(", "evt", ")", ":", "# Single security element in historical request", "node", "=", "msg", ".", "GetElement", "(", "'securityData'", ")", "if", "node", ".", "HasElement", "(", "'securityError'", ")", ":", "secid", "=", "XmlHelper", ".", "get_child_value", "(", "node", ",", "'security'", ")", "self", ".", "security_errors", ".", "append", "(", "XmlHelper", ".", "as_security_error", "(", "node", ".", "GetElement", "(", "'securityError'", ")", ",", "secid", ")", ")", "else", ":", "self", ".", "on_security_data_node", "(", "node", ")" ]
this is invoked in response to COM PumpWaitingMessages - different thread
[ "this", "is", "invoked", "in", "response", "to", "COM", "PumpWaitingMessages", "-", "different", "thread" ]
python
train
57.6
bhmm/bhmm
bhmm/hmm/generic_hmm.py
https://github.com/bhmm/bhmm/blob/9804d18c2ddb684fb4d90b544cc209617a89ca9a/bhmm/hmm/generic_hmm.py#L167-L172
def is_stationary(self): r""" Whether the MSM is stationary, i.e. whether the initial distribution is the stationary distribution of the hidden transition matrix. """ # for disconnected matrices, the stationary distribution depends on the estimator, so we can't compute # it directly. Therefore we test whether the initial distribution is stationary. return np.allclose(np.dot(self._Pi, self._Tij), self._Pi)
[ "def", "is_stationary", "(", "self", ")", ":", "# for disconnected matrices, the stationary distribution depends on the estimator, so we can't compute", "# it directly. Therefore we test whether the initial distribution is stationary.", "return", "np", ".", "allclose", "(", "np", ".", "dot", "(", "self", ".", "_Pi", ",", "self", ".", "_Tij", ")", ",", "self", ".", "_Pi", ")" ]
r""" Whether the MSM is stationary, i.e. whether the initial distribution is the stationary distribution of the hidden transition matrix.
[ "r", "Whether", "the", "MSM", "is", "stationary", "i", ".", "e", ".", "whether", "the", "initial", "distribution", "is", "the", "stationary", "distribution", "of", "the", "hidden", "transition", "matrix", "." ]
python
train
74
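The check amounts to testing `pi @ T == pi`. A small numpy sketch that recovers the stationary distribution as the left eigenvector at eigenvalue 1 and confirms the test (matrix values invented):

```python
import numpy as np

T = np.array([[0.9, 0.1],
              [0.2, 0.8]])
# Left eigenvector of T at eigenvalue 1 == right eigenvector of T.T.
evals, evecs = np.linalg.eig(T.T)
pi = np.real(evecs[:, np.argmax(np.real(evals))])
pi /= pi.sum()                  # normalize to a probability vector
print(np.allclose(pi @ T, pi))  # True -- the test used by is_stationary
```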
saltstack/salt
salt/cli/api.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/cli/api.py#L58-L71
def start(self): ''' Start the actual master. If sub-classed, don't **ever** forget to run: super(YourSubClass, self).start() NOTE: Run any required code before calling `super()`. ''' super(SaltAPI, self).start() if check_user(self.config['user']): log.info('The salt-api is starting up') self.api.run()
[ "def", "start", "(", "self", ")", ":", "super", "(", "SaltAPI", ",", "self", ")", ".", "start", "(", ")", "if", "check_user", "(", "self", ".", "config", "[", "'user'", "]", ")", ":", "log", ".", "info", "(", "'The salt-api is starting up'", ")", "self", ".", "api", ".", "run", "(", ")" ]
Start the actual master. If sub-classed, don't **ever** forget to run: super(YourSubClass, self).start() NOTE: Run any required code before calling `super()`.
[ "Start", "the", "actual", "master", "." ]
python
train
27.5
cokelaer/spectrum
src/spectrum/covar.py
https://github.com/cokelaer/spectrum/blob/bad6c32e3f10e185098748f67bb421b378b06afe/src/spectrum/covar.py#L10-L257
def arcovar_marple(x, order): r"""Estimate AR model parameters using covariance method This implementation is based on [Marple]_. This code is far more complicated and slower than :func:`arcovar` function, which is now the official version. See :func:`arcovar` for a detailed description of Covariance method. This function should be used in place of arcovar only if order<=4, for which :func:`arcovar` does not work. Fast algorithm for the solution of the covariance least squares normal equations from Marple. :param array X: Array of complex data samples :param int order: Order of linear prediction model :return: * AF - Array of complex forward linear prediction coefficients * PF - Real forward linear prediction variance at order IP * AB - Array of complex backward linear prediction coefficients * PB - Real backward linear prediction variance at order IP * PV - store linear prediction coefficients .. note:: this code and the original code in Marple diverge for ip>10. it seems that this is related to single precision used with complex type in fortran whereas numpy uses double precision for complex type. :validation: the AR parameters are the same as those returned by a completely different function :func:`arcovar`. :References: [Marple]_ """ assert len(x) >= order, "X must be dimensioned >=N" # ----------------------------------------------------- Initialization x = np.array(x) N = len(x) # Equations 8.C.42 r0 = sum(abs(x)**2.) r1 = abs(x[0])**2 rN = abs(x[N-1])**2 pf = r0 - r1 pb = r0 - rN delta = 1. - r1 / r0 gamma = 1. - rN / r0 c = np.zeros(N, dtype=complex) d = np.zeros(N, dtype=complex) r = np.zeros(N, dtype=complex) af = np.zeros(N, dtype=complex) ab = np.zeros(N, dtype=complex) c[0] = x[N-1].conjugate() / r0 d[0] = x[0].conjugate() / r0 # special case if order == 0: pf = r0 / float(N) pb = pf return af, pf, ab, pb, 0 # ---------------------------------------------------------- MAIN LOOP #ip +1 because we want to enter in the loop to run the first part of the code. pbv = [] for m in range(0, order+1): logging.debug('----------------------------m=', m) logging.debug(c[0:2]) logging.debug(d[0:2]) r1 = 1./pf r2 = 1./pb r3 = 1./delta r4 = 1./gamma #logging.debug('starting r1r2r3r4=', r1, r2, r3, r4, pf, pb, delta, gamma) #Order update: AF and AB vectors ; time update: C and D vectors temp = 0.+0.j for k in range(m+1, N): temp = temp + x[k]*x[k-m-1].conjugate() r[m] = temp.conjugate() theta = x[0] * c[m] #print(('theta', theta)) # print(('c=', c[0:2])) # print(('d=', d[0:2])) if m == 0: pass else: for k in range(0, m): theta = theta + x[m-k] * c[k] # Eq. (8.C.39) r[k] = r[k] - x[N-m-1] * x[N-m+k].conjugate() # Eq. (8.C.32) temp = temp + af[m-k-1] * r[k].conjugate() #print 'loop1 k=', k #print ' theta=',theta, 'r[k]=',r[k], 'temp=', temp #print ' c=',c[k], 'af=',af[m-k-1] """if m > 0: if debug: print((m, N-m)) print(('Xk=0',x[m-0],x[N-m-1], x[N-m+0])) if m > 1: if debug: print('Xk=1',x[m-1],x[N-m-1], x[N-m+1]) """ c1 = -temp * r2 c2 = -r1 * temp.conjugate() c3 = theta * r3 c4 = r4 *theta.conjugate() #if debug: # print('c1 c2 c3 c4 before af=',c1 ,c2 ,c3 ,c4) af[m] = c1 # ! Eq. (8.C.19) ab[m] = c2 # ! Eq. (8.C.22) save = c[m] c[m] = save + c3*d[m] d[m] = d[m] + c4*save #if debug: # print('res',m,'af[m]=',af[m], ab[m], save, 'temp=',temp) if m == 0: pass else: #if debug:print('af before', af[0:2]) for k in range(0, m): save = af[k] af[k] = save + c1 * ab[m-k-1] # Eq. (8.C.18) ab[m-k-1] = ab[m-k-1] + c2 * save # Eq. (8.C.21) save = c[k] c[k] = save + c3*d[k] # Eq. (8.C.37) d[k] = d[k] + c4*save # Eq. (8.C.38) #if debug: # print('loop2 k=', k) # print(' af[k]=', af[k]) # print(' ab[m-k-1]=', ab[m-k-1]) # print(' c[k]=', c[k]) # print(' d[k]=', d[k]) #if debug: # print('af after=', af[0:2]) # print('ab=', ab[0:2]) r5 = temp.real**2 + temp.imag**2 pf = pf - r5*r2 # Eq. (8.C.20) pb = pb - r5*r1 # Eq. (8.C.23) r5 = theta.real**2 + theta.imag**2 delta = delta - r5*r4 # Eq. (8.C.39) gamma = gamma - r5*r3 # Eq. (8.C.40) #if debug: # print('r5r2r1deltagamma', r5, r2, r1 , delta, gamma) # print('pf before norm', pf, pb, N-m-1) if m != order-1: pass else: pf = pf / float(N-m-1) pb = pb / float(N-m-1) #if debug: # print('ENDING', N-m-1) break #if debug: # print('pf and pb', pf, pb) if pf > 0 and pb > 0: pass else: raise ValueError("Negative PF or PB value") if (delta > 0. and delta <=1 and gamma > 0. and gamma <=1): pass else: raise ValueError("Invalid delta or gamma value") #C Time update: AF and AB vectors; order update: C and D vectors r1 = 1./pf r2 = 1./pb r3 = 1./delta r4 = 1./gamma #if debug: # print('--------time update', r1, r2, r3, r4, m+1, N-m-1, x[m+1], x[N-m-2]) ef = x[m+1] eb = x[(N-1)-m-1] for k in range(0,m+1): #print 'k=', k, 'ef=', ef, ' eb=',eb,' af=',af[k], ab[k] #print x[m-k],x[N-m+k-1] ef = ef + af[k] * x[m-k] # Eq. (8.C.1) eb = eb + ab[k] * x[N-m+k-1] # Eq. (8.C.2) #ef = sum(af) #if debug: # print('efweb', ef , eb) c1 = ef*r3 c2 = eb*r4 c3 = eb.conjugate() * r2 c4 = ef.conjugate() * r1 #if debug: # print('c1c2c3c4', c1, c2, c3, c4) # print('af before', af[0:2]) for k in range(m, -1, -1): save = af[k] af[k] = save + c1 * d[k] # Eq. (8.C.33) d[k+1] = d[k] + c4 * save # Eq. (8.C.25) save = ab[k] ab[k] = save + c2 * c[m-k] # Eq. (8.C.35) c[m-k] = c[m-k] + c3 * save # Eq. (8.C.24) #if debug: # print('af after', af[0:2]) # print('d', d[0:2]) # print('ab', ab[0:2]) # print('c', c[0:2]) #if debug:print('Pb before', pf, pb) c[m+1] = c3 d[0] = c4 #r5 = abs(ef)**2 r5 = ef.real**2 + ef.imag**2 pf = pf - r5 * r3 # Eq. (8.C.34) delta = delta-r5 * r1 # Eq. (8.C.30) #r5 = abs(eb)**2 r5 = eb.real**2 + eb.imag**2 pb = pb - r5 * r4 # Eq. (8.C.36) #if debug: # print('Pb---------------------', m, pb, r5, r4) gamma = gamma-r5*r2 # Eq. (8.C.31) pbv.append(pb) if (pf > 0. and pb > 0.): pass else: raise ValueError("Negative PF or PB value") #if debug: # print(delta, gamma) if (delta > 0. and delta <= 1.) and (gamma > 0. and gamma <= 1.): pass else: raise ValueError("Invalid delta or gamma value") #af=array of forward coeff #ab=array of backward coeff #pb=backward variance #pf=forward variance return af, pf, ab, pb, pbv
[ "def", "arcovar_marple", "(", "x", ",", "order", ")", ":", "assert", "len", "(", "x", ")", ">=", "order", ",", "\"X must be dimensioned >=N\"", "# ----------------------------------------------------- Initialization", "x", "=", "np", ".", "array", "(", "x", ")", "N", "=", "len", "(", "x", ")", "# Equations 8.C.42", "r0", "=", "sum", "(", "abs", "(", "x", ")", "**", "2.", ")", "r1", "=", "abs", "(", "x", "[", "0", "]", ")", "**", "2", "rN", "=", "abs", "(", "x", "[", "N", "-", "1", "]", ")", "**", "2", "pf", "=", "r0", "-", "r1", "pb", "=", "r0", "-", "rN", "delta", "=", "1.", "-", "r1", "/", "r0", "gamma", "=", "1.", "-", "rN", "/", "r0", "c", "=", "np", ".", "zeros", "(", "N", ",", "dtype", "=", "complex", ")", "d", "=", "np", ".", "zeros", "(", "N", ",", "dtype", "=", "complex", ")", "r", "=", "np", ".", "zeros", "(", "N", ",", "dtype", "=", "complex", ")", "af", "=", "np", ".", "zeros", "(", "N", ",", "dtype", "=", "complex", ")", "ab", "=", "np", ".", "zeros", "(", "N", ",", "dtype", "=", "complex", ")", "c", "[", "0", "]", "=", "x", "[", "N", "-", "1", "]", ".", "conjugate", "(", ")", "/", "r0", "d", "[", "0", "]", "=", "x", "[", "0", "]", ".", "conjugate", "(", ")", "/", "r0", "# special case", "if", "order", "==", "0", ":", "pf", "=", "r0", "/", "float", "(", "N", ")", "pb", "=", "pf", "return", "af", ",", "pf", ",", "ab", ",", "pb", ",", "0", "# ---------------------------------------------------------- MAIN LOOP", "#ip +1 because we want to enter in the loop to run the first part of the code.", "pbv", "=", "[", "]", "for", "m", "in", "range", "(", "0", ",", "order", "+", "1", ")", ":", "logging", ".", "debug", "(", "'----------------------------m='", ",", "m", ")", "logging", ".", "debug", "(", "c", "[", "0", ":", "2", "]", ")", "logging", ".", "debug", "(", "d", "[", "0", ":", "2", "]", ")", "r1", "=", "1.", "/", "pf", "r2", "=", "1.", "/", "pb", "r3", "=", "1.", "/", "delta", "r4", "=", "1.", "/", "gamma", "#logging.debug('starting r1r2r3r4=', r1, r2, r3, r4, pf, pb, delta, gamma)", "#Order update: AF and AB vectors ; time update: C and D vectors", "temp", "=", "0.", "+", "0.j", "for", "k", "in", "range", "(", "m", "+", "1", ",", "N", ")", ":", "temp", "=", "temp", "+", "x", "[", "k", "]", "*", "x", "[", "k", "-", "m", "-", "1", "]", ".", "conjugate", "(", ")", "r", "[", "m", "]", "=", "temp", ".", "conjugate", "(", ")", "theta", "=", "x", "[", "0", "]", "*", "c", "[", "m", "]", "#print(('theta', theta))", "# print(('c=', c[0:2]))", "# print(('d=', d[0:2]))", "if", "m", "==", "0", ":", "pass", "else", ":", "for", "k", "in", "range", "(", "0", ",", "m", ")", ":", "theta", "=", "theta", "+", "x", "[", "m", "-", "k", "]", "*", "c", "[", "k", "]", "# Eq. (8.C.39)", "r", "[", "k", "]", "=", "r", "[", "k", "]", "-", "x", "[", "N", "-", "m", "-", "1", "]", "*", "x", "[", "N", "-", "m", "+", "k", "]", ".", "conjugate", "(", ")", "# Eq. 
(8.C.32)", "temp", "=", "temp", "+", "af", "[", "m", "-", "k", "-", "1", "]", "*", "r", "[", "k", "]", ".", "conjugate", "(", ")", "#print 'loop1 k=', k", "#print ' theta=',theta, 'r[k]=',r[k], 'temp=', temp", "#print ' c=',c[k], 'af=',af[m-k-1]", "\"\"\"if m > 0:\n if debug:\n print((m, N-m))\n print(('Xk=0',x[m-0],x[N-m-1], x[N-m+0]))\n if m > 1:\n if debug:\n print('Xk=1',x[m-1],x[N-m-1], x[N-m+1])\n \"\"\"", "c1", "=", "-", "temp", "*", "r2", "c2", "=", "-", "r1", "*", "temp", ".", "conjugate", "(", ")", "c3", "=", "theta", "*", "r3", "c4", "=", "r4", "*", "theta", ".", "conjugate", "(", ")", "#if debug:", "# print('c1 c2 c3 c4 before af=',c1 ,c2 ,c3 ,c4)", "af", "[", "m", "]", "=", "c1", "# ! Eq. (8.C.19)", "ab", "[", "m", "]", "=", "c2", "# ! Eq. (8.C.22)", "save", "=", "c", "[", "m", "]", "c", "[", "m", "]", "=", "save", "+", "c3", "*", "d", "[", "m", "]", "d", "[", "m", "]", "=", "d", "[", "m", "]", "+", "c4", "*", "save", "#if debug:", "# print('res',m,'af[m]=',af[m], ab[m], save, 'temp=',temp)", "if", "m", "==", "0", ":", "pass", "else", ":", "#if debug:print('af before', af[0:2])", "for", "k", "in", "range", "(", "0", ",", "m", ")", ":", "save", "=", "af", "[", "k", "]", "af", "[", "k", "]", "=", "save", "+", "c1", "*", "ab", "[", "m", "-", "k", "-", "1", "]", "# Eq. (8.C.18)", "ab", "[", "m", "-", "k", "-", "1", "]", "=", "ab", "[", "m", "-", "k", "-", "1", "]", "+", "c2", "*", "save", "# Eq. (8.C.21)", "save", "=", "c", "[", "k", "]", "c", "[", "k", "]", "=", "save", "+", "c3", "*", "d", "[", "k", "]", "# Eq. (8.C.37)", "d", "[", "k", "]", "=", "d", "[", "k", "]", "+", "c4", "*", "save", "# Eq. (8.C.38)", "#if debug:", "# print('loop2 k=', k)", "# print(' af[k]=', af[k])", "# print(' ab[m-k-1]=', ab[m-k-1])", "# print(' c[k]=', c[k])", "# print(' d[k]=', d[k])", "#if debug:", "# print('af after=', af[0:2])", "# print('ab=', ab[0:2])", "r5", "=", "temp", ".", "real", "**", "2", "+", "temp", ".", "imag", "**", "2", "pf", "=", "pf", "-", "r5", "*", "r2", "# Eq. (8.C.20)", "pb", "=", "pb", "-", "r5", "*", "r1", "# Eq. (8.C.23)", "r5", "=", "theta", ".", "real", "**", "2", "+", "theta", ".", "imag", "**", "2", "delta", "=", "delta", "-", "r5", "*", "r4", "# Eq. (8.C.39)", "gamma", "=", "gamma", "-", "r5", "*", "r3", "# Eq. 
(8.C.40)", "#if debug:", "# print('r5r2r1deltagamma', r5, r2, r1 , delta, gamma)", "# print('pf before norm', pf, pb, N-m-1)", "if", "m", "!=", "order", "-", "1", ":", "pass", "else", ":", "pf", "=", "pf", "/", "float", "(", "N", "-", "m", "-", "1", ")", "pb", "=", "pb", "/", "float", "(", "N", "-", "m", "-", "1", ")", "#if debug:", "# print('ENDING', N-m-1)", "break", "#if debug:", "# print('pf and pb', pf, pb)", "if", "pf", ">", "0", "and", "pb", ">", "0", ":", "pass", "else", ":", "ValueError", "(", "\"Negative PF or PB value\"", ")", "if", "(", "delta", ">", "0.", "and", "delta", "<=", "1", "and", "gamma", ">", "0.", "and", "gamma", "<=", "1", ")", ":", "pass", "else", ":", "ValueError", "(", "\"Invalid delta or gamma value\"", ")", "#C Time update: AF and AB vectors; order update: C and D vectors", "r1", "=", "1.", "/", "pf", "r2", "=", "1.", "/", "pb", "r3", "=", "1.", "/", "delta", "r4", "=", "1.", "/", "gamma", "#if debug:", "# print('--------time update', r1, r2, r3, r4, m+1, N-m-1, x[m+1], x[N-m-2])", "ef", "=", "x", "[", "m", "+", "1", "]", "eb", "=", "x", "[", "(", "N", "-", "1", ")", "-", "m", "-", "1", "]", "for", "k", "in", "range", "(", "0", ",", "m", "+", "1", ")", ":", "#print 'k=', k, 'ef=', ef, ' eb=',eb,' af=',af[k], ab[k]", "#print x[m-k],x[N-m+k-1]", "ef", "=", "ef", "+", "af", "[", "k", "]", "*", "x", "[", "m", "-", "k", "]", "# Eq. (8.C.1)", "eb", "=", "eb", "+", "ab", "[", "k", "]", "*", "x", "[", "N", "-", "m", "+", "k", "-", "1", "]", "# Eq. (8.C.2)", "#ef = sum(af)", "#if debug:", "# print('efweb', ef , eb)", "c1", "=", "ef", "*", "r3", "c2", "=", "eb", "*", "r4", "c3", "=", "eb", ".", "conjugate", "(", ")", "*", "r2", "c4", "=", "ef", ".", "conjugate", "(", ")", "*", "r1", "#if debug:", "# print('c1c2c3c4', c1, c2, c3, c4)", "# print('af before', af[0:2])", "for", "k", "in", "range", "(", "m", ",", "-", "1", ",", "-", "1", ")", ":", "save", "=", "af", "[", "k", "]", "af", "[", "k", "]", "=", "save", "+", "c1", "*", "d", "[", "k", "]", "# Eq. (8.C.33)", "d", "[", "k", "+", "1", "]", "=", "d", "[", "k", "]", "+", "c4", "*", "save", "# Eq. (8.C.25)", "save", "=", "ab", "[", "k", "]", "ab", "[", "k", "]", "=", "save", "+", "c2", "*", "c", "[", "m", "-", "k", "]", "# Eq. (8.C.35)", "c", "[", "m", "-", "k", "]", "=", "c", "[", "m", "-", "k", "]", "+", "c3", "*", "save", "# Eq. (8.C.24)", "#if debug:", "# print('af after', af[0:2])", "# print('d', d[0:2])", "# print('ab', ab[0:2])", "# print('c', c[0:2])", "#if debug:print('Pb before', pf, pb)", "c", "[", "m", "+", "1", "]", "=", "c3", "d", "[", "0", "]", "=", "c4", "#r5 = abs(ef)**2", "r5", "=", "ef", ".", "real", "**", "2", "+", "ef", ".", "imag", "**", "2", "pf", "=", "pf", "-", "r5", "*", "r3", "# Eq. (8.C.34)", "delta", "=", "delta", "-", "r5", "*", "r1", "# Eq. (8.C.30)", "#r5 = abs(eb)**2", "r5", "=", "eb", ".", "real", "**", "2", "+", "eb", ".", "imag", "**", "2", "pb", "=", "pb", "-", "r5", "*", "r4", "# Eq. (8.C.36)", "#if debug:", "# print('Pb---------------------', m, pb, r5, r4)", "gamma", "=", "gamma", "-", "r5", "*", "r2", "# Eq. 
(8.C.31)", "pbv", ".", "append", "(", "pb", ")", "if", "(", "pf", ">", "0.", "and", "pb", ">", "0.", ")", ":", "pass", "else", ":", "ValueError", "(", "\"Negative PF or PB value\"", ")", "#if debug:", "# print(delta, gamma)", "if", "(", "delta", ">", "0.", "and", "delta", "<=", "1.", ")", "and", "(", "gamma", ">", "0.", "and", "gamma", "<=", "1.", ")", ":", "pass", "else", ":", "ValueError", "(", "\"Invalid delta or gamma value\"", ")", "#af=array of forward coeff", "#ab=array of barward coeff", "#pb=backward variance", "#pf=forward variance", "return", "af", ",", "pf", ",", "ab", ",", "pb", ",", "pbv" ]
r"""Estimate AR model parameters using covariance method This implementation is based on [Marple]_. This code is far more complicated and slower than :func:`arcovar` function, which is now the official version. See :func:`arcovar` for a detailed description of Covariance method. This function should be used in place of arcovar only if order<=4, for which :func:`arcovar` does not work. Fast algorithm for the solution of the covariance least squares normal equations from Marple. :param array X: Array of complex data samples :param int oder: Order of linear prediction model :return: * AF - Array of complex forward linear prediction coefficients * PF - Real forward linear prediction variance at order IP * AB - Array of complex backward linear prediction coefficients * PB - Real backward linear prediction variance at order IP * PV - store linear prediction coefficients .. note:: this code and the original code in Marple diverge for ip>10. it seems that this is related to single precision used with complex type in fortran whereas numpy uses double precision for complex type. :validation: the AR parameters are the same as those returned by a completely different function :func:`arcovar`. :References: [Marple]_
[ "r", "Estimate", "AR", "model", "parameters", "using", "covariance", "method" ]
python
valid
32.987903
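A sketch of input data one might hand to `arcovar_marple` in the order<=4 regime the docstring mentions; the AR(2) coefficients are invented and the fit call is left commented since it assumes the function is importable:

```python
import numpy as np

# Synthetic complex AR(2) series; coefficients chosen so the poles sit
# inside the unit circle (|root| ~ 0.89, i.e. a stable process).
rng = np.random.default_rng(0)
x = np.zeros(256, dtype=complex)
for n in range(2, 256):
    x[n] = 1.3 * x[n - 1] - 0.8 * x[n - 2] + rng.standard_normal()
# af, pf, ab, pb, pbv = arcovar_marple(x, 2)  # assumes the function above
```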
podio/podio-py
pypodio2/encode.py
https://github.com/podio/podio-py/blob/5ce956034a06c98b0ef18fcd940b36da0908ad6c/pypodio2/encode.py#L315-L319
def get_body_size(params, boundary): """Returns the number of bytes that the multipart/form-data encoding of ``params`` will be.""" size = sum(p.get_size(boundary) for p in MultipartParam.from_params(params)) return size + len(boundary) + 6
[ "def", "get_body_size", "(", "params", ",", "boundary", ")", ":", "size", "=", "sum", "(", "p", ".", "get_size", "(", "boundary", ")", "for", "p", "in", "MultipartParam", ".", "from_params", "(", "params", ")", ")", "return", "size", "+", "len", "(", "boundary", ")", "+", "6" ]
Returns the number of bytes that the multipart/form-data encoding of ``params`` will be.
[ "Returns", "the", "number", "of", "bytes", "that", "the", "multipart", "/", "form", "-", "data", "encoding", "of", "params", "will", "be", "." ]
python
train
50.4
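Where the `+ len(boundary) + 6` plausibly comes from: the closing delimiter of a multipart body is two dashes, the boundary, two dashes, and a CRLF:

```python
# "--" + boundary + "--" + "\r\n" is exactly len(boundary) + 6 bytes.
boundary = 'deadbeefcafe'
terminator = '--%s--\r\n' % boundary
print(len(terminator) == len(boundary) + 6)  # True
```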
log2timeline/plaso
plaso/cli/helpers/database_config.py
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/cli/helpers/database_config.py#L49-L75
def ParseOptions(cls, options, output_module): """Parses and validates options. Args: options (argparse.Namespace): parser options. output_module (OutputModule): output module to configure. Raises: BadConfigObject: when the output module object does not have the SetCredentials or SetDatabaseName methods. """ if not hasattr(output_module, 'SetCredentials'): raise errors.BadConfigObject('Unable to set username information.') if not hasattr(output_module, 'SetDatabaseName'): raise errors.BadConfigObject('Unable to set database information.') username = cls._ParseStringOption( options, 'username', default_value=cls._DEFAULT_USERNAME) password = cls._ParseStringOption( options, 'password', default_value=cls._DEFAULT_PASSWORD) name = cls._ParseStringOption( options, 'db_name', default_value=cls._DEFAULT_NAME) output_module.SetCredentials(username=username, password=password) output_module.SetDatabaseName(name) server_config.ServerArgumentsHelper.ParseOptions(options, output_module)
[ "def", "ParseOptions", "(", "cls", ",", "options", ",", "output_module", ")", ":", "if", "not", "hasattr", "(", "output_module", ",", "'SetCredentials'", ")", ":", "raise", "errors", ".", "BadConfigObject", "(", "'Unable to set username information.'", ")", "if", "not", "hasattr", "(", "output_module", ",", "'SetDatabaseName'", ")", ":", "raise", "errors", ".", "BadConfigObject", "(", "'Unable to set database information.'", ")", "username", "=", "cls", ".", "_ParseStringOption", "(", "options", ",", "'username'", ",", "default_value", "=", "cls", ".", "_DEFAULT_USERNAME", ")", "password", "=", "cls", ".", "_ParseStringOption", "(", "options", ",", "'password'", ",", "default_value", "=", "cls", ".", "_DEFAULT_PASSWORD", ")", "name", "=", "cls", ".", "_ParseStringOption", "(", "options", ",", "'db_name'", ",", "default_value", "=", "cls", ".", "_DEFAULT_NAME", ")", "output_module", ".", "SetCredentials", "(", "username", "=", "username", ",", "password", "=", "password", ")", "output_module", ".", "SetDatabaseName", "(", "name", ")", "server_config", ".", "ServerArgumentsHelper", ".", "ParseOptions", "(", "options", ",", "output_module", ")" ]
Parses and validates options. Args: options (argparse.Namespace): parser options. output_module (OutputModule): output module to configure. Raises: BadConfigObject: when the output module object does not have the SetCredentials or SetDatabaseName methods.
[ "Parses", "and", "validates", "options", "." ]
python
train
40.037037
gem/oq-engine
openquake/hazardlib/gsim/skarlatoudis_2013.py
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/skarlatoudis_2013.py#L122-L130
def _compute_distance(self, rup, dists, C): """ equation 3 page 1960: ``c31 * logR + c32 * (R-Rref)`` """ rref = 1.0 c31 = -1.7 return (c31 * np.log10(dists.rhypo) + C['c32'] * (dists.rhypo - rref))
[ "def", "_compute_distance", "(", "self", ",", "rup", ",", "dists", ",", "C", ")", ":", "rref", "=", "1.0", "c31", "=", "-", "1.7", "return", "(", "c31", "*", "np", ".", "log10", "(", "dists", ".", "rhypo", ")", "+", "C", "[", "'c32'", "]", "*", "(", "dists", ".", "rhypo", "-", "rref", ")", ")" ]
equation 3 page 1960: ``c31 * logR + c32 * (R-Rref)``
[ "equation", "3", "page", "1960", ":" ]
python
train
27.333333
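Equation 3 in isolation; the `c32` value below is illustrative, not the coefficient table shipped with the GSIM:

```python
import numpy as np

rhypo = np.array([10.0, 50.0, 100.0])   # hypocentral distances, km
c31, c32, rref = -1.7, -0.002, 1.0      # c32 is a placeholder value
print(c31 * np.log10(rhypo) + c32 * (rhypo - rref))
```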
jotacor/ComunioPy
ComunioPy/__init__.py
https://github.com/jotacor/ComunioPy/blob/2dd71e3e197b497980ea7b9cfbec1da64dca3ed0/ComunioPy/__init__.py#L172-L182
def info_player_id(self,name): '''Get id using football player name''' number = 0 name=name.title().replace(" ", "+") headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain",'Referer': 'http://'+self.domain+'/team_news.phtml',"User-Agent": user_agent} req = self.session.get('http://stats.comunio.es/search.php?name='+name,headers=headers).content soup = BeautifulSoup(req) for i in soup.find_all('a',{'class','nowrap'}): number = re.search("([0-9]+)-", str(i)).group(1) break # Only return the first match return number
[ "def", "info_player_id", "(", "self", ",", "name", ")", ":", "number", "=", "0", "name", "=", "name", ".", "title", "(", ")", ".", "replace", "(", "\" \"", ",", "\"+\"", ")", "headers", "=", "{", "\"Content-type\"", ":", "\"application/x-www-form-urlencoded\"", ",", "\"Accept\"", ":", "\"text/plain\"", ",", "'Referer'", ":", "'http://'", "+", "self", ".", "domain", "+", "'/team_news.phtml'", ",", "\"User-Agent\"", ":", "user_agent", "}", "req", "=", "self", ".", "session", ".", "get", "(", "'http://stats.comunio.es/search.php?name='", "+", "name", ",", "headers", "=", "headers", ")", ".", "content", "soup", "=", "BeautifulSoup", "(", "req", ")", "for", "i", "in", "soup", ".", "find_all", "(", "'a'", ",", "{", "'class'", ",", "'nowrap'", "}", ")", ":", "number", "=", "re", ".", "search", "(", "\"([0-9]+)-\"", ",", "str", "(", "i", ")", ")", ".", "group", "(", "1", ")", "break", "# Only return the first match", "return", "number" ]
Get id using football player name
[ "Get", "id", "using", "football", "player", "name" ]
python
train
58
KKBOX/OpenAPI-Python
kkbox_developer_sdk/auth_flow.py
https://github.com/KKBOX/OpenAPI-Python/blob/77aa22fd300ed987d5507a5b66b149edcd28047d/kkbox_developer_sdk/auth_flow.py#L27-L60
def fetch_access_token_by_client_credentials(self): ''' There are three ways to let you start using KKBOX's Open/Partner API. The first way among them is to generate a client credential to fetch an access token to let KKBOX identify you. It allows you to access public data from KKBOX such as public albums, playlists and so on. However, you cannot use client credentials to access private data of a user. You have to let users log in to KKBOX and grant permissions for you to do so. You cannot use client credentials to do media playback either, since it requires a Premium Membership. :return: an access token :rtype: :class:`kkbox_sdk.KKBOXAccessToken` See `https://docs-en.kkbox.codes/docs/appendix-a`. ''' client_credential_base = '%s:%s' % (self.client_id, self.client_secret) try: client_credentials = base64.b64encode( bytes(client_credential_base, 'utf-8')) except: client_credentials = base64.b64encode(client_credential_base) client_credentials = client_credentials.decode('utf-8') headers = {'Authorization': 'Basic ' + client_credentials, 'Content-type': 'application/x-www-form-urlencoded'} post_parameters = {'grant_type': 'client_credentials', 'scope': 'user_profile user_territory'} json_object = self.http._post_data(KKBOXOAuth.OAUTH_TOKEN_URL, post_parameters, headers) self.access_token = KKBOXAccessToken(**json_object) return self.access_token
[ "def", "fetch_access_token_by_client_credentials", "(", "self", ")", ":", "client_credential_base", "=", "'%s:%s'", "%", "(", "self", ".", "client_id", ",", "self", ".", "client_secret", ")", "try", ":", "client_credentials", "=", "base64", ".", "b64encode", "(", "bytes", "(", "client_credential_base", ",", "'utf-8'", ")", ")", "except", ":", "client_credentials", "=", "base64", ".", "b64encode", "(", "client_credential_base", ")", "client_credentials", "=", "client_credentials", ".", "decode", "(", "'utf-8'", ")", "headers", "=", "{", "'Authorization'", ":", "'Basic '", "+", "client_credentials", ",", "'Content-type'", ":", "'application/x-www-form-urlencoded'", "}", "post_parameters", "=", "{", "'grant_type'", ":", "'client_credentials'", ",", "'scope'", ":", "'user_profile user_territory'", "}", "json_object", "=", "self", ".", "http", ".", "_post_data", "(", "KKBOXOAuth", ".", "OAUTH_TOKEN_URL", ",", "post_parameters", ",", "headers", ")", "self", ".", "access_token", "=", "KKBOXAccessToken", "(", "*", "*", "json_object", ")", "return", "self", ".", "access_token" ]
There are three ways to let you start using KKBOX's Open/Partner API. The first way among them is to generate a client credential to fetch an access token to let KKBOX identify you. It allows you to access public data from KKBOX such as public albums, playlists and so on. However, you cannot use client credentials to access private data of a user. You have to let users log in to KKBOX and grant permissions for you to do so. You cannot use client credentials to do media playback either, since it requires a Premium Membership. :return: an access token :rtype: :class:`kkbox_sdk.KKBOXAccessToken` See `https://docs-en.kkbox.codes/docs/appendix-a`.
[ "There", "are", "three", "ways", "to", "let", "you", "start", "using", "KKBOX", "s", "Open", "/", "Partner", "API", ".", "The", "first", "way", "among", "them", "is", "to", "generate", "a", "client", "credential", "to", "fetch", "an", "access", "token", "to", "let", "KKBOX", "identify", "you", ".", "It", "allows", "you", "to", "access", "public", "data", "from", "KKBOX", "such", "as", "public", "albums", "playlists", "and", "so", "on", "." ]
python
train
48.705882
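The Basic-auth header construction from the record, stand-alone (client id and secret are placeholders):

```python
import base64

client_id, client_secret = 'my-id', 'my-secret'
raw = '%s:%s' % (client_id, client_secret)
cred = base64.b64encode(raw.encode('utf-8')).decode('utf-8')
headers = {'Authorization': 'Basic ' + cred,
           'Content-type': 'application/x-www-form-urlencoded'}
print(headers['Authorization'])  # Basic bXktaWQ6bXktc2VjcmV0
```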
Hackerfleet/hfos
hfos/tool/installer.py
https://github.com/Hackerfleet/hfos/blob/b6df14eacaffb6be5c844108873ff8763ec7f0c9/hfos/tool/installer.py#L377-L380
def service(ctx): """Install systemd service configuration""" install_service(ctx.obj['instance'], ctx.obj['dbhost'], ctx.obj['dbname'], ctx.obj['port'])
[ "def", "service", "(", "ctx", ")", ":", "install_service", "(", "ctx", ".", "obj", "[", "'instance'", "]", ",", "ctx", ".", "obj", "[", "'dbhost'", "]", ",", "ctx", ".", "obj", "[", "'dbname'", "]", ",", "ctx", ".", "obj", "[", "'port'", "]", ")" ]
Install systemd service configuration
[ "Install", "systemd", "service", "configuration" ]
python
train
39.75
CodeReclaimers/neat-python
neat/distributed.py
https://github.com/CodeReclaimers/neat-python/blob/e3dbe77c0d776eae41d598e6439e6ac02ab90b18/neat/distributed.py#L469-L473
def _reset_em(self): """Resets self.em and the shared instances.""" self.em = _ExtendedManager(self.addr, self.authkey, mode=self.mode, start=False) self.em.start() self._set_shared_instances()
[ "def", "_reset_em", "(", "self", ")", ":", "self", ".", "em", "=", "_ExtendedManager", "(", "self", ".", "addr", ",", "self", ".", "authkey", ",", "mode", "=", "self", ".", "mode", ",", "start", "=", "False", ")", "self", ".", "em", ".", "start", "(", ")", "self", ".", "_set_shared_instances", "(", ")" ]
Resets self.em and the shared instances.
[ "Resets", "self", ".", "em", "and", "the", "shared", "instances", "." ]
python
train
44.2
jleinonen/pytmatrix
pytmatrix/refractive.py
https://github.com/jleinonen/pytmatrix/blob/8803507fe5332786feab105fa74acf63e7121718/pytmatrix/refractive.py#L60-L74
def bruggeman_refractive(m, mix): """Bruggeman EMA for the refractive index. For instructions, see mg_refractive in this module, except this routine only works for two components. """ f1 = mix[0]/sum(mix) f2 = mix[1]/sum(mix) e1 = m[0]**2 e2 = m[1]**2 a = -2*(f1+f2) b = (2*f1*e1 - f1*e2 + 2*f2*e2 - f2*e1) c = (f1+f2)*e1*e2 e_eff = (-b - np.sqrt(b**2-4*a*c))/(2*a) return np.sqrt(e_eff)
[ "def", "bruggeman_refractive", "(", "m", ",", "mix", ")", ":", "f1", "=", "mix", "[", "0", "]", "/", "sum", "(", "mix", ")", "f2", "=", "mix", "[", "1", "]", "/", "sum", "(", "mix", ")", "e1", "=", "m", "[", "0", "]", "**", "2", "e2", "=", "m", "[", "1", "]", "**", "2", "a", "=", "-", "2", "*", "(", "f1", "+", "f2", ")", "b", "=", "(", "2", "*", "f1", "*", "e1", "-", "f1", "*", "e2", "+", "2", "*", "f2", "*", "e2", "-", "f2", "*", "e1", ")", "c", "=", "(", "f1", "+", "f2", ")", "*", "e1", "*", "e2", "e_eff", "=", "(", "-", "b", "-", "np", ".", "sqrt", "(", "b", "**", "2", "-", "4", "*", "a", "*", "c", ")", ")", "/", "(", "2", "*", "a", ")", "return", "np", ".", "sqrt", "(", "e_eff", ")" ]
Bruggeman EMA for the refractive index. For instructions, see mg_refractive in this module, except this routine only works for two components.
[ "Bruggeman", "EMA", "for", "the", "refractive", "index", "." ]
python
train
28.4
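The two-component Bruggeman solve as a stand-alone script; the refractive indices and volume fractions are made-up illustrative values, and the `-b - sqrt(...)` branch mirrors the root choice in the record:

```python
import numpy as np

m = [1.78 + 0.004j, 1.0 + 0.0j]   # e.g. an ice-like medium and air
mix = [0.3, 0.7]
f1, f2 = mix[0] / sum(mix), mix[1] / sum(mix)
e1, e2 = m[0] ** 2, m[1] ** 2
a = -2 * (f1 + f2)
b = 2 * f1 * e1 - f1 * e2 + 2 * f2 * e2 - f2 * e1
c = (f1 + f2) * e1 * e2
print(np.sqrt((-b - np.sqrt(b ** 2 - 4 * a * c)) / (2 * a)))
```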
Esri/ArcREST
src/arcrest/manageportal/administration.py
https://github.com/Esri/ArcREST/blob/ab240fde2b0200f61d4a5f6df033516e53f2f416/src/arcrest/manageportal/administration.py#L678-L700
def refreshUserMembership(self, users): """ This operation iterates over every enterprise group configured in the portal and determines if the input user accounts belong to any of the configured enterprise groups. If there is any change in membership, the database and the indexes are updated for each user account. While portal automatically refreshes the memberships during a user login and during a periodic refresh (configured through the Update Identity Store operation), this operation allows an administrator to force a refresh. Parameters: users - comma separated list of user names """ params = { "f" : "json", "users" : users } url = self._url + "/users/refreshMembership" return self._post(url=url, param_dict=params, proxy_port=self._proxy_port, proxy_url=self._proxy_url)
[ "def", "refreshUserMembership", "(", "self", ",", "users", ")", ":", "params", "=", "{", "\"f\"", ":", "\"json\"", ",", "\"users\"", ":", "users", "}", "url", "=", "self", ".", "_url", "+", "\"/users/refreshMembership\"", "return", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ",", "proxy_port", "=", "self", ".", "_proxy_port", ",", "proxy_url", "=", "self", ".", "_proxy_url", ")" ]
This operation iterates over every enterprise group configured in the portal and determines if the input user accounts belong to any of the configured enterprise groups. If there is any change in membership, the database and the indexes are updated for each user account. While portal automatically refreshes the memberships during a user login and during a periodic refresh (configured through the Update Identity Store operation), this operation allows an administrator to force a refresh. Parameters: users - comma separated list of user names
[ "This", "operation", "iterates", "over", "every", "enterprise", "group", "configured", "in", "the", "portal", "and", "determines", "if", "the", "input", "user", "accounts", "belong", "to", "any", "of", "the", "configured", "enterprise", "groups", ".", "If", "there", "is", "any", "change", "in", "membership", "the", "database", "and", "the", "indexes", "are", "updated", "for", "each", "user", "account", ".", "While", "portal", "automatically", "refreshes", "the", "memberships", "during", "a", "user", "login", "and", "during", "a", "periodic", "refresh", "(", "configured", "through", "the", "Update", "Identity", "Store", "operation", ")", "this", "operation", "allows", "an", "administrator", "to", "force", "a", "refresh", "." ]
python
train
43.26087
napalm-automation/napalm
napalm/junos/junos.py
https://github.com/napalm-automation/napalm/blob/c11ae8bb5ce395698704a0051cdf8d144fbb150d/napalm/junos/junos.py#L149-L156
def _unlock(self): """Unlock the config DB.""" if self.locked: try: self.device.cu.unlock() self.locked = False except JnrpUnlockError as jue: raise UnlockError(jue.message)
[ "def", "_unlock", "(", "self", ")", ":", "if", "self", ".", "locked", ":", "try", ":", "self", ".", "device", ".", "cu", ".", "unlock", "(", ")", "self", ".", "locked", "=", "False", "except", "JnrpUnlockError", "as", "jue", ":", "raise", "UnlockError", "(", "jue", ".", "message", ")" ]
Unlock the config DB.
[ "Unlock", "the", "config", "DB", "." ]
python
train
31.875
sbg/sevenbridges-python
sevenbridges/models/task.py
https://github.com/sbg/sevenbridges-python/blob/f62640d1018d959f0b686f2dbe5e183085336607/sevenbridges/models/task.py#L367-L379
def get_execution_details(self): """ Retrieves execution details for a task. :return: Execution details instance. """ extra = { 'resource': self.__class__.__name__, 'query': {'id': self.id} } logger.info('Get execution details', extra=extra) data = self._api.get( self._URL['execution_details'].format(id=self.id)).json() return ExecutionDetails(api=self._api, **data)
[ "def", "get_execution_details", "(", "self", ")", ":", "extra", "=", "{", "'resource'", ":", "self", ".", "__class__", ".", "__name__", ",", "'query'", ":", "{", "'id'", ":", "self", ".", "id", "}", "}", "logger", ".", "info", "(", "'Get execution details'", ",", "extra", "=", "extra", ")", "data", "=", "self", ".", "_api", ".", "get", "(", "self", ".", "_URL", "[", "'execution_details'", "]", ".", "format", "(", "id", "=", "self", ".", "id", ")", ")", ".", "json", "(", ")", "return", "ExecutionDetails", "(", "api", "=", "self", ".", "_api", ",", "*", "*", "data", ")" ]
Retrieves execution details for a task. :return: Execution details instance.
[ "Retrieves", "execution", "details", "for", "a", "task", ".", ":", "return", ":", "Execution", "details", "instance", "." ]
python
train
35.692308
tensorflow/tensor2tensor
tensor2tensor/models/research/vqa_attention.py
https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/models/research/vqa_attention.py#L333-L400
def vqa_attention_base(): """VQA attention baseline hparams.""" hparams = common_hparams.basic_params1() hparams.batch_size = 128 hparams.use_fixed_batch_size = True, hparams.optimizer = "adam" hparams.optimizer_adam_beta1 = 0.9 hparams.optimizer_adam_beta2 = 0.999 hparams.optimizer_adam_epsilon = 1e-8 hparams.weight_decay = 0. hparams.clip_grad_norm = 0. hparams.initializer = "xavier" hparams.learning_rate = 0.5 hparams.learning_rate_schedule = "legacy" hparams.learning_rate_warmup_steps = 0 hparams.learning_rate_decay_scheme = "exp" hparams.learning_rate_decay_rate = 0.5 hparams.learning_rate_decay_steps = 50000 hparams.dropout = 0.5 hparams.summarize_grads = True hparams.summarize_vars = True # not used hparams hparams.label_smoothing = 0. hparams.multiply_embedding_mode = "" # add new hparams # preprocess hparams.add_hparam("resize_side", 512) hparams.add_hparam("height", 448) hparams.add_hparam("width", 448) hparams.add_hparam("distort", True) hparams.add_hparam("train_resnet", False) hparams.add_hparam("rnn_type", "lstm") hparams.add_hparam("num_rnn_layers", 1) hparams.add_hparam("max_question_length", 15) # lstm hidden size hparams.hidden_size = 512 hparams.add_hparam("attn_dim", 512) hparams.add_hparam("num_glimps", 2) hparams.add_hparam("num_mlp_layers", 1) hparams.add_hparam("mlp_dim", 1024) hparams.add_hparam("image_input_type", "image") hparams.add_hparam("image_model_fn", "resnet_v1_152") hparams.add_hparam("image_feat_size", 0) # self attention parts hparams.norm_type = "layer" hparams.layer_preprocess_sequence = "n" hparams.layer_postprocess_sequence = "da" hparams.layer_prepostprocess_dropout = 0.3 hparams.attention_dropout = 0.1 hparams.relu_dropout = 0.1 hparams.image_hidden_size = 2048 hparams.add_hparam("num_encoder_layers", 1) # Attention-related flags. hparams.add_hparam("num_heads", 8) hparams.add_hparam("attention_key_channels", 0) hparams.add_hparam("attention_value_channels", 0) hparams.add_hparam("image_filter_size", 1024) hparams.add_hparam("self_attention_type", "dot_product") hparams.add_hparam("scale_dotproduct", True) return hparams
[ "def", "vqa_attention_base", "(", ")", ":", "hparams", "=", "common_hparams", ".", "basic_params1", "(", ")", "hparams", ".", "batch_size", "=", "128", "hparams", ".", "use_fixed_batch_size", "=", "True", ",", "hparams", ".", "optimizer", "=", "\"adam\"", "hparams", ".", "optimizer_adam_beta1", "=", "0.9", "hparams", ".", "optimizer_adam_beta2", "=", "0.999", "hparams", ".", "optimizer_adam_epsilon", "=", "1e-8", "hparams", ".", "weight_decay", "=", "0.", "hparams", ".", "clip_grad_norm", "=", "0.", "hparams", ".", "initializer", "=", "\"xavier\"", "hparams", ".", "learning_rate", "=", "0.5", "hparams", ".", "learning_rate_schedule", "=", "\"legacy\"", "hparams", ".", "learning_rate_warmup_steps", "=", "0", "hparams", ".", "learning_rate_decay_scheme", "=", "\"exp\"", "hparams", ".", "learning_rate_decay_rate", "=", "0.5", "hparams", ".", "learning_rate_decay_steps", "=", "50000", "hparams", ".", "dropout", "=", "0.5", "hparams", ".", "summarize_grads", "=", "True", "hparams", ".", "summarize_vars", "=", "True", "# not used hparams", "hparams", ".", "label_smoothing", "=", "0.", "hparams", ".", "multiply_embedding_mode", "=", "\"\"", "# add new hparams", "# preprocess", "hparams", ".", "add_hparam", "(", "\"resize_side\"", ",", "512", ")", "hparams", ".", "add_hparam", "(", "\"height\"", ",", "448", ")", "hparams", ".", "add_hparam", "(", "\"width\"", ",", "448", ")", "hparams", ".", "add_hparam", "(", "\"distort\"", ",", "True", ")", "hparams", ".", "add_hparam", "(", "\"train_resnet\"", ",", "False", ")", "hparams", ".", "add_hparam", "(", "\"rnn_type\"", ",", "\"lstm\"", ")", "hparams", ".", "add_hparam", "(", "\"num_rnn_layers\"", ",", "1", ")", "hparams", ".", "add_hparam", "(", "\"max_question_length\"", ",", "15", ")", "# lstm hidden size", "hparams", ".", "hidden_size", "=", "512", "hparams", ".", "add_hparam", "(", "\"attn_dim\"", ",", "512", ")", "hparams", ".", "add_hparam", "(", "\"num_glimps\"", ",", "2", ")", "hparams", ".", "add_hparam", "(", "\"num_mlp_layers\"", ",", "1", ")", "hparams", ".", "add_hparam", "(", "\"mlp_dim\"", ",", "1024", ")", "hparams", ".", "add_hparam", "(", "\"image_input_type\"", ",", "\"image\"", ")", "hparams", ".", "add_hparam", "(", "\"image_model_fn\"", ",", "\"resnet_v1_152\"", ")", "hparams", ".", "add_hparam", "(", "\"image_feat_size\"", ",", "0", ")", "# self attention parts", "hparams", ".", "norm_type", "=", "\"layer\"", "hparams", ".", "layer_preprocess_sequence", "=", "\"n\"", "hparams", ".", "layer_postprocess_sequence", "=", "\"da\"", "hparams", ".", "layer_prepostprocess_dropout", "=", "0.3", "hparams", ".", "attention_dropout", "=", "0.1", "hparams", ".", "relu_dropout", "=", "0.1", "hparams", ".", "image_hidden_size", "=", "2048", "hparams", ".", "add_hparam", "(", "\"num_encoder_layers\"", ",", "1", ")", "# Attention-related flags.", "hparams", ".", "add_hparam", "(", "\"num_heads\"", ",", "8", ")", "hparams", ".", "add_hparam", "(", "\"attention_key_channels\"", ",", "0", ")", "hparams", ".", "add_hparam", "(", "\"attention_value_channels\"", ",", "0", ")", "hparams", ".", "add_hparam", "(", "\"image_filter_size\"", ",", "1024", ")", "hparams", ".", "add_hparam", "(", "\"self_attention_type\"", ",", "\"dot_product\"", ")", "hparams", ".", "add_hparam", "(", "\"scale_dotproduct\"", ",", "True", ")", "return", "hparams" ]
VQA attention baseline hparams.
[ "VQA", "attention", "baseline", "hparams", "." ]
python
train
31.75
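A minimal sketch of consuming these hparams, assuming the tensor2tensor module that defines vqa_attention_base and common_hparams is importable; the checked values echo the settings above.

hparams = vqa_attention_base()
assert hparams.optimizer == "adam"
assert hparams.hidden_size == 512   # LSTM hidden size set above
assert hparams.num_glimps == 2      # registered via add_hparam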
wonambi-python/wonambi
wonambi/ioeeg/micromed.py
https://github.com/wonambi-python/wonambi/blob/1d8e3d7e53df8017c199f703bcab582914676e76/wonambi/ioeeg/micromed.py#L67-L114
def return_dat(self, chan, begsam, endsam): """Return the data as 2D numpy.ndarray. Parameters ---------- chan : int or list index (indices) of the channels to read begsam : int index of the first sample endsam : int index of the last sample Returns ------- numpy.ndarray A 2d matrix, with dimension chan X samples """ if type(chan) == int: # if single value is provided it needs to be transformed to list to generate a 2d matrix chan = [chan, ] if (begsam >= self._n_smp) or (endsam < 0): dat = empty((len(chan), endsam - begsam)) dat.fill(NaN) return dat if begsam < 0: begpad = -1 * begsam begsam = 0 else: begpad = 0 if endsam > self._n_smp: endpad = endsam - self._n_smp endsam = self._n_smp else: endpad = 0 dshape = (self._n_chan, endsam - begsam) sig_dtype = 'u' + str(self._n_bytes) offset = self._bodata + begsam * self._n_bytes * self._n_chan dat = memmap(str(self.filename), dtype=sig_dtype, order='F', mode='r', shape=dshape, offset=offset).astype('float') dat = pad(dat[chan, :], ((0, 0), (begpad, endpad)), mode='constant', constant_values=NaN) return (dat - self._offset[chan, None]) * self._factors[chan, None]
[ "def", "return_dat", "(", "self", ",", "chan", ",", "begsam", ",", "endsam", ")", ":", "if", "type", "(", "chan", ")", "==", "int", ":", "# if single value is provided it needs to be transformed to list to generate a 2d matrix", "chan", "=", "[", "chan", ",", "]", "if", "(", "begsam", ">=", "self", ".", "_n_smp", ")", "or", "(", "endsam", "<", "0", ")", ":", "dat", "=", "empty", "(", "(", "len", "(", "chan", ")", ",", "endsam", "-", "begsam", ")", ")", "dat", ".", "fill", "(", "NaN", ")", "return", "dat", "if", "begsam", "<", "0", ":", "begpad", "=", "-", "1", "*", "begsam", "begsam", "=", "0", "else", ":", "begpad", "=", "0", "if", "endsam", ">", "self", ".", "_n_smp", ":", "endpad", "=", "endsam", "-", "self", ".", "_n_smp", "endsam", "=", "self", ".", "_n_smp", "else", ":", "endpad", "=", "0", "dshape", "=", "(", "self", ".", "_n_chan", ",", "endsam", "-", "begsam", ")", "sig_dtype", "=", "'u'", "+", "str", "(", "self", ".", "_n_bytes", ")", "offset", "=", "self", ".", "_bodata", "+", "begsam", "*", "self", ".", "_n_bytes", "*", "self", ".", "_n_chan", "dat", "=", "memmap", "(", "str", "(", "self", ".", "filename", ")", ",", "dtype", "=", "sig_dtype", ",", "order", "=", "'F'", ",", "mode", "=", "'r'", ",", "shape", "=", "dshape", ",", "offset", "=", "offset", ")", ".", "astype", "(", "'float'", ")", "dat", "=", "pad", "(", "dat", "[", "chan", ",", ":", "]", ",", "(", "(", "0", ",", "0", ")", ",", "(", "begpad", ",", "endpad", ")", ")", ",", "mode", "=", "'constant'", ",", "constant_values", "=", "NaN", ")", "return", "(", "dat", "-", "self", ".", "_offset", "[", "chan", ",", "None", "]", ")", "*", "self", ".", "_factors", "[", "chan", ",", "None", "]" ]
Return the data as 2D numpy.ndarray. Parameters ---------- chan : int or list index (indices) of the channels to read begsam : int index of the first sample endsam : int index of the last sample Returns ------- numpy.ndarray A 2d matrix, with dimension chan X samples
[ "Return", "the", "data", "as", "2D", "numpy", ".", "ndarray", "." ]
python
train
30.6875
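A hedged usage sketch for return_dat: the Micromed reader class name and the file path are assumptions; samples requested outside the recording come back NaN-padded, as coded above.

from wonambi.ioeeg.micromed import Micromed

reader = Micromed('recording.TRC')                 # placeholder path
dat = reader.return_dat(chan=[0, 1], begsam=0, endsam=512)
print(dat.shape)                                   # (2, 512): channels x samples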
has2k1/scikit-misc
setup.py
https://github.com/has2k1/scikit-misc/blob/1d599761e11f84233e59602330e22823efeee226/setup.py#L102-L120
def setup_requires(): """ Return required packages Plus any version tests and warnings """ from pkg_resources import parse_version required = ['cython>=0.24.0'] numpy_requirement = 'numpy>=1.7.1' try: import numpy except Exception: required.append(numpy_requirement) else: if parse_version(numpy.__version__) < parse_version('1.7.1'): required.append(numpy_requirement) return required
[ "def", "setup_requires", "(", ")", ":", "from", "pkg_resources", "import", "parse_version", "required", "=", "[", "'cython>=0.24.0'", "]", "numpy_requirement", "=", "'numpy>=1.7.1'", "try", ":", "import", "numpy", "except", "Exception", ":", "required", ".", "append", "(", "numpy_requirement", ")", "else", ":", "if", "parse_version", "(", "numpy", ".", "__version__", ")", "<", "parse_version", "(", "'1.7.1'", ")", ":", "required", ".", "append", "(", "numpy_requirement", ")", "return", "required" ]
Return required packages Plus any version tests and warnings
[ "Return", "required", "packages" ]
python
train
23.684211
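A sketch of wiring this helper into setuptools; the package metadata is illustrative.

from setuptools import setup

setup(
    name='scikit-misc',
    version='0.0.0',                  # placeholder
    setup_requires=setup_requires(),  # pulls in numpy only when needed
)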
Leeps-Lab/otree-redwood
otree_redwood/models.py
https://github.com/Leeps-Lab/otree-redwood/blob/59212f61a256ef77e0a9ed392ff497ea83ee6245/otree_redwood/models.py#L213-L245
def when_all_players_ready(self): """Initializes decisions based on ``player.initial_decision()``. If :attr:`num_subperiods` is set, starts a timed task to run the sub-periods. """ self.group_decisions = {} self.subperiod_group_decisions = {} for player in self.get_players(): self.group_decisions[player.participant.code] = player.initial_decision() self.subperiod_group_decisions[player.participant.code] = player.initial_decision() if self.num_subperiods(): emitter = DiscreteEventEmitter( self.period_length() / self.num_subperiods(), self.period_length(), self, self._subperiod_tick) emitter.start() elif self.rate_limit(): def _tick(current_interval, intervals): self.refresh_from_db() if self._group_decisions_updated: self.send('group_decisions', self.group_decisions) self._group_decisions_updated = False self.save(update_fields=['_group_decisions_updated']) update_period = self.rate_limit() emitter = DiscreteEventEmitter( update_period, self.period_length(), self, _tick) emitter.start() self.save()
[ "def", "when_all_players_ready", "(", "self", ")", ":", "self", ".", "group_decisions", "=", "{", "}", "self", ".", "subperiod_group_decisions", "=", "{", "}", "for", "player", "in", "self", ".", "get_players", "(", ")", ":", "self", ".", "group_decisions", "[", "player", ".", "participant", ".", "code", "]", "=", "player", ".", "initial_decision", "(", ")", "self", ".", "subperiod_group_decisions", "[", "player", ".", "participant", ".", "code", "]", "=", "player", ".", "initial_decision", "(", ")", "if", "self", ".", "num_subperiods", "(", ")", ":", "emitter", "=", "DiscreteEventEmitter", "(", "self", ".", "period_length", "(", ")", "/", "self", ".", "num_subperiods", "(", ")", ",", "self", ".", "period_length", "(", ")", ",", "self", ",", "self", ".", "_subperiod_tick", ")", "emitter", ".", "start", "(", ")", "elif", "self", ".", "rate_limit", "(", ")", ":", "def", "_tick", "(", "current_interval", ",", "intervals", ")", ":", "self", ".", "refresh_from_db", "(", ")", "if", "self", ".", "_group_decisions_updated", ":", "self", ".", "send", "(", "'group_decisions'", ",", "self", ".", "group_decisions", ")", "self", ".", "_group_decisions_updated", "=", "False", "self", ".", "save", "(", "update_fields", "=", "[", "'_group_decisions_updated'", "]", ")", "update_period", "=", "self", ".", "rate_limit", "(", ")", "emitter", "=", "DiscreteEventEmitter", "(", "update_period", ",", "self", ".", "period_length", "(", ")", ",", "self", ",", "_tick", ")", "emitter", ".", "start", "(", ")", "self", ".", "save", "(", ")" ]
Initializes decisions based on ``player.initial_decision()``. If :attr:`num_subperiods` is set, starts a timed task to run the sub-periods.
[ "Initializes", "decisions", "based", "on", "player", ".", "initial_decision", "()", ".", "If", ":", "attr", ":", "num_subperiods", "is", "set", "starts", "a", "timed", "task", "to", "run", "the", "sub", "-", "periods", "." ]
python
train
41.787879
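A hedged sketch of the experiment Group that would drive this hook; the base-class import follows the repo path above, while the override values are illustrative.

from otree_redwood.models import Group as RedwoodGroup

class Group(RedwoodGroup):
    def period_length(self):
        return 120   # seconds per period

    def num_subperiods(self):
        return 10    # when_all_players_ready() then ticks every 12 s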
google/apitools
apitools/base/py/credentials_lib.py
https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/credentials_lib.py#L670-L680
def _EnsureFileExists(self): """Touches a file; returns False on error, True on success.""" if not os.path.exists(self._filename): old_umask = os.umask(0o177) try: open(self._filename, 'a+b').close() except OSError: return False finally: os.umask(old_umask) return True
[ "def", "_EnsureFileExists", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "_filename", ")", ":", "old_umask", "=", "os", ".", "umask", "(", "0o177", ")", "try", ":", "open", "(", "self", ".", "_filename", ",", "'a+b'", ")", ".", "close", "(", ")", "except", "OSError", ":", "return", "False", "finally", ":", "os", ".", "umask", "(", "old_umask", ")", "return", "True" ]
Touches a file; returns False on error, True on success.
[ "Touches", "a", "file", ";", "returns", "False", "on", "error", "True", "on", "success", "." ]
python
train
34.454545
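The same owner-only creation idiom as a standalone stdlib helper, for contexts outside the credentials class.

import os

def ensure_private_file(path):
    """Create path with 0600 permissions if missing; False on OSError."""
    if not os.path.exists(path):
        old_umask = os.umask(0o177)  # strip group/other bits from new files
        try:
            open(path, 'a+b').close()
        except OSError:
            return False
        finally:
            os.umask(old_umask)
    return True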
jtwhite79/pyemu
pyemu/en.py
https://github.com/jtwhite79/pyemu/blob/c504d8e7a4097cec07655a6318d275739bd8148a/pyemu/en.py#L407-L418
def nonzero(self): """ property decorated method to get a new ObservationEnsemble of only non-zero weighted observations Returns ------- ObservationEnsemble : ObservationEnsemble """ df = self.loc[:,self.pst.nnz_obs_names] return ObservationEnsemble.from_dataframe(df=df, pst=self.pst.get(obs_names=self.pst.nnz_obs_names))
[ "def", "nonzero", "(", "self", ")", ":", "df", "=", "self", ".", "loc", "[", ":", ",", "self", ".", "pst", ".", "nnz_obs_names", "]", "return", "ObservationEnsemble", ".", "from_dataframe", "(", "df", "=", "df", ",", "pst", "=", "self", ".", "pst", ".", "get", "(", "obs_names", "=", "self", ".", "pst", ".", "nnz_obs_names", ")", ")" ]
property decorated method to get a new ObservationEnsemble of only non-zero weighted observations Returns ------- ObservationEnsemble : ObservationEnsemble
[ "property", "decorated", "method", "to", "get", "a", "new", "ObservationEnsemble", "of", "only", "non", "-", "zero", "weighted", "observations" ]
python
train
33.5
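A hedged usage sketch: the control-file name is a placeholder and the draw classmethod name is an assumption about this pyemu vintage; note that nonzero is a property, not a call.

import pyemu

pst = pyemu.Pst('model.pst')
oe = pyemu.ObservationEnsemble.from_id_gaussian_draw(pst, num_reals=50)
nz = oe.nonzero   # columns restricted to pst.nnz_obs_names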
jantman/webhook2lambda2sqs
webhook2lambda2sqs/tf_generator.py
https://github.com/jantman/webhook2lambda2sqs/blob/c80c18d5a908ba8b8ee624dc3a977c633fba2b7c/webhook2lambda2sqs/tf_generator.py#L380-L398
def _generate_api_gateway_deployment(self): """ Generate the API Gateway Deployment/Stage, and add to self.tf_conf """ # finally, the deployment # this resource MUST come last dep_on = [] for rtype in sorted(self.tf_conf['resource'].keys()): for rname in sorted(self.tf_conf['resource'][rtype].keys()): dep_on.append('%s.%s' % (rtype, rname)) self.tf_conf['resource']['aws_api_gateway_deployment']['depl'] = { 'rest_api_id': '${aws_api_gateway_rest_api.rest_api.id}', 'description': self.description, 'stage_name': self.config.stage_name, 'depends_on': dep_on } self.tf_conf['output']['deployment_id'] = { 'value': '${aws_api_gateway_deployment.depl.id}' }
[ "def", "_generate_api_gateway_deployment", "(", "self", ")", ":", "# finally, the deployment", "# this resource MUST come last", "dep_on", "=", "[", "]", "for", "rtype", "in", "sorted", "(", "self", ".", "tf_conf", "[", "'resource'", "]", ".", "keys", "(", ")", ")", ":", "for", "rname", "in", "sorted", "(", "self", ".", "tf_conf", "[", "'resource'", "]", "[", "rtype", "]", ".", "keys", "(", ")", ")", ":", "dep_on", ".", "append", "(", "'%s.%s'", "%", "(", "rtype", ",", "rname", ")", ")", "self", ".", "tf_conf", "[", "'resource'", "]", "[", "'aws_api_gateway_deployment'", "]", "[", "'depl'", "]", "=", "{", "'rest_api_id'", ":", "'${aws_api_gateway_rest_api.rest_api.id}'", ",", "'description'", ":", "self", ".", "description", ",", "'stage_name'", ":", "self", ".", "config", ".", "stage_name", ",", "'depends_on'", ":", "dep_on", "}", "self", ".", "tf_conf", "[", "'output'", "]", "[", "'deployment_id'", "]", "=", "{", "'value'", ":", "'${aws_api_gateway_deployment.depl.id}'", "}" ]
Generate the API Gateway Deployment/Stage, and add to self.tf_conf
[ "Generate", "the", "API", "Gateway", "Deployment", "/", "Stage", "and", "add", "to", "self", ".", "tf_conf" ]
python
train
42.842105
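A Python-literal sketch of the shape this step leaves in self.tf_conf before JSON serialization; the description, stage name, and depends_on entries are illustrative.

tf_conf_fragment = {
    'resource': {
        'aws_api_gateway_deployment': {
            'depl': {
                'rest_api_id': '${aws_api_gateway_rest_api.rest_api.id}',
                'description': 'created by webhook2lambda2sqs',   # placeholder
                'stage_name': 'webhook2lambda2sqs',               # placeholder
                'depends_on': ['aws_api_gateway_rest_api.rest_api'],
            }
        }
    },
    'output': {
        'deployment_id': {'value': '${aws_api_gateway_deployment.depl.id}'}
    },
}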
DaveMcEwan/ndim
ndim_base.py
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L212-L226
def pt_shift(pt=(0.0, 0.0), shift=[0.0, 0.0]): '''Return given point shifted in N dimensions. ''' assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) assert isinstance(shift, list) l_sh = len(shift) assert l_sh == l_pt for i in shift: assert isinstance(i, float) return tuple([pt[i] + shift[i] for i in range(l_pt)])
[ "def", "pt_shift", "(", "pt", "=", "(", "0.0", ",", "0.0", ")", ",", "shift", "=", "[", "0.0", ",", "0.0", "]", ")", ":", "assert", "isinstance", "(", "pt", ",", "tuple", ")", "l_pt", "=", "len", "(", "pt", ")", "assert", "l_pt", ">", "1", "for", "i", "in", "pt", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "assert", "isinstance", "(", "shift", ",", "list", ")", "l_sh", "=", "len", "(", "shift", ")", "assert", "l_sh", "==", "l_pt", "for", "i", "in", "shift", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "return", "tuple", "(", "[", "pt", "[", "i", "]", "+", "shift", "[", "i", "]", "for", "i", "in", "range", "(", "l_pt", ")", "]", ")" ]
Return given point shifted in N dimensions.
[ "Return", "given", "point", "shifted", "in", "N", "dimensions", "." ]
python
train
27.466667
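Runnable usage of pt_shift exactly as defined above; the asserts in the function require float entries and matching lengths.

print(pt_shift((1.0, 2.0), [0.5, -0.5]))            # (1.5, 1.5)
print(pt_shift((0.0, 1.0, 2.0), [1.0, 1.0, 1.0]))   # (1.0, 2.0, 3.0)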
pypa/pipenv
pipenv/vendor/vistir/spin.py
https://github.com/pypa/pipenv/blob/cae8d76c210b9777e90aab76e9c4b0e53bb19cde/pipenv/vendor/vistir/spin.py#L318-L332
def _freeze(self, final_text, err=False): """Stop spinner, compose last frame and 'freeze' it.""" if not final_text: final_text = "" target = self.stderr if err else self.stdout if target.closed: target = sys.stderr if err else sys.stdout text = to_text(final_text) last_frame = self._compose_out(text, mode="last") self._last_frame = decode_output(last_frame, target_stream=target) # Should be stopped here, otherwise prints after # self._freeze call will mess up the spinner self.stop() target.write(self._last_frame)
[ "def", "_freeze", "(", "self", ",", "final_text", ",", "err", "=", "False", ")", ":", "if", "not", "final_text", ":", "final_text", "=", "\"\"", "target", "=", "self", ".", "stderr", "if", "err", "else", "self", ".", "stdout", "if", "target", ".", "closed", ":", "target", "=", "sys", ".", "stderr", "if", "err", "else", "sys", ".", "stdout", "text", "=", "to_text", "(", "final_text", ")", "last_frame", "=", "self", ".", "_compose_out", "(", "text", ",", "mode", "=", "\"last\"", ")", "self", ".", "_last_frame", "=", "decode_output", "(", "last_frame", ",", "target_stream", "=", "target", ")", "# Should be stopped here, otherwise prints after", "# self._freeze call will mess up the spinner", "self", ".", "stop", "(", ")", "target", ".", "write", "(", "self", ".", "_last_frame", ")" ]
Stop spinner, compose last frame and 'freeze' it.
[ "Stop", "spinner", "compose", "last", "frame", "and", "freeze", "it", "." ]
python
train
41.2
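A hedged sketch of the public path into _freeze: the yaspin-style ok()/fail() finalizers compose the last frame and freeze it; create_spinner is assumed to be the vistir.spin helper.

from vistir import spin

with spin.create_spinner(text='working') as spinner:
    # ... real work would go here ...
    spinner.ok('done')   # composes the last frame, then calls _freeze()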
sykora/djournal
djournal/views.py
https://github.com/sykora/djournal/blob/c074e1f94e07e2630034a00c7dbd768e933f85e2/djournal/views.py#L10-L26
def entry_index(request, limit=0, template='djournal/entry_index.html'):
    '''Returns a response of a fixed number of entries; all of them, by
    default.
    '''

    entries = Entry.public.all()

    if limit > 0:
        entries = entries[:limit]

    context = {
        'entries': entries,
    }

    return render_to_response(
        template,
        context,
        context_instance=RequestContext(request),
    )
[ "def", "entry_index", "(", "request", ",", "limit", "=", "0", ",", "template", "=", "'djournal/entry_index.html'", ")", ":", "entries", "=", "Entry", ".", "public", ".", "all", "(", ")", "if", "limit", ">", "0", ":", "entries", "=", "entries", "[", ":", "limit", "]", "context", "=", "{", "'entries'", ":", "entries", ",", "}", "return", "render_to_response", "(", "template", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", ")" ]
Returns a response of a fixed number of entries; all of them, by default.
[ "Returns", "a", "reponse", "of", "a", "fixed", "number", "of", "entries", ";", "all", "of", "them", "by", "default", "." ]
python
train
23.588235
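A hedged URLconf sketch for this view in the Django 1.x era its imports imply; the route names are illustrative.

from django.conf.urls import url
from djournal.views import entry_index

urlpatterns = [
    url(r'^$', entry_index, name='djournal-entry-index'),
    url(r'^latest/$', entry_index, {'limit': 5}, name='djournal-entry-latest'),
]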
watson-developer-cloud/python-sdk
ibm_watson/natural_language_understanding_v1.py
https://github.com/watson-developer-cloud/python-sdk/blob/4c2c9df4466fcde88975da9ecd834e6ba95eb353/ibm_watson/natural_language_understanding_v1.py#L1015-L1020
def _to_dict(self): """Return a json dictionary representing this model.""" _dict = {} if hasattr(self, 'emotion') and self.emotion is not None: _dict['emotion'] = self.emotion._to_dict() return _dict
[ "def", "_to_dict", "(", "self", ")", ":", "_dict", "=", "{", "}", "if", "hasattr", "(", "self", ",", "'emotion'", ")", "and", "self", ".", "emotion", "is", "not", "None", ":", "_dict", "[", "'emotion'", "]", "=", "self", ".", "emotion", ".", "_to_dict", "(", ")", "return", "_dict" ]
Return a json dictionary representing this model.
[ "Return", "a", "json", "dictionary", "representing", "this", "model", "." ]
python
train
39.833333
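A self-contained illustration of the recursive serialization pattern the SDK uses; these class names are placeholders, not real SDK classes.

class Emotion:
    def _to_dict(self):
        return {'document': {'joy': 0.8}}   # illustrative payload

class Feature:
    def __init__(self):
        self.emotion = Emotion()

    def _to_dict(self):
        _dict = {}
        if hasattr(self, 'emotion') and self.emotion is not None:
            _dict['emotion'] = self.emotion._to_dict()
        return _dict

print(Feature()._to_dict())   # {'emotion': {'document': {'joy': 0.8}}}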
T-002/pycast
pycast/common/matrix.py
https://github.com/T-002/pycast/blob/8a53505c6d8367e0ea572e8af768e80b29e1cc41/pycast/common/matrix.py#L274-L290
def get_matrix_from_list(self, rows, columns, matrix_list, rowBased=True):
    """Create a new Matrix instance from a matrix_list.

    :note: This method is used to create a Matrix instance using cpython.

    :param integer rows: The height of the Matrix.
    :param integer columns: The width of the Matrix.
    :param matrix_list: A one dimensional list containing the values for
        Matrix. Depending on the rowBased parameter, either the rows are
        combined or the columns.
    :param rowBased Boolean: Only necessary if the matrix_list is given.
        Indicates whether the matrix_list combines rows together
        (rowBased=True) or columns (rowBased=False).
    """
    resultMatrix = Matrix(columns, rows, matrix_list, rowBased)
    return resultMatrix
[ "def", "get_matrix_from_list", "(", "self", ",", "rows", ",", "columns", ",", "matrix_list", ",", "rowBased", "=", "True", ")", ":", "resultMatrix", "=", "Matrix", "(", "columns", ",", "rows", ",", "matrix_list", ",", "rowBased", ")", "return", "resultMatrix" ]
Create a new Matrix instance from a matrix_list.

:note: This method is used to create a Matrix instance using cpython.

:param integer rows: The height of the Matrix.
:param integer columns: The width of the Matrix.
:param matrix_list: A one dimensional list containing the values for
    Matrix. Depending on the rowBased parameter, either the rows are
    combined or the columns.
:param rowBased Boolean: Only necessary if the matrix_list is given.
    Indicates whether the matrix_list combines rows together
    (rowBased=True) or columns (rowBased=False).
[ "Create", "a", "new", "Matrix", "instance", "from", "a", "matrix_list", "." ]
python
train
58.882353
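Since the method reduces to one constructor call, an equivalent direct construction is sketched here; the import path follows the repo, and the (width, height) argument order mirrors the body above.

from pycast.common.matrix import Matrix

vals = [1.0, 2.0, 3.0,
        4.0, 5.0, 6.0]          # two rows of three values, row-based
m = Matrix(3, 2, vals, True)    # columns=3, rows=2, rowBased=True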
PMEAL/OpenPNM
openpnm/algorithms/MixedInvasionPercolation.py
https://github.com/PMEAL/OpenPNM/blob/0547b5724ffedc0a593aae48639d36fe10e0baed/openpnm/algorithms/MixedInvasionPercolation.py#L272-L292
def _add_ps2q(self, throat, queue):
    """
    Helper method to add pores to the cluster queue
    """
    net = self.project.network
    elem_type = 'pore'
    # Find pores connected to newly invaded throat
    Ps = net['throat.conns'][throat]
    # Remove already invaded pores from Ps
    Ps = Ps[self['pore.invasion_sequence'][Ps] <= 0]
    if len(Ps) > 0:
        self._interface_Ps[Ps] = True
        for P in Ps:
            data = []
            # Pc
            data.append(self["pore.entry_pressure"][P])
            # Element Index
            data.append(P)
            # Element Type (Pore or Throat)
            data.append(elem_type)
            hq.heappush(queue, data)
[ "def", "_add_ps2q", "(", "self", ",", "throat", ",", "queue", ")", ":", "net", "=", "self", ".", "project", ".", "network", "elem_type", "=", "'pore'", "# Find pores connected to newly invaded throat", "Ps", "=", "net", "[", "'throat.conns'", "]", "[", "throat", "]", "# Remove already invaded pores from Ps", "Ps", "=", "Ps", "[", "self", "[", "'pore.invasion_sequence'", "]", "[", "Ps", "]", "<=", "0", "]", "if", "len", "(", "Ps", ")", ">", "0", ":", "self", ".", "_interface_Ps", "[", "Ps", "]", "=", "True", "for", "P", "in", "Ps", ":", "data", "=", "[", "]", "# Pc", "data", ".", "append", "(", "self", "[", "\"pore.entry_pressure\"", "]", "[", "P", "]", ")", "# Element Index", "data", ".", "append", "(", "P", ")", "# Element Type (Pore of Throat)", "data", ".", "append", "(", "elem_type", ")", "hq", ".", "heappush", "(", "queue", ",", "data", ")" ]
Helper method to add pores to the cluster queue
[ "Helper", "method", "to", "add", "pores", "to", "the", "cluster", "queue" ]
python
train
35.52381
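A stdlib illustration of the queue discipline used above: heapq pops the lowest entry pressure first, breaking ties on the element index.

import heapq as hq

queue = []
hq.heappush(queue, [2.5, 7, 'pore'])
hq.heappush(queue, [1.2, 3, 'pore'])
hq.heappush(queue, [1.2, 1, 'throat'])
print(hq.heappop(queue))   # [1.2, 1, 'throat']: lowest Pc, then lowest index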
DataDog/integrations-core
kubelet/datadog_checks/kubelet/prometheus.py
https://github.com/DataDog/integrations-core/blob/ebd41c873cf9f97a8c51bf9459bc6a7536af8acd/kubelet/datadog_checks/kubelet/prometheus.py#L453-L461
def container_fs_usage_bytes(self, metric, scraper_config): """ Number of bytes that are consumed by the container on this filesystem. """ metric_name = scraper_config['namespace'] + '.filesystem.usage' if metric.type not in METRIC_TYPES: self.log.error("Metric type %s unsupported for metric %s" % (metric.type, metric.name)) return self._process_usage_metric(metric_name, metric, self.fs_usage_bytes, scraper_config)
[ "def", "container_fs_usage_bytes", "(", "self", ",", "metric", ",", "scraper_config", ")", ":", "metric_name", "=", "scraper_config", "[", "'namespace'", "]", "+", "'.filesystem.usage'", "if", "metric", ".", "type", "not", "in", "METRIC_TYPES", ":", "self", ".", "log", ".", "error", "(", "\"Metric type %s unsupported for metric %s\"", "%", "(", "metric", ".", "type", ",", "metric", ".", "name", ")", ")", "return", "self", ".", "_process_usage_metric", "(", "metric_name", ",", "metric", ",", "self", ".", "fs_usage_bytes", ",", "scraper_config", ")" ]
Number of bytes that are consumed by the container on this filesystem.
[ "Number", "of", "bytes", "that", "are", "consumed", "by", "the", "container", "on", "this", "filesystem", "." ]
python
train
53.555556
tensorflow/cleverhans
cleverhans/model_zoo/deep_k_nearest_neighbors/dknn.py
https://github.com/tensorflow/cleverhans/blob/97488e215760547b81afc53f5e5de8ba7da5bd98/cleverhans/model_zoo/deep_k_nearest_neighbors/dknn.py#L134-L168
def find_train_knns(self, data_activations): """ Given a data_activation dictionary that contains a np array with activations for each layer, find the knns in the training data. """ knns_ind = {} knns_labels = {} for layer in self.layers: # Pre-process representations of data to normalize and remove training data mean. data_activations_layer = copy.copy(data_activations[layer]) nb_data = data_activations_layer.shape[0] data_activations_layer /= np.linalg.norm( data_activations_layer, axis=1).reshape(-1, 1) data_activations_layer -= self.centers[layer] # Use FALCONN to find indices of nearest neighbors in training data. knns_ind[layer] = np.zeros( (data_activations_layer.shape[0], self.neighbors), dtype=np.int32) knn_errors = 0 for i in range(data_activations_layer.shape[0]): query_res = self.query_objects[layer].find_k_nearest_neighbors( data_activations_layer[i], self.neighbors) try: knns_ind[layer][i, :] = query_res except: # pylint: disable-msg=W0702 knns_ind[layer][i, :len(query_res)] = query_res knn_errors += knns_ind[layer].shape[1] - len(query_res) # Find labels of neighbors found in the training data. knns_labels[layer] = np.zeros((nb_data, self.neighbors), dtype=np.int32) for data_id in range(nb_data): knns_labels[layer][data_id, :] = self.train_labels[knns_ind[layer][data_id]] return knns_ind, knns_labels
[ "def", "find_train_knns", "(", "self", ",", "data_activations", ")", ":", "knns_ind", "=", "{", "}", "knns_labels", "=", "{", "}", "for", "layer", "in", "self", ".", "layers", ":", "# Pre-process representations of data to normalize and remove training data mean.", "data_activations_layer", "=", "copy", ".", "copy", "(", "data_activations", "[", "layer", "]", ")", "nb_data", "=", "data_activations_layer", ".", "shape", "[", "0", "]", "data_activations_layer", "/=", "np", ".", "linalg", ".", "norm", "(", "data_activations_layer", ",", "axis", "=", "1", ")", ".", "reshape", "(", "-", "1", ",", "1", ")", "data_activations_layer", "-=", "self", ".", "centers", "[", "layer", "]", "# Use FALCONN to find indices of nearest neighbors in training data.", "knns_ind", "[", "layer", "]", "=", "np", ".", "zeros", "(", "(", "data_activations_layer", ".", "shape", "[", "0", "]", ",", "self", ".", "neighbors", ")", ",", "dtype", "=", "np", ".", "int32", ")", "knn_errors", "=", "0", "for", "i", "in", "range", "(", "data_activations_layer", ".", "shape", "[", "0", "]", ")", ":", "query_res", "=", "self", ".", "query_objects", "[", "layer", "]", ".", "find_k_nearest_neighbors", "(", "data_activations_layer", "[", "i", "]", ",", "self", ".", "neighbors", ")", "try", ":", "knns_ind", "[", "layer", "]", "[", "i", ",", ":", "]", "=", "query_res", "except", ":", "# pylint: disable-msg=W0702", "knns_ind", "[", "layer", "]", "[", "i", ",", ":", "len", "(", "query_res", ")", "]", "=", "query_res", "knn_errors", "+=", "knns_ind", "[", "layer", "]", ".", "shape", "[", "1", "]", "-", "len", "(", "query_res", ")", "# Find labels of neighbors found in the training data.", "knns_labels", "[", "layer", "]", "=", "np", ".", "zeros", "(", "(", "nb_data", ",", "self", ".", "neighbors", ")", ",", "dtype", "=", "np", ".", "int32", ")", "for", "data_id", "in", "range", "(", "nb_data", ")", ":", "knns_labels", "[", "layer", "]", "[", "data_id", ",", ":", "]", "=", "self", ".", "train_labels", "[", "knns_ind", "[", "layer", "]", "[", "data_id", "]", "]", "return", "knns_ind", ",", "knns_labels" ]
Given a data_activation dictionary that contains a np array with activations for each layer, find the knns in the training data.
[ "Given", "a", "data_activation", "dictionary", "that", "contains", "a", "np", "array", "with", "activations", "for", "each", "layer", "find", "the", "knns", "in", "the", "training", "data", "." ]
python
train
43
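A numpy-only illustration of the per-layer preprocessing above: L2-normalize each activation row, then subtract a precomputed center.

import numpy as np

acts = np.random.rand(4, 8).astype(np.float32)        # 4 points, 8-dim layer
acts /= np.linalg.norm(acts, axis=1).reshape(-1, 1)   # unit-norm rows
center = acts.mean(axis=0)                            # stands in for self.centers[layer]
acts -= center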
apple/turicreate
src/external/coremltools_wrap/coremltools/deps/protobuf/python/setup.py
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/external/coremltools_wrap/coremltools/deps/protobuf/python/setup.py#L50-L77
def generate_proto(source, require = True): """Invokes the Protocol Compiler to generate a _pb2.py from the given .proto file. Does nothing if the output already exists and is newer than the input.""" if not require and not os.path.exists(source): return output = source.replace(".proto", "_pb2.py").replace("../src/", "") if (not os.path.exists(output) or (os.path.exists(source) and os.path.getmtime(source) > os.path.getmtime(output))): print("Generating %s..." % output) if not os.path.exists(source): sys.stderr.write("Can't find required file: %s\n" % source) sys.exit(-1) if protoc is None: sys.stderr.write( "protoc is not installed nor found in ../src. Please compile it " "or install the binary package.\n") sys.exit(-1) protoc_command = [ protoc, "-I../src", "-I.", "--python_out=.", source ] if subprocess.call(protoc_command) != 0: sys.exit(-1)
[ "def", "generate_proto", "(", "source", ",", "require", "=", "True", ")", ":", "if", "not", "require", "and", "not", "os", ".", "path", ".", "exists", "(", "source", ")", ":", "return", "output", "=", "source", ".", "replace", "(", "\".proto\"", ",", "\"_pb2.py\"", ")", ".", "replace", "(", "\"../src/\"", ",", "\"\"", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "output", ")", "or", "(", "os", ".", "path", ".", "exists", "(", "source", ")", "and", "os", ".", "path", ".", "getmtime", "(", "source", ")", ">", "os", ".", "path", ".", "getmtime", "(", "output", ")", ")", ")", ":", "print", "(", "\"Generating %s...\"", "%", "output", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "source", ")", ":", "sys", ".", "stderr", ".", "write", "(", "\"Can't find required file: %s\\n\"", "%", "source", ")", "sys", ".", "exit", "(", "-", "1", ")", "if", "protoc", "is", "None", ":", "sys", ".", "stderr", ".", "write", "(", "\"protoc is not installed nor found in ../src. Please compile it \"", "\"or install the binary package.\\n\"", ")", "sys", ".", "exit", "(", "-", "1", ")", "protoc_command", "=", "[", "protoc", ",", "\"-I../src\"", ",", "\"-I.\"", ",", "\"--python_out=.\"", ",", "source", "]", "if", "subprocess", ".", "call", "(", "protoc_command", ")", "!=", "0", ":", "sys", ".", "exit", "(", "-", "1", ")" ]
Invokes the Protocol Compiler to generate a _pb2.py from the given .proto file. Does nothing if the output already exists and is newer than the input.
[ "Invokes", "the", "Protocol", "Compiler", "to", "generate", "a", "_pb2", ".", "py", "from", "the", "given", ".", "proto", "file", ".", "Does", "nothing", "if", "the", "output", "already", "exists", "and", "is", "newer", "than", "the", "input", "." ]
python
train
33.464286
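Typical call sites inside the same setup.py, following the upstream protobuf convention; require=False marks protos that may be absent from a source tarball.

generate_proto("../src/google/protobuf/descriptor.proto")
generate_proto("google/protobuf/internal/test_util.proto", require=False)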
pybel/pybel
src/pybel/manager/citation_utils.py
https://github.com/pybel/pybel/blob/c8a7a1bdae4c475fa2a8c77f3a9a5f6d79556ca0/src/pybel/manager/citation_utils.py#L100-L133
def enrich_citation_model(manager, citation, p) -> bool: """Enrich a citation model with the information from PubMed. :param pybel.manager.Manager manager: :param Citation citation: A citation model :param dict p: The dictionary from PubMed E-Utils corresponding to d["result"][pmid] """ if 'error' in p: log.warning('Error downloading PubMed') return False citation.name = p['fulljournalname'] citation.title = p['title'] citation.volume = p['volume'] citation.issue = p['issue'] citation.pages = p['pages'] citation.first = manager.get_or_create_author(p['sortfirstauthor']) citation.last = manager.get_or_create_author(p['lastauthor']) if 'authors' in p: for author in p['authors']: author_model = manager.get_or_create_author(author['name']) if author_model not in citation.authors: citation.authors.append(author_model) publication_date = p['pubdate'] sanitized_publication_date = sanitize_date(publication_date) if sanitized_publication_date: citation.date = datetime.strptime(sanitized_publication_date, '%Y-%m-%d') else: log.info('result had date with strange format: %s', publication_date) return True
[ "def", "enrich_citation_model", "(", "manager", ",", "citation", ",", "p", ")", "->", "bool", ":", "if", "'error'", "in", "p", ":", "log", ".", "warning", "(", "'Error downloading PubMed'", ")", "return", "False", "citation", ".", "name", "=", "p", "[", "'fulljournalname'", "]", "citation", ".", "title", "=", "p", "[", "'title'", "]", "citation", ".", "volume", "=", "p", "[", "'volume'", "]", "citation", ".", "issue", "=", "p", "[", "'issue'", "]", "citation", ".", "pages", "=", "p", "[", "'pages'", "]", "citation", ".", "first", "=", "manager", ".", "get_or_create_author", "(", "p", "[", "'sortfirstauthor'", "]", ")", "citation", ".", "last", "=", "manager", ".", "get_or_create_author", "(", "p", "[", "'lastauthor'", "]", ")", "if", "'authors'", "in", "p", ":", "for", "author", "in", "p", "[", "'authors'", "]", ":", "author_model", "=", "manager", ".", "get_or_create_author", "(", "author", "[", "'name'", "]", ")", "if", "author_model", "not", "in", "citation", ".", "authors", ":", "citation", ".", "authors", ".", "append", "(", "author_model", ")", "publication_date", "=", "p", "[", "'pubdate'", "]", "sanitized_publication_date", "=", "sanitize_date", "(", "publication_date", ")", "if", "sanitized_publication_date", ":", "citation", ".", "date", "=", "datetime", ".", "strptime", "(", "sanitized_publication_date", ",", "'%Y-%m-%d'", ")", "else", ":", "log", ".", "info", "(", "'result had date with strange format: %s'", ",", "publication_date", ")", "return", "True" ]
Enrich a citation model with the information from PubMed. :param pybel.manager.Manager manager: :param Citation citation: A citation model :param dict p: The dictionary from PubMed E-Utils corresponding to d["result"][pmid]
[ "Enrich", "a", "citation", "model", "with", "the", "information", "from", "PubMed", "." ]
python
train
36.441176
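A standalone illustration of the final date-parsing step; the raw and sanitized values are examples, since sanitize_date itself is defined elsewhere in citation_utils.

from datetime import datetime

sanitized = '2016-11-04'   # e.g. what sanitize_date('2016 Nov 4') would yield
date = datetime.strptime(sanitized, '%Y-%m-%d')
print(date.year)           # 2016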
timothyhahn/rui
rui/rui.py
https://github.com/timothyhahn/rui/blob/ac9f587fb486760d77332866c6e876f78a810f74/rui/rui.py#L99-L106
def remove_system(self, system): ''' Removes system from world and kills system ''' if system in self._systems: self._systems.remove(system) else: raise UnmanagedSystemError(system)
[ "def", "remove_system", "(", "self", ",", "system", ")", ":", "if", "system", "in", "self", ".", "_systems", ":", "self", ".", "_systems", ".", "remove", "(", "system", ")", "else", ":", "raise", "UnmanagedSystemError", "(", "system", ")" ]
Removes system from world and kills system
[ "Removes", "system", "from", "world", "and", "kills", "system" ]
python
train
29.75
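A hedged usage sketch; World, add_system, and the system instance follow the rui package's ECS vocabulary but are assumptions here.

from rui.rui import World

world = World()
world.add_system(movement_system)      # movement_system: placeholder System
world.remove_system(movement_system)   # fine: it was managed
world.remove_system(movement_system)   # now raises UnmanagedSystemError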
ianepperson/telnetsrvlib
telnetsrv/telnetsrvlib.py
https://github.com/ianepperson/telnetsrvlib/blob/fac52a4a333c2d373d53d295a76a0bbd71e5d682/telnetsrv/telnetsrvlib.py#L619-L622
def _readline_echo(self, char, echo):
    """Echo a received character, move cursor etc..."""
    if self._readline_do_echo(echo):
        self.write(char)
[ "def", "_readline_echo", "(", "self", ",", "char", ",", "echo", ")", ":", "if", "self", ".", "_readline_do_echo", "(", "echo", ")", ":", "self", ".", "write", "(", "char", ")" ]
Echo a received character, move cursor etc...
[ "Echo", "a", "recieved", "character", "move", "cursor", "etc", "..." ]
python
train
41