Dataset columns (name, dtype, value range):

    text           string    lengths 89 – 104k
    code_tokens    list
    avg_line_len   float64   7.91 – 980
    score          float64   0 – 630
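Each row below lists a code sample (the text field), its code_tokens, and the row's avg_line_len and score values, labeled for readability. avg_line_len appears to be the mean character count per line of the text entry; how score is computed is not documented here. A minimal sketch of the presumed avg_line_len derivation — the helper name and the exact counting convention (newlines and trailing whitespace excluded) are assumptions:

def mean_line_length(text):
    # Hypothetical reconstruction of the avg_line_len column:
    # mean characters per '\n'-separated line, newline characters not counted.
    lines = text.splitlines()
    if not lines:
        return 0.0
    return sum(len(line) for line in lines) / len(lines)

Applied to the first sample below this gives roughly the recorded 39.5; any small deviation would point to a slightly different counting rule in the original pipeline.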
def EnsembleLearner(learners):
    """Given a list of learning algorithms, have them vote."""
    def train(dataset):
        predictors = [learner(dataset) for learner in learners]
        def predict(example):
            return mode(predictor(example) for predictor in predictors)
        return predict
    return train
[ "def", "EnsembleLearner", "(", "learners", ")", ":", "def", "train", "(", "dataset", ")", ":", "predictors", "=", "[", "learner", "(", "dataset", ")", "for", "learner", "in", "learners", "]", "def", "predict", "(", "example", ")", ":", "return", "mode", "(", "predictor", "(", "example", ")", "for", "predictor", "in", "predictors", ")", "return", "predict", "return", "train" ]
avg_line_len: 39.5
score: 16.75
def get_developer_certificate(self, developer_certificate_id, authorization, **kwargs):  # noqa: E501
    """Fetch an existing developer certificate to connect to the bootstrap server.  # noqa: E501

    This REST API is intended to be used by customers to fetch an existing
    developer certificate (a certificate that can be flashed into multiple
    devices to connect to bootstrap server).

    **Example usage:**

        curl -X GET \"http://api.us-east-1.mbedcloud.com/v3/developer-certificates/THE_CERTIFICATE_ID\" -H \"accept: application/json\" -H \"Authorization: Bearer THE_ACCESS_TOKEN\"  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass asynchronous=True

    >>> thread = api.get_developer_certificate(developer_certificate_id, authorization, asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str developer_certificate_id: A unique identifier for the developer certificate. (required)
    :param str authorization: Bearer {Access Token}. (required)
    :return: DeveloperCertificateResponseData
             If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('asynchronous'):
        return self.get_developer_certificate_with_http_info(developer_certificate_id, authorization, **kwargs)  # noqa: E501
    else:
        (data) = self.get_developer_certificate_with_http_info(developer_certificate_id, authorization, **kwargs)  # noqa: E501
        return data
[ "def", "get_developer_certificate", "(", "self", ",", "developer_certificate_id", ",", "authorization", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'asynchronous'", ")", ":", "return", "self", ".", "get_developer_certificate_with_http_info", "(", "developer_certificate_id", ",", "authorization", ",", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "get_developer_certificate_with_http_info", "(", "developer_certificate_id", ",", "authorization", ",", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
avg_line_len: 74.090909
score: 45.5
def _drain(self, cycles=None):
    """Activate the pump and let the flow go.

    This will call the process() method on each attached module until
    a StopIteration is raised, usually by a pump when it reached the EOF.

    A StopIteration is also raised when self.cycles was set and the
    number of cycles has reached that limit.
    """
    log.info("Now draining...")
    if not cycles:
        log.info("No cycle count, the pipeline may be drained forever.")

    if self.calibration:
        log.info("Setting up the detector calibration.")
        for module in self.modules:
            module.detector = self.calibration.get_detector()

    try:
        while not self._stop:
            cycle_start = timer()
            cycle_start_cpu = process_time()

            log.debug("Pumping blob #{0}".format(self._cycle_count))
            self.blob = Blob()

            for module in self.modules:
                if self.blob is None:
                    log.debug(
                        "Skipping {0}, due to empty blob.".format(module.name)
                    )
                    continue
                if module.only_if and not module.only_if.issubset(set(
                        self.blob.keys())):
                    log.debug(
                        "Skipping {0}, due to missing required key"
                        "'{1}'.".format(module.name, module.only_if)
                    )
                    continue

                if (self._cycle_count + 1) % module.every != 0:
                    log.debug(
                        "Skipping {0} (every {1} iterations).".format(
                            module.name, module.every
                        )
                    )
                    continue

                if module.blob_keys is not None:
                    blob_to_send = Blob({
                        k: self.blob[k]
                        for k in module.blob_keys
                        if k in self.blob
                    })
                else:
                    blob_to_send = self.blob

                log.debug("Processing {0} ".format(module.name))
                start = timer()
                start_cpu = process_time()
                new_blob = module(blob_to_send)
                if self.timeit or module.timeit:
                    self._timeit[module]['process'] \
                        .append(timer() - start)
                    self._timeit[module]['process_cpu'] \
                        .append(process_time() - start_cpu)

                if module.blob_keys is not None:
                    if new_blob is not None:
                        for key in new_blob.keys():
                            self.blob[key] = new_blob[key]
                else:
                    self.blob = new_blob

            self._timeit['cycles'].append(timer() - cycle_start)
            self._timeit['cycles_cpu'].append(
                process_time() - cycle_start_cpu
            )
            self._cycle_count += 1
            if cycles and self._cycle_count >= cycles:
                raise StopIteration
    except StopIteration:
        log.info("Nothing left to pump through.")
    return self.finish()
[ "def", "_drain", "(", "self", ",", "cycles", "=", "None", ")", ":", "log", ".", "info", "(", "\"Now draining...\"", ")", "if", "not", "cycles", ":", "log", ".", "info", "(", "\"No cycle count, the pipeline may be drained forever.\"", ")", "if", "self", ".", "calibration", ":", "log", ".", "info", "(", "\"Setting up the detector calibration.\"", ")", "for", "module", "in", "self", ".", "modules", ":", "module", ".", "detector", "=", "self", ".", "calibration", ".", "get_detector", "(", ")", "try", ":", "while", "not", "self", ".", "_stop", ":", "cycle_start", "=", "timer", "(", ")", "cycle_start_cpu", "=", "process_time", "(", ")", "log", ".", "debug", "(", "\"Pumping blob #{0}\"", ".", "format", "(", "self", ".", "_cycle_count", ")", ")", "self", ".", "blob", "=", "Blob", "(", ")", "for", "module", "in", "self", ".", "modules", ":", "if", "self", ".", "blob", "is", "None", ":", "log", ".", "debug", "(", "\"Skipping {0}, due to empty blob.\"", ".", "format", "(", "module", ".", "name", ")", ")", "continue", "if", "module", ".", "only_if", "and", "not", "module", ".", "only_if", ".", "issubset", "(", "set", "(", "self", ".", "blob", ".", "keys", "(", ")", ")", ")", ":", "log", ".", "debug", "(", "\"Skipping {0}, due to missing required key\"", "\"'{1}'.\"", ".", "format", "(", "module", ".", "name", ",", "module", ".", "only_if", ")", ")", "continue", "if", "(", "self", ".", "_cycle_count", "+", "1", ")", "%", "module", ".", "every", "!=", "0", ":", "log", ".", "debug", "(", "\"Skipping {0} (every {1} iterations).\"", ".", "format", "(", "module", ".", "name", ",", "module", ".", "every", ")", ")", "continue", "if", "module", ".", "blob_keys", "is", "not", "None", ":", "blob_to_send", "=", "Blob", "(", "{", "k", ":", "self", ".", "blob", "[", "k", "]", "for", "k", "in", "module", ".", "blob_keys", "if", "k", "in", "self", ".", "blob", "}", ")", "else", ":", "blob_to_send", "=", "self", ".", "blob", "log", ".", "debug", "(", "\"Processing {0} \"", ".", "format", "(", "module", ".", "name", ")", ")", "start", "=", "timer", "(", ")", "start_cpu", "=", "process_time", "(", ")", "new_blob", "=", "module", "(", "blob_to_send", ")", "if", "self", ".", "timeit", "or", "module", ".", "timeit", ":", "self", ".", "_timeit", "[", "module", "]", "[", "'process'", "]", ".", "append", "(", "timer", "(", ")", "-", "start", ")", "self", ".", "_timeit", "[", "module", "]", "[", "'process_cpu'", "]", ".", "append", "(", "process_time", "(", ")", "-", "start_cpu", ")", "if", "module", ".", "blob_keys", "is", "not", "None", ":", "if", "new_blob", "is", "not", "None", ":", "for", "key", "in", "new_blob", ".", "keys", "(", ")", ":", "self", ".", "blob", "[", "key", "]", "=", "new_blob", "[", "key", "]", "else", ":", "self", ".", "blob", "=", "new_blob", "self", ".", "_timeit", "[", "'cycles'", "]", ".", "append", "(", "timer", "(", ")", "-", "cycle_start", ")", "self", ".", "_timeit", "[", "'cycles_cpu'", "]", ".", "append", "(", "process_time", "(", ")", "-", "cycle_start_cpu", ")", "self", ".", "_cycle_count", "+=", "1", "if", "cycles", "and", "self", ".", "_cycle_count", ">=", "cycles", ":", "raise", "StopIteration", "except", "StopIteration", ":", "log", ".", "info", "(", "\"Nothing left to pump through.\"", ")", "return", "self", ".", "finish", "(", ")" ]
avg_line_len: 39.655172
score: 17.747126
def get_function_name(s):
    """
    Get the function name from a C-style function declaration string.

    :param str s: A C-style function declaration string.
    :return: The function name.
    :rtype: str
    """
    s = s.strip()
    if s.startswith("__attribute__"):
        # Remove "__attribute__ ((foobar))"
        if "))" not in s:
            raise ValueError("__attribute__ is present, but I cannot find double-right parenthesis in the function "
                             "declaration string.")
        s = s[s.index("))") + 2 : ].strip()
    if '(' not in s:
        raise ValueError("Cannot find any left parenthesis in the function declaration string.")
    func_name = s[:s.index('(')].strip()
    for i, ch in enumerate(reversed(func_name)):
        if ch == ' ':
            pos = len(func_name) - 1 - i
            break
    else:
        raise ValueError('Cannot find any space in the function declaration string.')
    func_name = func_name[pos + 1 : ]
    return func_name
[ "def", "get_function_name", "(", "s", ")", ":", "s", "=", "s", ".", "strip", "(", ")", "if", "s", ".", "startswith", "(", "\"__attribute__\"", ")", ":", "# Remove \"__attribute__ ((foobar))\"", "if", "\"))\"", "not", "in", "s", ":", "raise", "ValueError", "(", "\"__attribute__ is present, but I cannot find double-right parenthesis in the function \"", "\"declaration string.\"", ")", "s", "=", "s", "[", "s", ".", "index", "(", "\"))\"", ")", "+", "2", ":", "]", ".", "strip", "(", ")", "if", "'('", "not", "in", "s", ":", "raise", "ValueError", "(", "\"Cannot find any left parenthesis in the function declaration string.\"", ")", "func_name", "=", "s", "[", ":", "s", ".", "index", "(", "'('", ")", "]", ".", "strip", "(", ")", "for", "i", ",", "ch", "in", "enumerate", "(", "reversed", "(", "func_name", ")", ")", ":", "if", "ch", "==", "' '", ":", "pos", "=", "len", "(", "func_name", ")", "-", "1", "-", "i", "break", "else", ":", "raise", "ValueError", "(", "'Cannot find any space in the function declaration string.'", ")", "func_name", "=", "func_name", "[", "pos", "+", "1", ":", "]", "return", "func_name" ]
avg_line_len: 30.8125
score: 22.5625
def deleteByteArray(self, context, page, returnError):
    """please override"""
    returnError.contents.value = self.IllegalStateError
    raise NotImplementedError("You must override this method.")
[ "def", "deleteByteArray", "(", "self", ",", "context", ",", "page", ",", "returnError", ")", ":", "returnError", ".", "contents", ".", "value", "=", "self", ".", "IllegalStateError", "raise", "NotImplementedError", "(", "\"You must override this method.\"", ")" ]
avg_line_len: 52.25
score: 15
def diagnose_cluster(
    self,
    project_id,
    region,
    cluster_name,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Gets cluster diagnostic information. After the operation completes, the
    Operation.response field contains ``DiagnoseClusterOutputLocation``.

    Example:
        >>> from google.cloud import dataproc_v1beta2
        >>>
        >>> client = dataproc_v1beta2.ClusterControllerClient()
        >>>
        >>> # TODO: Initialize `project_id`:
        >>> project_id = ''
        >>>
        >>> # TODO: Initialize `region`:
        >>> region = ''
        >>>
        >>> # TODO: Initialize `cluster_name`:
        >>> cluster_name = ''
        >>>
        >>> response = client.diagnose_cluster(project_id, region, cluster_name)
        >>>
        >>> def callback(operation_future):
        ...     # Handle result.
        ...     result = operation_future.result()
        >>>
        >>> response.add_done_callback(callback)
        >>>
        >>> # Handle metadata.
        >>> metadata = response.metadata()

    Args:
        project_id (str): Required. The ID of the Google Cloud Platform project
            that the cluster belongs to.
        region (str): Required. The Cloud Dataproc region in which to handle
            the request.
        cluster_name (str): Required. The cluster name.
        retry (Optional[google.api_core.retry.Retry]): A retry object used to
            retry requests. If ``None`` is specified, requests will not be
            retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait for
            the request to complete. Note that if ``retry`` is specified, the
            timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture`
        instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request failed
            for any reason.
        google.api_core.exceptions.RetryError: If the request failed due to a
            retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    if "diagnose_cluster" not in self._inner_api_calls:
        self._inner_api_calls[
            "diagnose_cluster"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.diagnose_cluster,
            default_retry=self._method_configs["DiagnoseCluster"].retry,
            default_timeout=self._method_configs["DiagnoseCluster"].timeout,
            client_info=self._client_info,
        )

    request = clusters_pb2.DiagnoseClusterRequest(
        project_id=project_id, region=region, cluster_name=cluster_name
    )
    operation = self._inner_api_calls["diagnose_cluster"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
    return google.api_core.operation.from_gapic(
        operation,
        self.transport._operations_client,
        empty_pb2.Empty,
        metadata_type=clusters_pb2.DiagnoseClusterResults,
    )
[ "def", "diagnose_cluster", "(", "self", ",", "project_id", ",", "region", ",", "cluster_name", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"diagnose_cluster\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"diagnose_cluster\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "diagnose_cluster", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"DiagnoseCluster\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"DiagnoseCluster\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "clusters_pb2", ".", "DiagnoseClusterRequest", "(", "project_id", "=", "project_id", ",", "region", "=", "region", ",", "cluster_name", "=", "cluster_name", ")", "operation", "=", "self", ".", "_inner_api_calls", "[", "\"diagnose_cluster\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")", "return", "google", ".", "api_core", ".", "operation", ".", "from_gapic", "(", "operation", ",", "self", ".", "transport", ".", "_operations_client", ",", "empty_pb2", ".", "Empty", ",", "metadata_type", "=", "clusters_pb2", ".", "DiagnoseClusterResults", ",", ")" ]
avg_line_len: 41.364706
score: 22.988235
def getSecurityHkey(self, s):
    '''
    returns the necessary string value for an HKEY for the win32security module
    '''
    try:
        return self.hkeys_security[s]
    except KeyError:
        raise CommandExecutionError((
            'No HKEY named "{0}". It should be one of the following: {1}'
        ).format(s, ', '.join(self.hkeys_security)))
[ "def", "getSecurityHkey", "(", "self", ",", "s", ")", ":", "try", ":", "return", "self", ".", "hkeys_security", "[", "s", "]", "except", "KeyError", ":", "raise", "CommandExecutionError", "(", "(", "'No HKEY named \"{0}\". It should be one of the following: {1}'", ")", ".", "format", "(", "s", ",", "', '", ".", "join", "(", "self", ".", "hkeys_security", ")", ")", ")" ]
avg_line_len: 39.1
score: 21.7
def _parse_info(self, s):
    """Given relevant data from the ffmpeg output, set audio
    parameter fields on this object.
    """
    # Sample rate.
    match = re.search(r'(\d+) hz', s)
    if match:
        self.samplerate = int(match.group(1))
    else:
        self.samplerate = 0

    # Channel count.
    match = re.search(r'hz, ([^,]+),', s)
    if match:
        mode = match.group(1)
        if mode == 'stereo':
            self.channels = 2
        else:
            cmatch = re.match(r'(\d+)\.?(\d)?', mode)
            if cmatch:
                self.channels = sum(map(int, cmatch.group().split('.')))
            else:
                self.channels = 1
    else:
        self.channels = 0

    # Duration.
    match = re.search(
        r'duration: (\d+):(\d+):(\d+).(\d)', s
    )
    if match:
        durparts = list(map(int, match.groups()))
        duration = (
            durparts[0] * 60 * 60 +
            durparts[1] * 60 +
            durparts[2] +
            float(durparts[3]) / 10
        )
        self.duration = duration
    else:
        # No duration found.
        self.duration = 0
[ "def", "_parse_info", "(", "self", ",", "s", ")", ":", "# Sample rate.", "match", "=", "re", ".", "search", "(", "r'(\\d+) hz'", ",", "s", ")", "if", "match", ":", "self", ".", "samplerate", "=", "int", "(", "match", ".", "group", "(", "1", ")", ")", "else", ":", "self", ".", "samplerate", "=", "0", "# Channel count.", "match", "=", "re", ".", "search", "(", "r'hz, ([^,]+),'", ",", "s", ")", "if", "match", ":", "mode", "=", "match", ".", "group", "(", "1", ")", "if", "mode", "==", "'stereo'", ":", "self", ".", "channels", "=", "2", "else", ":", "cmatch", "=", "re", ".", "match", "(", "r'(\\d+)\\.?(\\d)?'", ",", "mode", ")", "if", "cmatch", ":", "self", ".", "channels", "=", "sum", "(", "map", "(", "int", ",", "cmatch", ".", "group", "(", ")", ".", "split", "(", "'.'", ")", ")", ")", "else", ":", "self", ".", "channels", "=", "1", "else", ":", "self", ".", "channels", "=", "0", "# Duration.", "match", "=", "re", ".", "search", "(", "r'duration: (\\d+):(\\d+):(\\d+).(\\d)'", ",", "s", ")", "if", "match", ":", "durparts", "=", "list", "(", "map", "(", "int", ",", "match", ".", "groups", "(", ")", ")", ")", "duration", "=", "(", "durparts", "[", "0", "]", "*", "60", "*", "60", "+", "durparts", "[", "1", "]", "*", "60", "+", "durparts", "[", "2", "]", "+", "float", "(", "durparts", "[", "3", "]", ")", "/", "10", ")", "self", ".", "duration", "=", "duration", "else", ":", "# No duration found.", "self", ".", "duration", "=", "0" ]
avg_line_len: 29.404762
score: 14.809524
def same_types(self, index1, index2):
    """Returns True if both symbol table elements are of the same type"""
    try:
        same = self.table[index1].type == self.table[index2].type != SharedData.TYPES.NO_TYPE
    except Exception:
        self.error()
    return same
[ "def", "same_types", "(", "self", ",", "index1", ",", "index2", ")", ":", "try", ":", "same", "=", "self", ".", "table", "[", "index1", "]", ".", "type", "==", "self", ".", "table", "[", "index2", "]", ".", "type", "!=", "SharedData", ".", "TYPES", ".", "NO_TYPE", "except", "Exception", ":", "self", ".", "error", "(", ")", "return", "same" ]
avg_line_len: 42.428571
score: 19.571429
def narrow_sqs(self, sqs):
    """
    TODO: Currently this is an AND conjunction. It should vary
    depending on the value of self.select_many.
    """
    if self.select_many:
        sq = None
        for value in self.get_applicable_values():
            q = SQ(**{self.field_name: sqs.query.clean(value.value)})
            if sq:
                sq = sq | q
            else:
                sq = q
        if sq:
            sqs = sqs.narrow(sq)
    else:
        for value in self.get_applicable_values():
            sqs = sqs.narrow(
                u'%s:"%s"' % (self.field_name, sqs.query.clean(value.value))
            )
    return sqs
[ "def", "narrow_sqs", "(", "self", ",", "sqs", ")", ":", "if", "self", ".", "select_many", ":", "sq", "=", "None", "for", "value", "in", "self", ".", "get_applicable_values", "(", ")", ":", "q", "=", "SQ", "(", "*", "*", "{", "self", ".", "field_name", ":", "sqs", ".", "query", ".", "clean", "(", "value", ".", "value", ")", "}", ")", "if", "sq", ":", "sq", "=", "sq", "|", "q", "else", ":", "sq", "=", "q", "if", "sq", ":", "sqs", "=", "sqs", ".", "narrow", "(", "sq", ")", "else", ":", "for", "value", "in", "self", ".", "get_applicable_values", "(", ")", ":", "sqs", "=", "sqs", ".", "narrow", "(", "u'%s:\"%s\"'", "%", "(", "self", ".", "field_name", ",", "sqs", ".", "query", ".", "clean", "(", "value", ".", "value", ")", ")", ")", "return", "sqs" ]
avg_line_len: 32.272727
score: 17.636364
def save(self, force_eav=False, **kwargs):
    """
    Saves entity instance and creates/updates related attribute instances.

    :param force_eav: if True, EAV attributes are saved along with entity.
    """
    # save entity
    super(BaseEntity, self).save(**kwargs)

    # TODO: think about use cases; are we doing it right?
    #if not self.check_eav_allowed():
    #    import warnings
    #    warnings.warn('EAV attributes are going to be saved along with entity'
    #                  ' despite %s.check_eav_allowed() returned False.'
    #                  % type(self), RuntimeWarning)

    # create/update EAV attributes
    for schema in self.get_schemata():
        value = getattr(self, schema.name, None)
        schema.save_attr(self, value)
[ "def", "save", "(", "self", ",", "force_eav", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# save entity", "super", "(", "BaseEntity", ",", "self", ")", ".", "save", "(", "*", "*", "kwargs", ")", "# TODO: think about use cases; are we doing it right?", "#if not self.check_eav_allowed():", "# import warnings", "# warnings.warn('EAV attributes are going to be saved along with entity'", "# ' despite %s.check_eav_allowed() returned False.'", "# % type(self), RuntimeWarning)", "# create/update EAV attributes", "for", "schema", "in", "self", ".", "get_schemata", "(", ")", ":", "value", "=", "getattr", "(", "self", ",", "schema", ".", "name", ",", "None", ")", "schema", ".", "save_attr", "(", "self", ",", "value", ")" ]
avg_line_len: 38.52381
score: 19.666667
def delete_servers(*servers, **options):
    '''
    Removes NTP servers configured on the device.

    :param servers: list of IP Addresses/Domain Names to be removed as NTP
        servers
    :param test (bool): discard loaded config. By default ``test`` is False
        (will not discard the changes)
    :param commit (bool): commit loaded config. By default ``commit`` is True
        (will commit the changes). Useful when the user does not want to
        commit after each change, but after a couple.

    By default this function will commit the config changes (if any). To load
    without committing, use the ``commit`` option. For dry run use the
    ``test`` argument.

    CLI Example:

    .. code-block:: bash

        salt '*' ntp.delete_servers 8.8.8.8 time.apple.com
        salt '*' ntp.delete_servers 172.17.17.1 test=True  # only displays the diff
        salt '*' ntp.delete_servers 192.168.0.1 commit=False  # preserves the changes, but does not commit
    '''
    test = options.pop('test', False)
    commit = options.pop('commit', True)
    return __salt__['net.load_template']('delete_ntp_servers',
                                         servers=servers,
                                         test=test,
                                         commit=commit,
                                         inherit_napalm_device=napalm_device)
[ "def", "delete_servers", "(", "*", "servers", ",", "*", "*", "options", ")", ":", "test", "=", "options", ".", "pop", "(", "'test'", ",", "False", ")", "commit", "=", "options", ".", "pop", "(", "'commit'", ",", "True", ")", "return", "__salt__", "[", "'net.load_template'", "]", "(", "'delete_ntp_servers'", ",", "servers", "=", "servers", ",", "test", "=", "test", ",", "commit", "=", "commit", ",", "inherit_napalm_device", "=", "napalm_device", ")" ]
avg_line_len: 39.441176
score: 27.911765
def flash_block(self, addr, data, smart_flash=True, chip_erase=None, progress_cb=None, fast_verify=False):
    """! @brief Flash a block of data."""
    assert self.region is not None
    assert self.region.contains_range(start=addr, length=len(data))

    fb = FlashBuilder(self)
    fb.add_data(addr, data)
    info = fb.program(chip_erase, progress_cb, smart_flash, fast_verify)
    return info
[ "def", "flash_block", "(", "self", ",", "addr", ",", "data", ",", "smart_flash", "=", "True", ",", "chip_erase", "=", "None", ",", "progress_cb", "=", "None", ",", "fast_verify", "=", "False", ")", ":", "assert", "self", ".", "region", "is", "not", "None", "assert", "self", ".", "region", ".", "contains_range", "(", "start", "=", "addr", ",", "length", "=", "len", "(", "data", ")", ")", "fb", "=", "FlashBuilder", "(", "self", ")", "fb", ".", "add_data", "(", "addr", ",", "data", ")", "info", "=", "fb", ".", "program", "(", "chip_erase", ",", "progress_cb", ",", "smart_flash", ",", "fast_verify", ")", "return", "info" ]
avg_line_len: 40
score: 19
def get_names_including_errors_by_namespace(graph: BELGraph, namespace: str) -> Set[str]:
    """Takes the names from the graph in a given namespace
    (:func:`pybel.struct.summary.get_names_by_namespace`) and the erroneous
    names from the same namespace (:func:`get_incorrect_names_by_namespace`)
    and returns them together as a unioned set.

    :return: The set of all correct and incorrect names from the given namespace in the graph
    """
    return get_names_by_namespace(graph, namespace) | get_incorrect_names_by_namespace(graph, namespace)
[ "def", "get_names_including_errors_by_namespace", "(", "graph", ":", "BELGraph", ",", "namespace", ":", "str", ")", "->", "Set", "[", "str", "]", ":", "return", "get_names_by_namespace", "(", "graph", ",", "namespace", ")", "|", "get_incorrect_names_by_namespace", "(", "graph", ",", "namespace", ")" ]
avg_line_len: 68.125
score: 37.75
def _calc_mask(self):
    """Computes a boolean mask from the user defined constraints."""
    mask = []
    for row in self._constraints:
        mask.append(tuple(x is None for x in row))
    return tuple(mask)
[ "def", "_calc_mask", "(", "self", ")", ":", "mask", "=", "[", "]", "for", "row", "in", "self", ".", "_constraints", ":", "mask", ".", "append", "(", "tuple", "(", "x", "is", "None", "for", "x", "in", "row", ")", ")", "return", "tuple", "(", "mask", ")" ]
avg_line_len: 34.166667
score: 13.166667
def command(state, args):
    """Add an anime from an AniDB search."""
    args = parser.parse_args(args[1:])
    if args.watching:
        rows = query.select.select(state.db, 'regexp IS NOT NULL', [], ['aid'])
        aids = [anime.aid for anime in rows]
    elif args.incomplete:
        rows = query.select.select(state.db, 'enddate IS NULL', [], ['aid'])
        aids = [anime.aid for anime in rows]
    else:
        aid = state.results.parse_aid(args.aid, default_key='db')
        aids = [aid]
    if not aids:
        return
    anime = request_anime(aids.pop())
    query.update.add(state.db, anime)
    print('Updated {} {}'.format(anime.aid, anime.title))
    for aid in aids:
        time.sleep(2)
        anime = request_anime(aid)
        query.update.add(state.db, anime)
        print('Updated {} {}'.format(anime.aid, anime.title))
[ "def", "command", "(", "state", ",", "args", ")", ":", "args", "=", "parser", ".", "parse_args", "(", "args", "[", "1", ":", "]", ")", "if", "args", ".", "watching", ":", "rows", "=", "query", ".", "select", ".", "select", "(", "state", ".", "db", ",", "'regexp IS NOT NULL'", ",", "[", "]", ",", "[", "'aid'", "]", ")", "aids", "=", "[", "anime", ".", "aid", "for", "anime", "in", "rows", "]", "elif", "args", ".", "incomplete", ":", "rows", "=", "query", ".", "select", ".", "select", "(", "state", ".", "db", ",", "'enddate IS NULL'", ",", "[", "]", ",", "[", "'aid'", "]", ")", "aids", "=", "[", "anime", ".", "aid", "for", "anime", "in", "rows", "]", "else", ":", "aid", "=", "state", ".", "results", ".", "parse_aid", "(", "args", ".", "aid", ",", "default_key", "=", "'db'", ")", "aids", "=", "[", "aid", "]", "if", "not", "aids", ":", "return", "anime", "=", "request_anime", "(", "aids", ".", "pop", "(", ")", ")", "query", ".", "update", ".", "add", "(", "state", ".", "db", ",", "anime", ")", "print", "(", "'Updated {} {}'", ".", "format", "(", "anime", ".", "aid", ",", "anime", ".", "title", ")", ")", "for", "aid", "in", "aids", ":", "time", ".", "sleep", "(", "2", ")", "anime", "=", "request_anime", "(", "aid", ")", "query", ".", "update", ".", "add", "(", "state", ".", "db", ",", "anime", ")", "print", "(", "'Updated {} {}'", ".", "format", "(", "anime", ".", "aid", ",", "anime", ".", "title", ")", ")" ]
avg_line_len: 37.636364
score: 15.909091
def get_model_core(model):
    """
    Return core view of given model or None
    """
    model_label = lower('%s.%s' % (model._meta.app_label, model._meta.object_name))
    return registered_model_cores.get(model_label)
[ "def", "get_model_core", "(", "model", ")", ":", "model_label", "=", "lower", "(", "'%s.%s'", "%", "(", "model", ".", "_meta", ".", "app_label", ",", "model", ".", "_meta", ".", "object_name", ")", ")", "return", "registered_model_cores", ".", "get", "(", "model_label", ")" ]
avg_line_len: 36
score: 11.666667
def ihfft(a, n=None, axis=-1, norm=None):
    """
    Compute the inverse FFT of a signal which has Hermitian symmetry.

    Parameters
    ----------
    a : array_like
        Input array.
    n : int, optional
        Length of the inverse FFT. Number of points along transformation
        axis in the input to use. If `n` is smaller than the length of the
        input, the input is cropped. If it is larger, the input is padded
        with zeros. If `n` is not given, the length of the input along the
        axis specified by `axis` is used.
    axis : int, optional
        Axis over which to compute the inverse FFT. If not given, the last
        axis is used.
    norm : {None, "ortho"}, optional
        .. versionadded:: 1.10.0

        Normalization mode (see `numpy.fft`). Default is None.

    Returns
    -------
    out : complex ndarray
        The truncated or zero-padded input, transformed along the axis
        indicated by `axis`, or the last one if `axis` is not specified.
        If `n` is even, the length of the transformed axis is ``(n/2)+1``.
        If `n` is odd, the length is ``(n+1)/2``.

    See also
    --------
    hfft, irfft

    Notes
    -----
    `hfft`/`ihfft` are a pair analogous to `rfft`/`irfft`, but for the
    opposite case: here the signal has Hermitian symmetry in the time
    domain and is real in the frequency domain. So here it's `hfft` for
    which you must supply the length of the result if it is to be odd:
    ``ihfft(hfft(a), len(a)) == a``, within numerical accuracy.

    Examples
    --------
    >>> spectrum = np.array([ 15, -4, 0, -1, 0, -4])
    >>> np.fft.ifft(spectrum)
    array([ 1.+0.j, 2.-0.j, 3.+0.j, 4.+0.j, 3.+0.j, 2.-0.j])
    >>> np.fft.ihfft(spectrum)
    array([ 1.-0.j, 2.-0.j, 3.-0.j, 4.-0.j])

    """
    # The copy may be required for multithreading.
    a = array(a, copy=True, dtype=float)
    if n is None:
        n = a.shape[axis]
    unitary = _unitary(norm)
    output = conjugate(rfft(a, n, axis))
    return output * (1 / (sqrt(n) if unitary else n))
[ "def", "ihfft", "(", "a", ",", "n", "=", "None", ",", "axis", "=", "-", "1", ",", "norm", "=", "None", ")", ":", "# The copy may be required for multithreading.", "a", "=", "array", "(", "a", ",", "copy", "=", "True", ",", "dtype", "=", "float", ")", "if", "n", "is", "None", ":", "n", "=", "a", ".", "shape", "[", "axis", "]", "unitary", "=", "_unitary", "(", "norm", ")", "output", "=", "conjugate", "(", "rfft", "(", "a", ",", "n", ",", "axis", ")", ")", "return", "output", "*", "(", "1", "/", "(", "sqrt", "(", "n", ")", "if", "unitary", "else", "n", ")", ")" ]
avg_line_len: 35.192982
score: 22.631579
def proxy_num(self, protocol=None):
    """Get the number of proxies in the pool

    Args:
        protocol (str, optional): 'http' or 'https' or None. (default None)

    Returns:
        If protocol is None, return the total number of proxies, otherwise,
        return the number of proxies of corresponding protocol.
    """
    http_num = len(self.proxies['http'])
    https_num = len(self.proxies['https'])
    if protocol == 'http':
        return http_num
    elif protocol == 'https':
        return https_num
    else:
        return http_num + https_num
[ "def", "proxy_num", "(", "self", ",", "protocol", "=", "None", ")", ":", "http_num", "=", "len", "(", "self", ".", "proxies", "[", "'http'", "]", ")", "https_num", "=", "len", "(", "self", ".", "proxies", "[", "'https'", "]", ")", "if", "protocol", "==", "'http'", ":", "return", "http_num", "elif", "protocol", "==", "'https'", ":", "return", "https_num", "else", ":", "return", "http_num", "+", "https_num" ]
avg_line_len: 33.777778
score: 17.833333
def requiv_contact_L1(q, sma, compno, **kwargs):
    """
    for the contact case we can make the assumption of aligned, synchronous,
    and circular
    """
    return requiv_L1(q=q, syncpar=1, ecc=0, sma=sma,
                     incl_star=0, long_an_star=0,
                     incl_orb=0, long_an_orb=0,
                     compno=compno, **kwargs)
[ "def", "requiv_contact_L1", "(", "q", ",", "sma", ",", "compno", ",", "*", "*", "kwargs", ")", ":", "return", "requiv_L1", "(", "q", "=", "q", ",", "syncpar", "=", "1", ",", "ecc", "=", "0", ",", "sma", "=", "sma", ",", "incl_star", "=", "0", ",", "long_an_star", "=", "0", ",", "incl_orb", "=", "0", ",", "long_an_orb", "=", "0", ",", "compno", "=", "compno", ",", "*", "*", "kwargs", ")" ]
avg_line_len: 56.8
score: 30
def get_instance(
    self,
    name,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Gets the details of a specific Redis instance.

    Example:
        >>> from google.cloud import redis_v1beta1
        >>>
        >>> client = redis_v1beta1.CloudRedisClient()
        >>>
        >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
        >>>
        >>> response = client.get_instance(name)

    Args:
        name (str): Required. Redis instance resource name using the form:
            ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
            where ``location_id`` refers to a GCP region
        retry (Optional[google.api_core.retry.Retry]): A retry object used to
            retry requests. If ``None`` is specified, requests will not be
            retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait for
            the request to complete. Note that if ``retry`` is specified, the
            timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.redis_v1beta1.types.Instance` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request failed
            for any reason.
        google.api_core.exceptions.RetryError: If the request failed due to a
            retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    if "get_instance" not in self._inner_api_calls:
        self._inner_api_calls[
            "get_instance"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.get_instance,
            default_retry=self._method_configs["GetInstance"].retry,
            default_timeout=self._method_configs["GetInstance"].timeout,
            client_info=self._client_info,
        )

    request = cloud_redis_pb2.GetInstanceRequest(name=name)
    return self._inner_api_calls["get_instance"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
[ "def", "get_instance", "(", "self", ",", "name", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"get_instance\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"get_instance\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "get_instance", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"GetInstance\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"GetInstance\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "cloud_redis_pb2", ".", "GetInstanceRequest", "(", "name", "=", "name", ")", "return", "self", ".", "_inner_api_calls", "[", "\"get_instance\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
avg_line_len: 42.894737
score: 24.649123
def get_timestamp(dt=None):
    '''
    Return current timestamp if @dt is None else return timestamp of @dt.

    >>> t = datetime.datetime(2015, 5, 21)
    >>> get_timestamp(t)
    1432166400
    '''
    if dt is None:
        dt = datetime.datetime.utcnow()
    t = dt.utctimetuple()
    return calendar.timegm(t)
[ "def", "get_timestamp", "(", "dt", "=", "None", ")", ":", "if", "dt", "is", "None", ":", "dt", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "t", "=", "dt", ".", "utctimetuple", "(", ")", "return", "calendar", ".", "timegm", "(", "t", ")" ]
avg_line_len: 21.642857
score: 20.785714
def process(self, content):
    """
    Process (marshal) the tag with the specified value using the
    optional type information.

    @param content: The content to process.
    @type content: L{Object}
    """
    log.debug('processing:\n%s', content)
    self.reset()
    if content.tag is None:
        content.tag = content.value.__class__.__name__
    document = Document()
    if isinstance(content.value, Property):
        root = self.node(content)  # root is never used?
        self.append(document, content)
    else:
        self.append(document, content)
    return document.root()
[ "def", "process", "(", "self", ",", "content", ")", ":", "log", ".", "debug", "(", "'processing:\\n%s'", ",", "content", ")", "self", ".", "reset", "(", ")", "if", "content", ".", "tag", "is", "None", ":", "content", ".", "tag", "=", "content", ".", "value", ".", "__class__", ".", "__name__", "document", "=", "Document", "(", ")", "if", "isinstance", "(", "content", ".", "value", ",", "Property", ")", ":", "root", "=", "self", ".", "node", "(", "content", ")", "# root is never used?", "self", ".", "append", "(", "document", ",", "content", ")", "else", ":", "self", ".", "append", "(", "document", ",", "content", ")", "return", "document", ".", "root", "(", ")" ]
avg_line_len: 35.944444
score: 10.722222
def do_p(self, node):
    """handle <p> tag

    The <p> tag is the core of the grammar. It can contain almost
    anything: freeform text, <choice> tags, <xref> tags, even other
    <p> tags. If a "class='sentence'" attribute is found, a flag
    is set and the next word will be capitalized. If a "chance='X'"
    attribute is found, there is an X% chance that the tag will be
    evaluated (and therefore a (100-X)% chance that it will be
    completely ignored)
    """
    keys = node.attributes.keys()
    if "class" in keys:
        if node.attributes["class"].value == "sentence":
            self.capitalizeNextWord = 1
    if "chance" in keys:
        chance = int(node.attributes["chance"].value)
        doit = (chance > random.randrange(100))
    else:
        doit = 1
    if doit:
        for child in node.childNodes:
            self.parse(child)
[ "def", "do_p", "(", "self", ",", "node", ")", ":", "keys", "=", "node", ".", "attributes", ".", "keys", "(", ")", "if", "\"class\"", "in", "keys", ":", "if", "node", ".", "attributes", "[", "\"class\"", "]", ".", "value", "==", "\"sentence\"", ":", "self", ".", "capitalizeNextWord", "=", "1", "if", "\"chance\"", "in", "keys", ":", "chance", "=", "int", "(", "node", ".", "attributes", "[", "\"chance\"", "]", ".", "value", ")", "doit", "=", "(", "chance", ">", "random", ".", "randrange", "(", "100", ")", ")", "else", ":", "doit", "=", "1", "if", "doit", ":", "for", "child", "in", "node", ".", "childNodes", ":", "self", ".", "parse", "(", "child", ")" ]
avg_line_len: 42.818182
score: 18.727273
def getMetricsTimeline(tmaster, component_name, metric_names, instances,
                       start_time, end_time, callback=None):
    """
    Get the specified metrics for the given component name of this topology.

    Returns the following dict on success:
    {
      "timeline": {
        <metricname>: {
          <instance>: {
            <start_time> : <numeric value>,
            <start_time> : <numeric value>,
            ...
          }
          ...
        }, ...
      },
      "starttime": <numeric value>,
      "endtime": <numeric value>,
      "component": "..."
    }

    Returns the following dict on failure:
    {
      "message": "..."
    }
    """
    # Tmaster is the proto object and must have host and port for stats.
    if not tmaster or not tmaster.host or not tmaster.stats_port:
        raise Exception("No Tmaster found")

    host = tmaster.host
    port = tmaster.stats_port

    # Create the proto request object to get metrics.
    metricRequest = tmaster_pb2.MetricRequest()
    metricRequest.component_name = component_name

    # If no instances are given, metrics for all instances
    # are fetched by default.
    if len(instances) > 0:
        for instance in instances:
            metricRequest.instance_id.append(instance)

    for metricName in metric_names:
        metricRequest.metric.append(metricName)

    metricRequest.explicit_interval.start = start_time
    metricRequest.explicit_interval.end = end_time
    metricRequest.minutely = True

    # Serialize the metricRequest to send as a payload
    # with the HTTP request.
    metricRequestString = metricRequest.SerializeToString()

    # Form and send the http request.
    url = "http://{0}:{1}/stats".format(host, port)
    request = tornado.httpclient.HTTPRequest(url,
                                             body=metricRequestString,
                                             method='POST',
                                             request_timeout=5)

    Log.debug("Making HTTP call to fetch metrics")
    Log.debug("url: " + url)
    try:
        client = tornado.httpclient.AsyncHTTPClient()
        result = yield client.fetch(request)
        Log.debug("HTTP call complete.")
    except tornado.httpclient.HTTPError as e:
        raise Exception(str(e))

    # Check the response code - error if it is in 400s or 500s
    responseCode = result.code
    if responseCode >= 400:
        message = "Error in getting metrics from Tmaster, code: " + str(responseCode)
        Log.error(message)
        raise Exception(message)

    # Parse the response from tmaster.
    metricResponse = tmaster_pb2.MetricResponse()
    metricResponse.ParseFromString(result.body)

    if metricResponse.status.status == common_pb2.NOTOK:
        if metricResponse.status.HasField("message"):
            Log.warn("Received response from Tmaster: %s",
                     metricResponse.status.message)

    # Form the response.
    ret = {}
    ret["starttime"] = start_time
    ret["endtime"] = end_time
    ret["component"] = component_name
    ret["timeline"] = {}

    # Loop through all the metrics
    # One instance corresponds to one metric, which can have
    # multiple IndividualMetrics for each metricname requested.
    for metric in metricResponse.metric:
        instance = metric.instance_id

        # Loop through all individual metrics.
        for im in metric.metric:
            metricname = im.name
            if metricname not in ret["timeline"]:
                ret["timeline"][metricname] = {}
            if instance not in ret["timeline"][metricname]:
                ret["timeline"][metricname][instance] = {}

            # We get minutely metrics.
            # Interval-values correspond to the minutely mark for which
            # this metric value corresponds to.
            for interval_value in im.interval_values:
                ret["timeline"][metricname][instance][interval_value.interval.start] = interval_value.value

    raise tornado.gen.Return(ret)
[ "def", "getMetricsTimeline", "(", "tmaster", ",", "component_name", ",", "metric_names", ",", "instances", ",", "start_time", ",", "end_time", ",", "callback", "=", "None", ")", ":", "# Tmaster is the proto object and must have host and port for stats.", "if", "not", "tmaster", "or", "not", "tmaster", ".", "host", "or", "not", "tmaster", ".", "stats_port", ":", "raise", "Exception", "(", "\"No Tmaster found\"", ")", "host", "=", "tmaster", ".", "host", "port", "=", "tmaster", ".", "stats_port", "# Create the proto request object to get metrics.", "metricRequest", "=", "tmaster_pb2", ".", "MetricRequest", "(", ")", "metricRequest", ".", "component_name", "=", "component_name", "# If no instances are give, metrics for all instances", "# are fetched by default.", "if", "len", "(", "instances", ")", ">", "0", ":", "for", "instance", "in", "instances", ":", "metricRequest", ".", "instance_id", ".", "append", "(", "instance", ")", "for", "metricName", "in", "metric_names", ":", "metricRequest", ".", "metric", ".", "append", "(", "metricName", ")", "metricRequest", ".", "explicit_interval", ".", "start", "=", "start_time", "metricRequest", ".", "explicit_interval", ".", "end", "=", "end_time", "metricRequest", ".", "minutely", "=", "True", "# Serialize the metricRequest to send as a payload", "# with the HTTP request.", "metricRequestString", "=", "metricRequest", ".", "SerializeToString", "(", ")", "# Form and send the http request.", "url", "=", "\"http://{0}:{1}/stats\"", ".", "format", "(", "host", ",", "port", ")", "request", "=", "tornado", ".", "httpclient", ".", "HTTPRequest", "(", "url", ",", "body", "=", "metricRequestString", ",", "method", "=", "'POST'", ",", "request_timeout", "=", "5", ")", "Log", ".", "debug", "(", "\"Making HTTP call to fetch metrics\"", ")", "Log", ".", "debug", "(", "\"url: \"", "+", "url", ")", "try", ":", "client", "=", "tornado", ".", "httpclient", ".", "AsyncHTTPClient", "(", ")", "result", "=", "yield", "client", ".", "fetch", "(", "request", ")", "Log", ".", "debug", "(", "\"HTTP call complete.\"", ")", "except", "tornado", ".", "httpclient", ".", "HTTPError", "as", "e", ":", "raise", "Exception", "(", "str", "(", "e", ")", ")", "# Check the response code - error if it is in 400s or 500s", "responseCode", "=", "result", ".", "code", "if", "responseCode", ">=", "400", ":", "message", "=", "\"Error in getting metrics from Tmaster, code: \"", "+", "responseCode", "Log", ".", "error", "(", "message", ")", "raise", "Exception", "(", "message", ")", "# Parse the response from tmaster.", "metricResponse", "=", "tmaster_pb2", ".", "MetricResponse", "(", ")", "metricResponse", ".", "ParseFromString", "(", "result", ".", "body", ")", "if", "metricResponse", ".", "status", ".", "status", "==", "common_pb2", ".", "NOTOK", ":", "if", "metricResponse", ".", "status", ".", "HasField", "(", "\"message\"", ")", ":", "Log", ".", "warn", "(", "\"Received response from Tmaster: %s\"", ",", "metricResponse", ".", "status", ".", "message", ")", "# Form the response.", "ret", "=", "{", "}", "ret", "[", "\"starttime\"", "]", "=", "start_time", "ret", "[", "\"endtime\"", "]", "=", "end_time", "ret", "[", "\"component\"", "]", "=", "component_name", "ret", "[", "\"timeline\"", "]", "=", "{", "}", "# Loop through all the metrics", "# One instance corresponds to one metric, which can have", "# multiple IndividualMetrics for each metricname requested.", "for", "metric", "in", "metricResponse", ".", "metric", ":", "instance", "=", "metric", ".", "instance_id", "# Loop through all 
individual metrics.", "for", "im", "in", "metric", ".", "metric", ":", "metricname", "=", "im", ".", "name", "if", "metricname", "not", "in", "ret", "[", "\"timeline\"", "]", ":", "ret", "[", "\"timeline\"", "]", "[", "metricname", "]", "=", "{", "}", "if", "instance", "not", "in", "ret", "[", "\"timeline\"", "]", "[", "metricname", "]", ":", "ret", "[", "\"timeline\"", "]", "[", "metricname", "]", "[", "instance", "]", "=", "{", "}", "# We get minutely metrics.", "# Interval-values correspond to the minutely mark for which", "# this metric value corresponds to.", "for", "interval_value", "in", "im", ".", "interval_values", ":", "ret", "[", "\"timeline\"", "]", "[", "metricname", "]", "[", "instance", "]", "[", "interval_value", ".", "interval", ".", "start", "]", "=", "interval_value", ".", "value", "raise", "tornado", ".", "gen", ".", "Return", "(", "ret", ")" ]
avg_line_len: 30.966667
score: 18.15
def allow_client_incoming(self, client_name):
    """
    Allow the user of this token to accept incoming connections.

    :param str client_name: Client name to accept calls from
    """
    self.client_name = client_name
    self.capabilities['incoming'] = ScopeURI('client', 'incoming', {'clientName': client_name})
[ "def", "allow_client_incoming", "(", "self", ",", "client_name", ")", ":", "self", ".", "client_name", "=", "client_name", "self", ".", "capabilities", "[", "'incoming'", "]", "=", "ScopeURI", "(", "'client'", ",", "'incoming'", ",", "{", "'clientName'", ":", "client_name", "}", ")" ]
avg_line_len: 42
score: 19.75
def BL(self, params):
    """
    BL label

    Branch to the label, storing the next instruction in the Link Register
    """
    label = self.get_one_parameter(self.ONE_PARAMETER, params)

    self.check_arguments(label_exists=(label,))

    # TODO check if label is within +- 16 MB

    # BL label
    def BL_func():
        self.register['LR'] = self.register['PC']  # No need for the + 1, PC already points to the next instruction
        self.register['PC'] = self.labels[label]

    return BL_func
[ "def", "BL", "(", "self", ",", "params", ")", ":", "label", "=", "self", ".", "get_one_parameter", "(", "self", ".", "ONE_PARAMETER", ",", "params", ")", "self", ".", "check_arguments", "(", "label_exists", "=", "(", "label", ",", ")", ")", "# TODO check if label is within +- 16 MB", "# BL label", "def", "BL_func", "(", ")", ":", "self", ".", "register", "[", "'LR'", "]", "=", "self", ".", "register", "[", "'PC'", "]", "# No need for the + 1, PC already points to the next instruction", "self", ".", "register", "[", "'PC'", "]", "=", "self", ".", "labels", "[", "label", "]", "return", "BL_func" ]
avg_line_len: 31.470588
score: 25.588235
def collapse_user(fp):
    """
    Converts a path back to ~/ from expanduser()
    """
    home_dir = os.path.expanduser("~")
    abs_path = os.path.abspath(fp)
    return abs_path.replace(home_dir, "~")
[ "def", "collapse_user", "(", "fp", ")", ":", "home_dir", "=", "os", ".", "path", ".", "expanduser", "(", "\"~\"", ")", "abs_path", "=", "os", ".", "path", ".", "abspath", "(", "fp", ")", "return", "abs_path", ".", "replace", "(", "home_dir", ",", "\"~\"", ")" ]
avg_line_len: 28.285714
score: 5.142857
def get(self, sid):
    """
    Constructs a MemberContext

    :param sid: The unique string that identifies the resource

    :returns: twilio.rest.chat.v2.service.channel.member.MemberContext
    :rtype: twilio.rest.chat.v2.service.channel.member.MemberContext
    """
    return MemberContext(
        self._version,
        service_sid=self._solution['service_sid'],
        channel_sid=self._solution['channel_sid'],
        sid=sid,
    )
[ "def", "get", "(", "self", ",", "sid", ")", ":", "return", "MemberContext", "(", "self", ".", "_version", ",", "service_sid", "=", "self", ".", "_solution", "[", "'service_sid'", "]", ",", "channel_sid", "=", "self", ".", "_solution", "[", "'channel_sid'", "]", ",", "sid", "=", "sid", ",", ")" ]
avg_line_len: 31.933333
score: 20.2
def tags_check_convert(cls, tags):
    """Accept one tag as string or multiple tags in list of strings.

    Returns list (with tags in unicode form) or raises ValueError
    """
    # single string check comes first since string is also a Sequence
    if isinstance(tags, string_types):
        return [cls.__tag_check_convert(tags)]
    elif isinstance(tags, Sequence):
        if not tags:
            raise ValueError("Tag list is empty")
        return [cls.__tag_check_convert(tag) for tag in tags]
    else:
        raise ValueError("tags must be a single string or list of sequence of strings")
[ "def", "tags_check_convert", "(", "cls", ",", "tags", ")", ":", "# single string check comes first since string is also a Sequence", "if", "isinstance", "(", "tags", ",", "string_types", ")", ":", "return", "[", "cls", ".", "__tag_check_convert", "(", "tags", ")", "]", "elif", "isinstance", "(", "tags", ",", "Sequence", ")", ":", "if", "not", "tags", ":", "raise", "ValueError", "(", "\"Tag list is empty\"", ")", "return", "[", "cls", ".", "__tag_check_convert", "(", "tag", ")", "for", "tag", "in", "tags", "]", "else", ":", "raise", "ValueError", "(", "\"tags must be a single string or list of sequence of strings\"", ")" ]
avg_line_len: 49
score: 16.307692
def to_d(l):
    """
    Converts list of dicts to dict.
    """
    _d = {}
    for x in l:
        for k, v in x.items():
            _d[k] = v
    return _d
[ "def", "to_d", "(", "l", ")", ":", "_d", "=", "{", "}", "for", "x", "in", "l", ":", "for", "k", ",", "v", "in", "x", ".", "items", "(", ")", ":", "_d", "[", "k", "]", "=", "v", "return", "_d" ]
avg_line_len: 16.777778
score: 15.888889
def conditional_resize(image, ratio, width=None, height=None, upcrop=True, namespace="resized"):
    """
    Crop the image based on a ratio. If upcrop is true, crops the images
    that have a higher ratio than the given ratio; if false, crops the
    images that have a lower ratio.
    """
    aspect = float(image.width) / float(image.height)
    crop = False
    if (aspect > ratio and upcrop) or (aspect <= ratio and not upcrop):
        crop = True
    return resize_lazy(image=image, width=width, height=height, crop=crop,
                       namespace=namespace, as_url=True)
[ "def", "conditional_resize", "(", "image", ",", "ratio", ",", "width", "=", "None", ",", "height", "=", "None", ",", "upcrop", "=", "True", ",", "namespace", "=", "\"resized\"", ")", ":", "aspect", "=", "float", "(", "image", ".", "width", ")", "/", "float", "(", "image", ".", "height", ")", "crop", "=", "False", "if", "(", "aspect", ">", "ratio", "and", "upcrop", ")", "or", "(", "aspect", "<=", "ratio", "and", "not", "upcrop", ")", ":", "crop", "=", "True", "return", "resize_lazy", "(", "image", "=", "image", ",", "width", "=", "width", ",", "height", "=", "height", ",", "crop", "=", "crop", ",", "namespace", "=", "namespace", ",", "as_url", "=", "True", ")" ]
avg_line_len: 37.1875
score: 22.5625
def render_toctrees(kb_app: kb, sphinx_app: Sphinx, doctree: doctree,
                    fromdocname: str):
    """ Look in doctrees for toctree and replace with custom render """
    # Only do any of this if toctree support is turned on in KaybeeSettings.
    # By default, this is off.
    settings: KaybeeSettings = sphinx_app.config.kaybee_settings
    if not settings.articles.use_toctree:
        return

    # Setup a template and context
    builder: StandaloneHTMLBuilder = sphinx_app.builder
    env: BuildEnvironment = sphinx_app.env

    # Toctree support. First, get the registered toctree class, if any
    registered_toctree = ToctreeAction.get_for_context(kb_app)
    for node in doctree.traverse(toctree):
        if node.attributes['hidden']:
            continue
        custom_toctree = registered_toctree(fromdocname)
        context = builder.globalcontext.copy()
        context['sphinx_app'] = sphinx_app

        # Get the toctree entries. We only handle one level of depth for
        # now. To go further, we need to recurse like sphinx's
        # adapters.toctree._toctree_add_classes function
        entries = node.attributes['entries']

        # The challenge here is that some items in a toctree
        # might not be resources in our "database". So we have
        # to ask Sphinx to get us the titles.
        custom_toctree.set_entries(entries, env.titles,
                                   sphinx_app.env.resources)
        output = custom_toctree.render(builder, context, sphinx_app)

        # Put the output into the node contents
        listing = [nodes.raw('', output, format='html')]
        node.replace_self(listing)
[ "def", "render_toctrees", "(", "kb_app", ":", "kb", ",", "sphinx_app", ":", "Sphinx", ",", "doctree", ":", "doctree", ",", "fromdocname", ":", "str", ")", ":", "# Only do any of this if toctree support is turned on in KaybeeSettings.", "# By default, this is off.", "settings", ":", "KaybeeSettings", "=", "sphinx_app", ".", "config", ".", "kaybee_settings", "if", "not", "settings", ".", "articles", ".", "use_toctree", ":", "return", "# Setup a template and context", "builder", ":", "StandaloneHTMLBuilder", "=", "sphinx_app", ".", "builder", "env", ":", "BuildEnvironment", "=", "sphinx_app", ".", "env", "# Toctree support. First, get the registered toctree class, if any", "registered_toctree", "=", "ToctreeAction", ".", "get_for_context", "(", "kb_app", ")", "for", "node", "in", "doctree", ".", "traverse", "(", "toctree", ")", ":", "if", "node", ".", "attributes", "[", "'hidden'", "]", ":", "continue", "custom_toctree", "=", "registered_toctree", "(", "fromdocname", ")", "context", "=", "builder", ".", "globalcontext", ".", "copy", "(", ")", "context", "[", "'sphinx_app'", "]", "=", "sphinx_app", "# Get the toctree entries. We only handle one level of depth for", "# now. To go further, we need to recurse like sphinx's", "# adapters.toctree._toctree_add_classes function", "entries", "=", "node", ".", "attributes", "[", "'entries'", "]", "# The challenge here is that some items in a toctree", "# might not be resources in our \"database\". So we have", "# to ask Sphinx to get us the titles.", "custom_toctree", ".", "set_entries", "(", "entries", ",", "env", ".", "titles", ",", "sphinx_app", ".", "env", ".", "resources", ")", "output", "=", "custom_toctree", ".", "render", "(", "builder", ",", "context", ",", "sphinx_app", ")", "# Put the output into the node contents", "listing", "=", "[", "nodes", ".", "raw", "(", "''", ",", "output", ",", "format", "=", "'html'", ")", "]", "node", ".", "replace_self", "(", "listing", ")" ]
avg_line_len: 42.894737
score: 18.552632
def get(key, default=None):
    '''
    Get a (list of) value(s) from the minion datastore

    .. versionadded:: 2015.8.0

    CLI Example:

    .. code-block:: bash

        salt '*' data.get key
        salt '*' data.get '["key1", "key2"]'
    '''
    store = load()

    if isinstance(key, six.string_types):
        return store.get(key, default)
    elif default is None:
        return [store[k] for k in key if k in store]
    else:
        return [store.get(k, default) for k in key]
[ "def", "get", "(", "key", ",", "default", "=", "None", ")", ":", "store", "=", "load", "(", ")", "if", "isinstance", "(", "key", ",", "six", ".", "string_types", ")", ":", "return", "store", ".", "get", "(", "key", ",", "default", ")", "elif", "default", "is", "None", ":", "return", "[", "store", "[", "k", "]", "for", "k", "in", "key", "if", "k", "in", "store", "]", "else", ":", "return", "[", "store", ".", "get", "(", "k", ",", "default", ")", "for", "k", "in", "key", "]" ]
avg_line_len: 22.47619
score: 21.52381
def FromDictionary(cls, dictionary):
    """Initializes from user specified dictionary.

    Args:
        dictionary: dict of user specified attributes

    Returns:
        GitkitUser object
    """
    if 'user_id' in dictionary:
        raise errors.GitkitClientError('use localId instead')
    if 'localId' not in dictionary:
        raise errors.GitkitClientError('must specify localId')
    if 'email' not in dictionary:
        raise errors.GitkitClientError('must specify email')
    return cls(decode=False, **dictionary)
[ "def", "FromDictionary", "(", "cls", ",", "dictionary", ")", ":", "if", "'user_id'", "in", "dictionary", ":", "raise", "errors", ".", "GitkitClientError", "(", "'use localId instead'", ")", "if", "'localId'", "not", "in", "dictionary", ":", "raise", "errors", ".", "GitkitClientError", "(", "'must specify localId'", ")", "if", "'email'", "not", "in", "dictionary", ":", "raise", "errors", ".", "GitkitClientError", "(", "'must specify email'", ")", "return", "cls", "(", "decode", "=", "False", ",", "*", "*", "dictionary", ")" ]
avg_line_len: 31.625
score: 15.6875
def unstack(self, dimensions=None, **dimensions_kwargs):
    """
    Unstack an existing dimension into multiple new dimensions.

    New dimensions will be added at the end, and the order of the data
    along each new dimension will be in contiguous (C) order.

    Parameters
    ----------
    dimensions : mapping of the form old_dim={dim1: size1, ...}
        Names of existing dimensions, and the new dimensions and sizes
        that they map to.
    **dimensions_kwargs:
        The keyword arguments form of ``dimensions``.
        One of dimensions or dimensions_kwargs must be provided.

    Returns
    -------
    unstacked : Variable
        Variable with the same attributes but unstacked data.

    See also
    --------
    Variable.stack
    """
    dimensions = either_dict_or_kwargs(dimensions, dimensions_kwargs,
                                       'unstack')
    result = self
    for old_dim, dims in dimensions.items():
        result = result._unstack_once(dims, old_dim)
    return result
[ "def", "unstack", "(", "self", ",", "dimensions", "=", "None", ",", "*", "*", "dimensions_kwargs", ")", ":", "dimensions", "=", "either_dict_or_kwargs", "(", "dimensions", ",", "dimensions_kwargs", ",", "'unstack'", ")", "result", "=", "self", "for", "old_dim", ",", "dims", "in", "dimensions", ".", "items", "(", ")", ":", "result", "=", "result", ".", "_unstack_once", "(", "dims", ",", "old_dim", ")", "return", "result" ]
35.225806
22.451613
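The same stack/unstack round trip is available on xarray's public objects; a minimal sketch, assuming the numpy and xarray packages are installed (the public ``DataArray.unstack`` is the user-facing counterpart of the ``Variable`` method above):

.. code-block:: python

    import numpy as np
    import xarray as xr

    arr = xr.DataArray(np.arange(6).reshape(2, 3), dims=('x', 'y'))
    stacked = arr.stack(z=('x', 'y'))   # one 'z' dimension of size 6
    restored = stacked.unstack('z')     # 'x' and 'y' are recovered
    assert restored.shape == (2, 3)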
def GenerateHelpText(self, env, sort=None):
        """
        Generate the help text for the options.

        env - an environment that is used to get the current values
              of the options.
        sort - Either a comparison function (which takes two arguments
              and returns -1, 0 or 1) or a boolean to indicate whether
              the options should be sorted.
        """

        if callable(sort):
            options = sorted(self.options, key=cmp_to_key(lambda x,y: sort(x.key,y.key)))
        elif sort is True:
            options = sorted(self.options, key=lambda x: x.key)
        else:
            options = self.options

        def format(opt, self=self, env=env):
            if opt.key in env:
                actual = env.subst('${%s}' % opt.key)
            else:
                actual = None
            return self.FormatVariableHelpText(env, opt.key, opt.help, opt.default,
                                               actual, opt.aliases)
        lines = [_f for _f in map(format, options) if _f]

        return ''.join(lines)
[ "def", "GenerateHelpText", "(", "self", ",", "env", ",", "sort", "=", "None", ")", ":", "if", "callable", "(", "sort", ")", ":", "options", "=", "sorted", "(", "self", ".", "options", ",", "key", "=", "cmp_to_key", "(", "lambda", "x", ",", "y", ":", "sort", "(", "x", ".", "key", ",", "y", ".", "key", ")", ")", ")", "elif", "sort", "is", "True", ":", "options", "=", "sorted", "(", "self", ".", "options", ",", "key", "=", "lambda", "x", ":", "x", ".", "key", ")", "else", ":", "options", "=", "self", ".", "options", "def", "format", "(", "opt", ",", "self", "=", "self", ",", "env", "=", "env", ")", ":", "if", "opt", ".", "key", "in", "env", ":", "actual", "=", "env", ".", "subst", "(", "'${%s}'", "%", "opt", ".", "key", ")", "else", ":", "actual", "=", "None", "return", "self", ".", "FormatVariableHelpText", "(", "env", ",", "opt", ".", "key", ",", "opt", ".", "help", ",", "opt", ".", "default", ",", "actual", ",", "opt", ".", "aliases", ")", "lines", "=", "[", "_f", "for", "_f", "in", "map", "(", "format", ",", "options", ")", "if", "_f", "]", "return", "''", ".", "join", "(", "lines", ")" ]
38.576923
22.884615
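A hedged sketch of how this is typically driven from an SConstruct file, where SCons makes ``Variables``, ``Environment`` and ``Help`` available at script scope; ``sort=True`` selects the key-sorted branch above ('debug' is a made-up option name):

.. code-block:: python

    vars = Variables()
    vars.Add('debug', help='build with debug symbols', default=0)
    env = Environment(variables=vars)
    Help(vars.GenerateHelpText(env, sort=True))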
def _set_sort_expressions(self):
        """Construct the query: sorting.

        Add sorting (ORDER BY) on the columns it needs to be applied to.
        """
        sort_expressions = []
        i = 0
        while self.params.get('order[{:d}][column]'.format(i), False):
            column_nr = int(self.params.get('order[{:d}][column]'.format(i)))
            column = self.columns[column_nr]
            direction = self.params.get('order[{:d}][dir]'.format(i))
            sort_expr = column.sqla_expr
            if direction == 'asc':
                sort_expr = sort_expr.asc()
            elif direction == 'desc':
                sort_expr = sort_expr.desc()
            else:
                raise ValueError(
                    'Invalid order direction: {}'.format(direction))
            if column.nulls_order:
                if column.nulls_order == 'nullsfirst':
                    sort_expr = sort_expr.nullsfirst()
                elif column.nulls_order == 'nullslast':
                    sort_expr = sort_expr.nullslast()
                else:
                    raise ValueError(
                        'Invalid nulls order: {}'.format(column.nulls_order))
            sort_expressions.append(sort_expr)
            i += 1
        self.sort_expressions = sort_expressions
[ "def", "_set_sort_expressions", "(", "self", ")", ":", "sort_expressions", "=", "[", "]", "i", "=", "0", "while", "self", ".", "params", ".", "get", "(", "'order[{:d}][column]'", ".", "format", "(", "i", ")", ",", "False", ")", ":", "column_nr", "=", "int", "(", "self", ".", "params", ".", "get", "(", "'order[{:d}][column]'", ".", "format", "(", "i", ")", ")", ")", "column", "=", "self", ".", "columns", "[", "column_nr", "]", "direction", "=", "self", ".", "params", ".", "get", "(", "'order[{:d}][dir]'", ".", "format", "(", "i", ")", ")", "sort_expr", "=", "column", ".", "sqla_expr", "if", "direction", "==", "'asc'", ":", "sort_expr", "=", "sort_expr", ".", "asc", "(", ")", "elif", "direction", "==", "'desc'", ":", "sort_expr", "=", "sort_expr", ".", "desc", "(", ")", "else", ":", "raise", "ValueError", "(", "'Invalid order direction: {}'", ".", "format", "(", "direction", ")", ")", "if", "column", ".", "nulls_order", ":", "if", "column", ".", "nulls_order", "==", "'nullsfirst'", ":", "sort_expr", "=", "sort_expr", ".", "nullsfirst", "(", ")", "elif", "column", ".", "nulls_order", "==", "'nullslast'", ":", "sort_expr", "=", "sort_expr", ".", "nullslast", "(", ")", "else", ":", "raise", "ValueError", "(", "'Invalid order direction: {}'", ".", "format", "(", "direction", ")", ")", "sort_expressions", ".", "append", "(", "sort_expr", ")", "i", "+=", "1", "self", ".", "sort_expressions", "=", "sort_expressions" ]
40.709677
15.516129
def pull(self, images, file_name=None, save=True, **kwargs): '''pull an image from gitlab. The image is found based on the uri that should correspond to a gitlab repository, and then the branch, job name, artifact folder, and tag of the container. The minimum that we need are the job id, collection, and job name. Eg: job_id|collection|job_name (or) job_id|collection Parameters ========== images: refers to the uri given by the user to pull in the format specified above file_name: the user's requested name for the file. It can optionally be None if the user wants a default. save: if True, you should save the container to the database using self.add() Returns ======= finished: a single container path, or list of paths ''' force = False if "force" in kwargs: force = kwargs['force'] if not isinstance(images, list): images = [images] bot.debug('Execution of PULL for %s images' %len(images)) # If used internally we want to return a list to the user. finished = [] for image in images: # Format job_id|collection|job_name # 122056733,singularityhub/gitlab-ci' # 122056733,singularityhub/gitlab-ci,build job_id, collection, job_name = self._parse_image_name(image) names = parse_image_name(remove_uri(collection)) # If the user didn't provide a file, make one based on the names if file_name is None: file_name = self._get_storage_name(names) # If the file already exists and force is False if os.path.exists(file_name) and force is False: bot.error('Image exists! Remove first, or use --force to overwrite') sys.exit(1) # Put together the GitLab URI image_name = "Singularity.%s.simg" %(names['tag']) if names['tag'] == 'latest': image_name = "Singularity.simg" # Assemble artifact path artifact_path = "%s/%s" %(self.artifacts, image_name) bot.info('Looking for artifact %s for job name %s, %s' %(artifact_path, job_name, job_id)) project = quote_plus(collection.strip('/')) # This is supposed to work, but it doesn't # url = "%s/projects/%s/jobs/%s/artifacts/file/%s" %(self.api_base, # project, job_id, # artifact_path) # This does work :) url = "%s/%s/-/jobs/%s/artifacts/raw/%s/?inline=false" % (self.base, collection, job_id, artifact_path) bot.info(url) # stream the url content to the file name image_file = self.download(url=url, file_name=file_name, show_progress=True) metadata = self._get_metadata() metadata['collection'] = collection metadata['job_id'] = job_id metadata['job_name'] = job_name metadata['artifact_path'] = artifact_path metadata['sregistry_pull'] = image # If we save to storage, the uri is the dropbox_path if save is True: container = self.add(image_path = image_file, image_uri = image, metadata = metadata, url = url) # When the container is created, this is the path to the image image_file = container.image if os.path.exists(image_file): bot.debug('Retrieved image file %s' %image_file) bot.custom(prefix="Success!", message=image_file) finished.append(image_file) if len(finished) == 1: finished = finished[0] return finished
[ "def", "pull", "(", "self", ",", "images", ",", "file_name", "=", "None", ",", "save", "=", "True", ",", "*", "*", "kwargs", ")", ":", "force", "=", "False", "if", "\"force\"", "in", "kwargs", ":", "force", "=", "kwargs", "[", "'force'", "]", "if", "not", "isinstance", "(", "images", ",", "list", ")", ":", "images", "=", "[", "images", "]", "bot", ".", "debug", "(", "'Execution of PULL for %s images'", "%", "len", "(", "images", ")", ")", "# If used internally we want to return a list to the user.", "finished", "=", "[", "]", "for", "image", "in", "images", ":", "# Format job_id|collection|job_name", "# 122056733,singularityhub/gitlab-ci'", "# 122056733,singularityhub/gitlab-ci,build", "job_id", ",", "collection", ",", "job_name", "=", "self", ".", "_parse_image_name", "(", "image", ")", "names", "=", "parse_image_name", "(", "remove_uri", "(", "collection", ")", ")", "# If the user didn't provide a file, make one based on the names", "if", "file_name", "is", "None", ":", "file_name", "=", "self", ".", "_get_storage_name", "(", "names", ")", "# If the file already exists and force is False", "if", "os", ".", "path", ".", "exists", "(", "file_name", ")", "and", "force", "is", "False", ":", "bot", ".", "error", "(", "'Image exists! Remove first, or use --force to overwrite'", ")", "sys", ".", "exit", "(", "1", ")", "# Put together the GitLab URI", "image_name", "=", "\"Singularity.%s.simg\"", "%", "(", "names", "[", "'tag'", "]", ")", "if", "names", "[", "'tag'", "]", "==", "'latest'", ":", "image_name", "=", "\"Singularity.simg\"", "# Assemble artifact path", "artifact_path", "=", "\"%s/%s\"", "%", "(", "self", ".", "artifacts", ",", "image_name", ")", "bot", ".", "info", "(", "'Looking for artifact %s for job name %s, %s'", "%", "(", "artifact_path", ",", "job_name", ",", "job_id", ")", ")", "project", "=", "quote_plus", "(", "collection", ".", "strip", "(", "'/'", ")", ")", "# This is supposed to work, but it doesn't", "# url = \"%s/projects/%s/jobs/%s/artifacts/file/%s\" %(self.api_base, ", "# project, job_id,", "# artifact_path)", "# This does work :)", "url", "=", "\"%s/%s/-/jobs/%s/artifacts/raw/%s/?inline=false\"", "%", "(", "self", ".", "base", ",", "collection", ",", "job_id", ",", "artifact_path", ")", "bot", ".", "info", "(", "url", ")", "# stream the url content to the file name", "image_file", "=", "self", ".", "download", "(", "url", "=", "url", ",", "file_name", "=", "file_name", ",", "show_progress", "=", "True", ")", "metadata", "=", "self", ".", "_get_metadata", "(", ")", "metadata", "[", "'collection'", "]", "=", "collection", "metadata", "[", "'job_id'", "]", "=", "job_id", "metadata", "[", "'job_name'", "]", "=", "job_name", "metadata", "[", "'artifact_path'", "]", "=", "artifact_path", "metadata", "[", "'sregistry_pull'", "]", "=", "image", "# If we save to storage, the uri is the dropbox_path", "if", "save", "is", "True", ":", "container", "=", "self", ".", "add", "(", "image_path", "=", "image_file", ",", "image_uri", "=", "image", ",", "metadata", "=", "metadata", ",", "url", "=", "url", ")", "# When the container is created, this is the path to the image", "image_file", "=", "container", ".", "image", "if", "os", ".", "path", ".", "exists", "(", "image_file", ")", ":", "bot", ".", "debug", "(", "'Retrieved image file %s'", "%", "image_file", ")", "bot", ".", "custom", "(", "prefix", "=", "\"Success!\"", ",", "message", "=", "image_file", ")", "finished", ".", "append", "(", "image_file", ")", "if", "len", "(", "finished", ")", "==", "1", ":", "finished", "=", 
"finished", "[", "0", "]", "return", "finished" ]
37.605505
22.669725
def ar_periodogram(x, window='hanning', window_len=7):
    """
    Compute periodogram from data x, using prewhitening, smoothing and
    recoloring.  The data is fitted to an AR(1) model for prewhitening,
    and the residuals are used to compute a first-pass periodogram with
    smoothing.  The fitted coefficients are then used for recoloring.

    Parameters
    ----------
    x : array_like(float)
        A flat NumPy array containing the data to smooth
    window_len : scalar(int), optional
        An odd integer giving the length of the window.  Defaults to 7.
    window : string
        A string giving the window type. Possible values are 'flat',
        'hanning', 'hamming', 'bartlett' or 'blackman'

    Returns
    -------
    w : array_like(float)
        Fourier frequencies at which the periodogram is evaluated
    I_w : array_like(float)
        Values of the periodogram at the Fourier frequencies

    """
    # === run regression === #
    x_lag = x[:-1]  # lagged x
    X = np.array([np.ones(len(x_lag)), x_lag]).T  # add constant
    y = np.array(x[1:])  # current x

    beta_hat = np.linalg.solve(X.T @ X, X.T @ y)  # solve for beta hat
    e_hat = y - X @ beta_hat  # compute residuals
    phi = beta_hat[1]  # pull out phi parameter

    # === compute periodogram on residuals === #
    w, I_w = periodogram(e_hat, window=window, window_len=window_len)

    # === recolor and return === #
    I_w = I_w / np.abs(1 - phi * np.exp(1j * w))**2

    return w, I_w
[ "def", "ar_periodogram", "(", "x", ",", "window", "=", "'hanning'", ",", "window_len", "=", "7", ")", ":", "# === run regression === #", "x_lag", "=", "x", "[", ":", "-", "1", "]", "# lagged x", "X", "=", "np", ".", "array", "(", "[", "np", ".", "ones", "(", "len", "(", "x_lag", ")", ")", ",", "x_lag", "]", ")", ".", "T", "# add constant", "y", "=", "np", ".", "array", "(", "x", "[", "1", ":", "]", ")", "# current x", "beta_hat", "=", "np", ".", "linalg", ".", "solve", "(", "X", ".", "T", "@", "X", ",", "X", ".", "T", "@", "y", ")", "# solve for beta hat", "e_hat", "=", "y", "-", "X", "@", "beta_hat", "# compute residuals", "phi", "=", "beta_hat", "[", "1", "]", "# pull out phi parameter", "# === compute periodogram on residuals === #", "w", ",", "I_w", "=", "periodogram", "(", "e_hat", ",", "window", "=", "window", ",", "window_len", "=", "window_len", ")", "# === recolor and return === #", "I_w", "=", "I_w", "/", "np", ".", "abs", "(", "1", "-", "phi", "*", "np", ".", "exp", "(", "1j", "*", "w", ")", ")", "**", "2", "return", "w", ",", "I_w" ]
34.333333
22.047619
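For context, a small usage sketch, assuming this is the ``ar_periodogram`` shipped with the quantecon package (the import path is an assumption): simulate an AR(1) series and estimate its spectral density with prewhitening and recoloring.

.. code-block:: python

    import numpy as np
    from quantecon import ar_periodogram  # assumed import path

    rng = np.random.default_rng(0)
    n, phi = 400, 0.9
    x = np.empty(n)
    x[0] = 0.0
    for t in range(1, n):
        # AR(1) process with Gaussian innovations
        x[t] = phi * x[t - 1] + rng.standard_normal()

    w, I_w = ar_periodogram(x, window='hanning', window_len=7)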
def name(self) -> str: """Return template's name (includes whitespace).""" h = self._atomic_partition(self._first_arg_sep)[0] if len(h) == len(self.string): return h[2:-2] return h[2:]
[ "def", "name", "(", "self", ")", "->", "str", ":", "h", "=", "self", ".", "_atomic_partition", "(", "self", ".", "_first_arg_sep", ")", "[", "0", "]", "if", "len", "(", "h", ")", "==", "len", "(", "self", ".", "string", ")", ":", "return", "h", "[", "2", ":", "-", "2", "]", "return", "h", "[", "2", ":", "]" ]
37.166667
12
def inodeusage(args=None): ''' Return inode usage information for volumes mounted on this minion CLI Example: .. code-block:: bash salt '*' disk.inodeusage ''' flags = _clean_flags(args, 'disk.inodeusage') if __grains__['kernel'] == 'AIX': cmd = 'df -i' else: cmd = 'df -iP' if flags: cmd += ' -{0}'.format(flags) ret = {} out = __salt__['cmd.run'](cmd, python_shell=False).splitlines() for line in out: if line.startswith('Filesystem'): continue comps = line.split() # Don't choke on empty lines if not comps: continue try: if __grains__['kernel'] == 'OpenBSD': ret[comps[8]] = { 'inodes': int(comps[5]) + int(comps[6]), 'used': comps[5], 'free': comps[6], 'use': comps[7], 'filesystem': comps[0], } elif __grains__['kernel'] == 'AIX': ret[comps[6]] = { 'inodes': comps[4], 'used': comps[5], 'free': comps[2], 'use': comps[5], 'filesystem': comps[0], } else: ret[comps[5]] = { 'inodes': comps[1], 'used': comps[2], 'free': comps[3], 'use': comps[4], 'filesystem': comps[0], } except (IndexError, ValueError): log.error('Problem parsing inode usage information') ret = {} return ret
[ "def", "inodeusage", "(", "args", "=", "None", ")", ":", "flags", "=", "_clean_flags", "(", "args", ",", "'disk.inodeusage'", ")", "if", "__grains__", "[", "'kernel'", "]", "==", "'AIX'", ":", "cmd", "=", "'df -i'", "else", ":", "cmd", "=", "'df -iP'", "if", "flags", ":", "cmd", "+=", "' -{0}'", ".", "format", "(", "flags", ")", "ret", "=", "{", "}", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", ".", "splitlines", "(", ")", "for", "line", "in", "out", ":", "if", "line", ".", "startswith", "(", "'Filesystem'", ")", ":", "continue", "comps", "=", "line", ".", "split", "(", ")", "# Don't choke on empty lines", "if", "not", "comps", ":", "continue", "try", ":", "if", "__grains__", "[", "'kernel'", "]", "==", "'OpenBSD'", ":", "ret", "[", "comps", "[", "8", "]", "]", "=", "{", "'inodes'", ":", "int", "(", "comps", "[", "5", "]", ")", "+", "int", "(", "comps", "[", "6", "]", ")", ",", "'used'", ":", "comps", "[", "5", "]", ",", "'free'", ":", "comps", "[", "6", "]", ",", "'use'", ":", "comps", "[", "7", "]", ",", "'filesystem'", ":", "comps", "[", "0", "]", ",", "}", "elif", "__grains__", "[", "'kernel'", "]", "==", "'AIX'", ":", "ret", "[", "comps", "[", "6", "]", "]", "=", "{", "'inodes'", ":", "comps", "[", "4", "]", ",", "'used'", ":", "comps", "[", "5", "]", ",", "'free'", ":", "comps", "[", "2", "]", ",", "'use'", ":", "comps", "[", "5", "]", ",", "'filesystem'", ":", "comps", "[", "0", "]", ",", "}", "else", ":", "ret", "[", "comps", "[", "5", "]", "]", "=", "{", "'inodes'", ":", "comps", "[", "1", "]", ",", "'used'", ":", "comps", "[", "2", "]", ",", "'free'", ":", "comps", "[", "3", "]", ",", "'use'", ":", "comps", "[", "4", "]", ",", "'filesystem'", ":", "comps", "[", "0", "]", ",", "}", "except", "(", "IndexError", ",", "ValueError", ")", ":", "log", ".", "error", "(", "'Problem parsing inode usage information'", ")", "ret", "=", "{", "}", "return", "ret" ]
29.339286
15.482143
def _parse_parameters(self, resource, params): '''Creates a dictionary from query_string and `params` Transforms the `?key=value&...` to a {'key': 'value'} and adds (or overwrites if already present) the value with the dictionary in `params`. ''' # remove params from resource URI (needed for paginated stuff) parsed_uri = urlparse(resource) qs = parsed_uri.query resource = urlunparse(parsed_uri._replace(query='')) prms = {} for tup in parse_qsl(qs): prms[tup[0]] = tup[1] # params supplied to self.get() override parsed params for key in params: prms[key] = params[key] return resource, prms
[ "def", "_parse_parameters", "(", "self", ",", "resource", ",", "params", ")", ":", "# remove params from resource URI (needed for paginated stuff)", "parsed_uri", "=", "urlparse", "(", "resource", ")", "qs", "=", "parsed_uri", ".", "query", "resource", "=", "urlunparse", "(", "parsed_uri", ".", "_replace", "(", "query", "=", "''", ")", ")", "prms", "=", "{", "}", "for", "tup", "in", "parse_qsl", "(", "qs", ")", ":", "prms", "[", "tup", "[", "0", "]", "]", "=", "tup", "[", "1", "]", "# params supplied to self.get() override parsed params", "for", "key", "in", "params", ":", "prms", "[", "key", "]", "=", "params", "[", "key", "]", "return", "resource", ",", "prms" ]
37.578947
19.789474
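The helper is private, but its behaviour can be reproduced with the stdlib alone; a minimal sketch with a made-up URI:

.. code-block:: python

    from urllib.parse import parse_qsl, urlparse, urlunparse

    uri = 'https://api.example.com/items?page=2&per_page=50'
    parsed = urlparse(uri)
    prms = dict(parse_qsl(parsed.query))          # {'page': '2', 'per_page': '50'}
    resource = urlunparse(parsed._replace(query=''))
    prms.update({'per_page': '100'})              # caller-supplied params win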
def as_objective(obj): """Convert obj into Objective class. Strings of the form "layer:n" become the Objective channel(layer, n). Objectives are returned unchanged. Args: obj: string or Objective. Returns: Objective """ if isinstance(obj, Objective): return obj elif callable(obj): return obj elif isinstance(obj, str): layer, n = obj.split(":") layer, n = layer.strip(), int(n) return channel(layer, n)
[ "def", "as_objective", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "Objective", ")", ":", "return", "obj", "elif", "callable", "(", "obj", ")", ":", "return", "obj", "elif", "isinstance", "(", "obj", ",", "str", ")", ":", "layer", ",", "n", "=", "obj", ".", "split", "(", "\":\"", ")", "layer", ",", "n", "=", "layer", ".", "strip", "(", ")", ",", "int", "(", "n", ")", "return", "channel", "(", "layer", ",", "n", ")" ]
21.65
19.6
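A short call sketch, assuming this is the ``as_objective`` from lucid's ``optvis.objectives`` module (the import path and layer name are assumptions):

.. code-block:: python

    from lucid.optvis.objectives import as_objective  # assumed location

    obj = as_objective('mixed4a:42')   # equivalent to channel('mixed4a', 42)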
def load_config(config, expand_env=False, force=False):
    """Return repos from a directory and fnmatch. Not recursive.

    :param config: path to the config file
    :type config: str
    :param expand_env: True to expand environment variables in the config.
    :type expand_env: bool
    :param bool force: True to aggregate even if repo is dirty.
    :returns: expanded config dict item
    :rtype: iter(dict)
    """
    if not os.path.exists(config):
        raise ConfigException('Unable to find configuration file: %s' % config)

    file_extension = os.path.splitext(config)[1][1:]
    conf = kaptan.Kaptan(handler=kaptan.HANDLER_EXT.get(file_extension))

    if expand_env:
        with open(config, 'r') as file_handler:
            config = Template(file_handler.read())
            config = config.substitute(os.environ)

    conf.import_config(config)
    return get_repos(conf.export('dict') or {}, force)
[ "def", "load_config", "(", "config", ",", "expand_env", "=", "False", ",", "force", "=", "False", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "config", ")", ":", "raise", "ConfigException", "(", "'Unable to find configuration file: %s'", "%", "config", ")", "file_extension", "=", "os", ".", "path", ".", "splitext", "(", "config", ")", "[", "1", "]", "[", "1", ":", "]", "conf", "=", "kaptan", ".", "Kaptan", "(", "handler", "=", "kaptan", ".", "HANDLER_EXT", ".", "get", "(", "file_extension", ")", ")", "if", "expand_env", ":", "with", "open", "(", "config", ",", "'r'", ")", "as", "file_handler", ":", "config", "=", "Template", "(", "file_handler", ".", "read", "(", ")", ")", "config", "=", "config", ".", "substitute", "(", "os", ".", "environ", ")", "conf", ".", "import_config", "(", "config", ")", "return", "get_repos", "(", "conf", ".", "export", "(", "'dict'", ")", "or", "{", "}", ",", "force", ")" ]
37.333333
18.625
def replace_surrogate_decode(mybytes):
    """
    Returns a (unicode) string
    """
    decoded = []
    for ch in mybytes:
        # We may be parsing newbytes (in which case ch is an int) or a native
        # str on Py2
        if isinstance(ch, int):
            code = ch
        else:
            code = ord(ch)
        if 0x80 <= code <= 0xFF:
            decoded.append(_unichr(0xDC00 + code))
        elif code <= 0x7F:
            decoded.append(_unichr(code))
        else:
            # Code points above 0xFF cannot have come from a single byte,
            # so this is not surrogateescape-encoded data.
            raise NotASurrogateError
    return str().join(decoded)
[ "def", "replace_surrogate_decode", "(", "mybytes", ")", ":", "decoded", "=", "[", "]", "for", "ch", "in", "mybytes", ":", "# We may be parsing newbytes (in which case ch is an int) or a native", "# str on Py2", "if", "isinstance", "(", "ch", ",", "int", ")", ":", "code", "=", "ch", "else", ":", "code", "=", "ord", "(", "ch", ")", "if", "0x80", "<=", "code", "<=", "0xFF", ":", "decoded", ".", "append", "(", "_unichr", "(", "0xDC00", "+", "code", ")", ")", "elif", "code", "<=", "0x7F", ":", "decoded", ".", "append", "(", "_unichr", "(", "code", ")", ")", "else", ":", "# # It may be a bad byte", "# # Try swallowing it.", "# continue", "# print(\"RAISE!\")", "raise", "NotASurrogateError", "return", "str", "(", ")", ".", "join", "(", "decoded", ")" ]
28.565217
12.73913
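The lone-surrogate convention used here (0xDC00 + byte) is the same one the stdlib's ``surrogateescape`` error handler produces for undecodable bytes, which this snippet demonstrates:

.. code-block:: python

    raw = b'abc\xff'
    text = raw.decode('utf-8', errors='surrogateescape')
    assert text[-1] == '\udcff'   # 0xDC00 + 0xFF
    assert text.encode('utf-8', errors='surrogateescape') == raw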
def _get_logger(self): """Get the appropriate logger Prevents uninitialized servers in write-only mode from failing. """ if self.logger: return self.logger elif self.server: return self.server.logger else: return default_logger
[ "def", "_get_logger", "(", "self", ")", ":", "if", "self", ".", "logger", ":", "return", "self", ".", "logger", "elif", "self", ".", "server", ":", "return", "self", ".", "server", ".", "logger", "else", ":", "return", "default_logger" ]
25.166667
17.333333
def contrast(self, value=1.0): """Increases or decreases the contrast in the layer. The given value is a percentage to increase or decrease the image contrast, for example 1.2 means contrast at 120%. """ c = ImageEnhance.Contrast(self.img) self.img = c.enhance(value)
[ "def", "contrast", "(", "self", ",", "value", "=", "1.0", ")", ":", "c", "=", "ImageEnhance", ".", "Contrast", "(", "self", ".", "img", ")", "self", ".", "img", "=", "c", ".", "enhance", "(", "value", ")" ]
27.416667
15.5
def create(years, certificate_type, promotion_code=None, sans_to_add=None): ''' Creates a new SSL certificate. Returns the following information: - Whether or not the SSL order was successful - The certificate ID - The order ID - The transaction ID - The amount charged for the order - The date on which the certificate was created - The date on which the certificate will expire - The type of SSL certificate - The number of years for which the certificate was purchased - The current status of the SSL certificate years : 1 Number of years to register certificate_type Type of SSL Certificate. Possible values include: - EV Multi Domain SSL - EV SSL - EV SSL SGC - EssentialSSL - EssentialSSL Wildcard - InstantSSL - InstantSSL Pro - Multi Domain SSL - PositiveSSL - PositiveSSL Multi Domain - PositiveSSL Wildcard - PremiumSSL - PremiumSSL Wildcard - QuickSSL Premium - RapidSSL - RapidSSL Wildcard - SGC Supercert - SSL Web Server - SSL Webserver EV - SSL123 - Secure Site - Secure Site Pro - Secure Site Pro with EV - Secure Site with EV - True BusinessID - True BusinessID Multi Domain - True BusinessID Wildcard - True BusinessID with EV - True BusinessID with EV Multi Domain - Unified Communications promotional_code An optional promo code to use when creating the certificate sans_to_add : 0 This parameter defines the number of add-on domains to be purchased in addition to the default number of domains included with a multi-domain certificate. Each certificate that supports SANs has the default number of domains included. You may check the default number of domains included and the maximum number of domains that can be added to it in the table below. +----------+----------------+----------------------+-------------------+----------------+ | Provider | Product name | Default number of | Maximum number of | Maximum number | | | | domains (domain from | total domains | of domains | | | | CSR is counted here) | | that can be | | | | | | passed in | | | | | | sans_to_add | | | | | | parameter | +----------+----------------+----------------------+-------------------+----------------+ | Comodo | PositiveSSL | 3 | 100 | 97 | | | Multi-Domain | | | | +----------+----------------+----------------------+-------------------+----------------+ | Comodo | Multi-Domain | 3 | 100 | 97 | | | SSL | | | | +----------+----------------+----------------------+-------------------+----------------+ | Comodo | EV Multi- | 3 | 100 | 97 | | | Domain SSL | | | | +----------+----------------+----------------------+-------------------+----------------+ | Comodo | Unified | 3 | 100 | 97 | | | Communications | | | | +----------+----------------+----------------------+-------------------+----------------+ | GeoTrust | QuickSSL | 1 | 1 domain + | The only | | | Premium | | 4 subdomains | supported | | | | | | value is 4 | +----------+----------------+----------------------+-------------------+----------------+ | GeoTrust | True | 5 | 25 | 20 | | | BusinessID | | | | | | with EV | | | | | | Multi-Domain | | | | +----------+----------------+----------------------+-------------------+----------------+ | GeoTrust | True Business | 5 | 25 | 20 | | | ID Multi- | | | | | | Domain | | | | +----------+----------------+----------------------+-------------------+----------------+ | Thawte | SSL Web | 1 | 25 | 24 | | | Server | | | | +----------+----------------+----------------------+-------------------+----------------+ | Thawte | SSL Web | 1 | 25 | 24 | | | Server with | | | | | | EV | | | | 
+----------+----------------+----------------------+-------------------+----------------+ | Thawte | SGC Supercerts | 1 | 25 | 24 | +----------+----------------+----------------------+-------------------+----------------+ | Symantec | Secure Site | 1 | 25 | 24 | | | Pro with EV | | | | +----------+----------------+----------------------+-------------------+----------------+ | Symantec | Secure Site | 1 | 25 | 24 | | | with EV | | | | +----------+----------------+----------------------+-------------------+----------------+ | Symantec | Secure Site | 1 | 25 | 24 | +----------+----------------+----------------------+-------------------+----------------+ | Symantec | Secure Site | 1 | 25 | 24 | | | Pro | | | | +----------+----------------+----------------------+-------------------+----------------+ CLI Example: .. code-block:: bash salt 'my-minion' namecheap_ssl.create 2 RapidSSL ''' valid_certs = ('QuickSSL Premium', 'RapidSSL', 'RapidSSL Wildcard', 'PremiumSSL', 'InstantSSL', 'PositiveSSL', 'PositiveSSL Wildcard', 'True BusinessID with EV', 'True BusinessID', 'True BusinessID Wildcard', 'True BusinessID Multi Domain', 'True BusinessID with EV Multi Domain', 'Secure Site', 'Secure Site Pro', 'Secure Site with EV', 'Secure Site Pro with EV', 'EssentialSSL', 'EssentialSSL Wildcard', 'InstantSSL Pro', 'PremiumSSL Wildcard', 'EV SSL', 'EV SSL SGC', 'SSL123', 'SSL Web Server', 'SGC Supercert', 'SSL Webserver EV', 'EV Multi Domain SSL', 'Multi Domain SSL', 'PositiveSSL Multi Domain', 'Unified Communications', ) if certificate_type not in valid_certs: log.error('Invalid option for certificate_type=%s', certificate_type) raise Exception('Invalid option for certificate_type=' + certificate_type) if years < 1 or years > 5: log.error('Invalid option for years=%s', years) raise Exception('Invalid option for years=' + six.text_type(years)) opts = salt.utils.namecheap.get_opts('namecheap.ssl.create') opts['Years'] = years opts['Type'] = certificate_type if promotion_code is not None: opts['PromotionCode'] = promotion_code if sans_to_add is not None: opts['SANStoADD'] = sans_to_add response_xml = salt.utils.namecheap.post_request(opts) if response_xml is None: return {} sslcreateresult = response_xml.getElementsByTagName('SSLCreateResult')[0] sslcertinfo = sslcreateresult.getElementsByTagName('SSLCertificate')[0] result = salt.utils.namecheap.atts_to_dict(sslcreateresult) result.update(salt.utils.namecheap.atts_to_dict(sslcertinfo)) return result
[ "def", "create", "(", "years", ",", "certificate_type", ",", "promotion_code", "=", "None", ",", "sans_to_add", "=", "None", ")", ":", "valid_certs", "=", "(", "'QuickSSL Premium'", ",", "'RapidSSL'", ",", "'RapidSSL Wildcard'", ",", "'PremiumSSL'", ",", "'InstantSSL'", ",", "'PositiveSSL'", ",", "'PositiveSSL Wildcard'", ",", "'True BusinessID with EV'", ",", "'True BusinessID'", ",", "'True BusinessID Wildcard'", ",", "'True BusinessID Multi Domain'", ",", "'True BusinessID with EV Multi Domain'", ",", "'Secure Site'", ",", "'Secure Site Pro'", ",", "'Secure Site with EV'", ",", "'Secure Site Pro with EV'", ",", "'EssentialSSL'", ",", "'EssentialSSL Wildcard'", ",", "'InstantSSL Pro'", ",", "'PremiumSSL Wildcard'", ",", "'EV SSL'", ",", "'EV SSL SGC'", ",", "'SSL123'", ",", "'SSL Web Server'", ",", "'SGC Supercert'", ",", "'SSL Webserver EV'", ",", "'EV Multi Domain SSL'", ",", "'Multi Domain SSL'", ",", "'PositiveSSL Multi Domain'", ",", "'Unified Communications'", ",", ")", "if", "certificate_type", "not", "in", "valid_certs", ":", "log", ".", "error", "(", "'Invalid option for certificate_type=%s'", ",", "certificate_type", ")", "raise", "Exception", "(", "'Invalid option for certificate_type='", "+", "certificate_type", ")", "if", "years", "<", "1", "or", "years", ">", "5", ":", "log", ".", "error", "(", "'Invalid option for years=%s'", ",", "years", ")", "raise", "Exception", "(", "'Invalid option for years='", "+", "six", ".", "text_type", "(", "years", ")", ")", "opts", "=", "salt", ".", "utils", ".", "namecheap", ".", "get_opts", "(", "'namecheap.ssl.create'", ")", "opts", "[", "'Years'", "]", "=", "years", "opts", "[", "'Type'", "]", "=", "certificate_type", "if", "promotion_code", "is", "not", "None", ":", "opts", "[", "'PromotionCode'", "]", "=", "promotion_code", "if", "sans_to_add", "is", "not", "None", ":", "opts", "[", "'SANStoADD'", "]", "=", "sans_to_add", "response_xml", "=", "salt", ".", "utils", ".", "namecheap", ".", "post_request", "(", "opts", ")", "if", "response_xml", "is", "None", ":", "return", "{", "}", "sslcreateresult", "=", "response_xml", ".", "getElementsByTagName", "(", "'SSLCreateResult'", ")", "[", "0", "]", "sslcertinfo", "=", "sslcreateresult", ".", "getElementsByTagName", "(", "'SSLCertificate'", ")", "[", "0", "]", "result", "=", "salt", ".", "utils", ".", "namecheap", ".", "atts_to_dict", "(", "sslcreateresult", ")", "result", ".", "update", "(", "salt", ".", "utils", ".", "namecheap", ".", "atts_to_dict", "(", "sslcertinfo", ")", ")", "return", "result" ]
50.296703
27.813187
async def apply_command(self, cmd):
        """
        applies a command

        This calls the pre and post hooks attached to the command,
        as well as :meth:`cmd.apply`.

        :param cmd: an applicable command
        :type cmd: :class:`~alot.commands.Command`
        """
        # FIXME: What are we guarding for here? We don't mention that None is
        # allowed as a value of cmd.
        if cmd:
            if cmd.prehook:
                await cmd.prehook(ui=self, dbm=self.dbman, cmd=cmd)
            try:
                if asyncio.iscoroutinefunction(cmd.apply):
                    await cmd.apply(self)
                else:
                    cmd.apply(self)
            except Exception as e:
                self._error_handler(e)
            else:
                if cmd.posthook:
                    logging.info('calling post-hook')
                    await cmd.posthook(ui=self, dbm=self.dbman, cmd=cmd)
[ "async", "def", "apply_command", "(", "self", ",", "cmd", ")", ":", "# FIXME: What are we guarding for here? We don't mention that None is", "# allowed as a value fo cmd.", "if", "cmd", ":", "if", "cmd", ".", "prehook", ":", "await", "cmd", ".", "prehook", "(", "ui", "=", "self", ",", "dbm", "=", "self", ".", "dbman", ",", "cmd", "=", "cmd", ")", "try", ":", "if", "asyncio", ".", "iscoroutinefunction", "(", "cmd", ".", "apply", ")", ":", "await", "cmd", ".", "apply", "(", "self", ")", "else", ":", "cmd", ".", "apply", "(", "self", ")", "except", "Exception", "as", "e", ":", "self", ".", "_error_handler", "(", "e", ")", "else", ":", "if", "cmd", ".", "posthook", ":", "logging", ".", "info", "(", "'calling post-hook'", ")", "await", "cmd", ".", "posthook", "(", "ui", "=", "self", ",", "dbm", "=", "self", ".", "dbman", ",", "cmd", "=", "cmd", ")" ]
35.192308
15.269231
def persistant_success(request, message, extra_tags='', fail_silently=False, *args, **kwargs):
    """
    Adds a persistent message with the ``SUCCESS`` level.
    """
    add_message(request, SUCCESS_PERSISTENT, message, extra_tags=extra_tags,
                fail_silently=fail_silently, *args, **kwargs)
[ "def", "persistant_success", "(", "request", ",", "message", ",", "extra_tags", "=", "''", ",", "fail_silently", "=", "False", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "add_message", "(", "request", ",", "SUCCESS_PERSISTENT", ",", "message", ",", "extra_tags", "=", "extra_tags", ",", "fail_silently", "=", "fail_silently", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
50.333333
21.333333
def get_data(self):
        """
        returns a generator object that reads data a block at a time;
        once all data in the file has been read, the generator prints
        "File depleted" and yields ``(None, None, None)``.
        :return:
        """
        with self as gr:
            while True:
                try:
                    yield gr.read_next_data_block_int8()
                except Exception as e:
                    print("File depleted")
                    yield None, None, None
[ "def", "get_data", "(", "self", ")", ":", "with", "self", "as", "gr", ":", "while", "True", ":", "try", ":", "yield", "gr", ".", "read_next_data_block_int8", "(", ")", "except", "Exception", "as", "e", ":", "print", "(", "\"File depleted\"", ")", "yield", "None", ",", "None", ",", "None" ]
36.615385
16.461538
def calc_freefree_kappa(ne, t, hz): """Dulk (1985) eq 20, assuming pure hydrogen.""" return 9.78e-3 * ne**2 * hz**-2 * t**-1.5 * (24.5 + np.log(t) - np.log(hz))
[ "def", "calc_freefree_kappa", "(", "ne", ",", "t", ",", "hz", ")", ":", "return", "9.78e-3", "*", "ne", "**", "2", "*", "hz", "**", "-", "2", "*", "t", "**", "-", "1.5", "*", "(", "24.5", "+", "np", ".", "log", "(", "t", ")", "-", "np", ".", "log", "(", "hz", ")", ")" ]
55.333333
14.666667
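In the docstring's notation, the body is a direct transcription of Dulk (1985) eq. 20 for pure hydrogen (this appears to be the high-temperature Gaunt-factor branch, with cgs-style units: n_e per cm^3, T in K, and the frequency in Hz):

.. math::

    \kappa_\nu \approx 9.78 \times 10^{-3}\,
        \frac{n_e^2}{\nu^2\, T^{3/2}}
        \left(24.5 + \ln T - \ln \nu\right)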
def to_sympy_column_matrix(matrix):
    """
    Converts a sympy matrix to a column matrix (i.e. transposes it if it was a row matrix)
    Raises ValueError if the matrix provided is not a vector
    :param matrix: a vector to be converted to column
    :return:
    """
    matrix = to_sympy_matrix(matrix)
    if matrix.cols == 1:
        return matrix
    elif matrix.rows == 1:
        return matrix.T
    else:
        raise ValueError('Cannot convert {0!r} to a column matrix'.format(matrix))
[ "def", "to_sympy_column_matrix", "(", "matrix", ")", ":", "matrix", "=", "to_sympy_matrix", "(", "matrix", ")", "if", "matrix", ".", "cols", "==", "1", ":", "return", "matrix", "elif", "matrix", ".", "rows", "==", "1", ":", "return", "matrix", ".", "T", "else", ":", "raise", "ValueError", "(", "'Cannot convert {0!r} to a column matrix'", ".", "format", "(", "matrix", ")", ")" ]
34.214286
18.071429
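A minimal behaviour sketch using sympy directly (``to_sympy_matrix`` is assumed to produce a ``sympy.Matrix``):

.. code-block:: python

    import sympy

    row = sympy.Matrix([[1, 2, 3]])   # 1x3 row vector
    assert row.rows == 1 and row.cols == 3
    assert row.T.shape == (3, 1)      # the shape the helper returns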
def get_folders(self): """ Returns a list of all folders for this account Returns: List[:class:`Folder <pyOutlook.core.folder.Folder>`] """ endpoint = 'https://outlook.office.com/api/v2.0/me/MailFolders/' r = requests.get(endpoint, headers=self._headers) if check_response(r): return Folder._json_to_folders(self, r.json())
[ "def", "get_folders", "(", "self", ")", ":", "endpoint", "=", "'https://outlook.office.com/api/v2.0/me/MailFolders/'", "r", "=", "requests", ".", "get", "(", "endpoint", ",", "headers", "=", "self", ".", "_headers", ")", "if", "check_response", "(", "r", ")", ":", "return", "Folder", ".", "_json_to_folders", "(", "self", ",", "r", ".", "json", "(", ")", ")" ]
32.916667
22
def load_config(self, config_file_name):
        """ Load configuration from a prt or str file.

        The configuration file type is extracted from the file suffix - prt or str.

        :param config_file_name: full path to the configuration file.
            IxTclServer must have access to the file location: either the
            configuration file is on a shared folder, or IxTclServer runs
            on the client machine.
        """
        config_file_name = config_file_name.replace('\\', '/')
        ext = path.splitext(config_file_name)[-1].lower()
        if ext == '.prt':
            self.api.call_rc('port import "{}" {}'.format(config_file_name, self.uri))
        elif ext == '.str':
            self.reset()
            self.api.call_rc('stream import "{}" {}'.format(config_file_name, self.uri))
        else:
            raise ValueError('Configuration file type {} not supported.'.format(ext))
        self.write()
        self.discover()
[ "def", "load_config", "(", "self", ",", "config_file_name", ")", ":", "config_file_name", "=", "config_file_name", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", "ext", "=", "path", ".", "splitext", "(", "config_file_name", ")", "[", "-", "1", "]", ".", "lower", "(", ")", "if", "ext", "==", "'.prt'", ":", "self", ".", "api", ".", "call_rc", "(", "'port import \"{}\" {}'", ".", "format", "(", "config_file_name", ",", "self", ".", "uri", ")", ")", "elif", "ext", "==", "'.str'", ":", "self", ".", "reset", "(", ")", "self", ".", "api", ".", "call_rc", "(", "'stream import \"{}\" {}'", ".", "format", "(", "config_file_name", ",", "self", ".", "uri", ")", ")", "else", ":", "raise", "ValueError", "(", "'Configuration file type {} not supported.'", ".", "format", "(", "ext", ")", ")", "self", ".", "write", "(", ")", "self", ".", "discover", "(", ")" ]
44.619048
23.333333
def pitch(self): """ Calculates the Pitch of the Quaternion. """ x, y, z, w = self.x, self.y, self.z, self.w return math.atan2(2*x*w - 2*y*z, 1 - 2*x*x - 2*z*z)
[ "def", "pitch", "(", "self", ")", ":", "x", ",", "y", ",", "z", ",", "w", "=", "self", ".", "x", ",", "self", ".", "y", ",", "self", ".", "z", ",", "self", ".", "w", "return", "math", ".", "atan2", "(", "2", "*", "x", "*", "w", "-", "2", "*", "y", "*", "z", ",", "1", "-", "2", "*", "x", "*", "x", "-", "2", "*", "z", "*", "z", ")" ]
33.8
17.2
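In the usual atan2 form this computes the following (Euler-angle conventions vary between libraries; the formula below simply restates the code):

.. math::

    \text{pitch} = \operatorname{atan2}\!\left(2(xw - yz),\; 1 - 2x^2 - 2z^2\right)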
def _get_bucket(self, bucket_name): '''get a bucket based on a bucket name. If it doesn't exist, create it. Parameters ========== bucket_name: the name of the bucket to get (or create). It should not contain google, and should be all lowercase with - or underscores. ''' # Case 1: The bucket already exists try: bucket = self._bucket_service.get_bucket(bucket_name) # Case 2: The bucket needs to be created except google.cloud.exceptions.NotFound: bucket = self._bucket_service.create_bucket(bucket_name) # Case 3: The bucket name is already taken except: bot.error('Cannot get or create %s' % bucket_name) sys.exit(1) return bucket
[ "def", "_get_bucket", "(", "self", ",", "bucket_name", ")", ":", "# Case 1: The bucket already exists", "try", ":", "bucket", "=", "self", ".", "_bucket_service", ".", "get_bucket", "(", "bucket_name", ")", "# Case 2: The bucket needs to be created", "except", "google", ".", "cloud", ".", "exceptions", ".", "NotFound", ":", "bucket", "=", "self", ".", "_bucket_service", ".", "create_bucket", "(", "bucket_name", ")", "# Case 3: The bucket name is already taken", "except", ":", "bot", ".", "error", "(", "'Cannot get or create %s'", "%", "bucket_name", ")", "sys", ".", "exit", "(", "1", ")", "return", "bucket" ]
33.958333
24.125
def by_member(self, member_id, type='introduced'): """ Takes a bioguide ID and a type: (introduced|updated|cosponsored|withdrawn) Returns recent bills """ path = "members/{member_id}/bills/{type}.json".format( member_id=member_id, type=type) return self.fetch(path)
[ "def", "by_member", "(", "self", ",", "member_id", ",", "type", "=", "'introduced'", ")", ":", "path", "=", "\"members/{member_id}/bills/{type}.json\"", ".", "format", "(", "member_id", "=", "member_id", ",", "type", "=", "type", ")", "return", "self", ".", "fetch", "(", "path", ")" ]
36.111111
7.444444
def reject_entry(request, entry_id): """ Admins can reject an entry that has been verified or approved but not invoiced to set its status to 'unverified' for the user to fix. """ return_url = request.GET.get('next', reverse('dashboard')) try: entry = Entry.no_join.get(pk=entry_id) except: message = 'No such log entry.' messages.error(request, message) return redirect(return_url) if entry.status == Entry.UNVERIFIED or entry.status == Entry.INVOICED: msg_text = 'This entry is unverified or is already invoiced.' messages.error(request, msg_text) return redirect(return_url) if request.POST.get('Yes'): entry.status = Entry.UNVERIFIED entry.save() msg_text = 'The entry\'s status was set to unverified.' messages.info(request, msg_text) return redirect(return_url) return render(request, 'timepiece/entry/reject.html', { 'entry': entry, 'next': request.GET.get('next'), })
[ "def", "reject_entry", "(", "request", ",", "entry_id", ")", ":", "return_url", "=", "request", ".", "GET", ".", "get", "(", "'next'", ",", "reverse", "(", "'dashboard'", ")", ")", "try", ":", "entry", "=", "Entry", ".", "no_join", ".", "get", "(", "pk", "=", "entry_id", ")", "except", ":", "message", "=", "'No such log entry.'", "messages", ".", "error", "(", "request", ",", "message", ")", "return", "redirect", "(", "return_url", ")", "if", "entry", ".", "status", "==", "Entry", ".", "UNVERIFIED", "or", "entry", ".", "status", "==", "Entry", ".", "INVOICED", ":", "msg_text", "=", "'This entry is unverified or is already invoiced.'", "messages", ".", "error", "(", "request", ",", "msg_text", ")", "return", "redirect", "(", "return_url", ")", "if", "request", ".", "POST", ".", "get", "(", "'Yes'", ")", ":", "entry", ".", "status", "=", "Entry", ".", "UNVERIFIED", "entry", ".", "save", "(", ")", "msg_text", "=", "'The entry\\'s status was set to unverified.'", "messages", ".", "info", "(", "request", ",", "msg_text", ")", "return", "redirect", "(", "return_url", ")", "return", "render", "(", "request", ",", "'timepiece/entry/reject.html'", ",", "{", "'entry'", ":", "entry", ",", "'next'", ":", "request", ".", "GET", ".", "get", "(", "'next'", ")", ",", "}", ")" ]
35.892857
15.607143
def circ_corrcc(x, y, tail='two-sided'): """Correlation coefficient between two circular variables. Parameters ---------- x : np.array First circular variable (expressed in radians) y : np.array Second circular variable (expressed in radians) tail : string Specify whether to return 'one-sided' or 'two-sided' p-value. Returns ------- r : float Correlation coefficient pval : float Uncorrected p-value Notes ----- Adapted from the CircStats MATLAB toolbox (Berens 2009). Use the np.deg2rad function to convert angles from degrees to radians. Please note that NaN are automatically removed. Examples -------- Compute the r and p-value of two circular variables >>> from pingouin import circ_corrcc >>> x = [0.785, 1.570, 3.141, 3.839, 5.934] >>> y = [0.593, 1.291, 2.879, 3.892, 6.108] >>> r, pval = circ_corrcc(x, y) >>> print(r, pval) 0.942 0.06579836070349088 """ from scipy.stats import norm x = np.asarray(x) y = np.asarray(y) # Check size if x.size != y.size: raise ValueError('x and y must have the same length.') # Remove NA x, y = remove_na(x, y, paired=True) n = x.size # Compute correlation coefficient x_sin = np.sin(x - circmean(x)) y_sin = np.sin(y - circmean(y)) # Similar to np.corrcoef(x_sin, y_sin)[0][1] r = np.sum(x_sin * y_sin) / np.sqrt(np.sum(x_sin**2) * np.sum(y_sin**2)) # Compute T- and p-values tval = np.sqrt((n * (x_sin**2).mean() * (y_sin**2).mean()) / np.mean(x_sin**2 * y_sin**2)) * r # Approximately distributed as a standard normal pval = 2 * norm.sf(abs(tval)) pval = pval / 2 if tail == 'one-sided' else pval return np.round(r, 3), pval
[ "def", "circ_corrcc", "(", "x", ",", "y", ",", "tail", "=", "'two-sided'", ")", ":", "from", "scipy", ".", "stats", "import", "norm", "x", "=", "np", ".", "asarray", "(", "x", ")", "y", "=", "np", ".", "asarray", "(", "y", ")", "# Check size", "if", "x", ".", "size", "!=", "y", ".", "size", ":", "raise", "ValueError", "(", "'x and y must have the same length.'", ")", "# Remove NA", "x", ",", "y", "=", "remove_na", "(", "x", ",", "y", ",", "paired", "=", "True", ")", "n", "=", "x", ".", "size", "# Compute correlation coefficient", "x_sin", "=", "np", ".", "sin", "(", "x", "-", "circmean", "(", "x", ")", ")", "y_sin", "=", "np", ".", "sin", "(", "y", "-", "circmean", "(", "y", ")", ")", "# Similar to np.corrcoef(x_sin, y_sin)[0][1]", "r", "=", "np", ".", "sum", "(", "x_sin", "*", "y_sin", ")", "/", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "x_sin", "**", "2", ")", "*", "np", ".", "sum", "(", "y_sin", "**", "2", ")", ")", "# Compute T- and p-values", "tval", "=", "np", ".", "sqrt", "(", "(", "n", "*", "(", "x_sin", "**", "2", ")", ".", "mean", "(", ")", "*", "(", "y_sin", "**", "2", ")", ".", "mean", "(", ")", ")", "/", "np", ".", "mean", "(", "x_sin", "**", "2", "*", "y_sin", "**", "2", ")", ")", "*", "r", "# Approximately distributed as a standard normal", "pval", "=", "2", "*", "norm", ".", "sf", "(", "abs", "(", "tval", ")", ")", "pval", "=", "pval", "/", "2", "if", "tail", "==", "'one-sided'", "else", "pval", "return", "np", ".", "round", "(", "r", ",", "3", ")", ",", "pval" ]
27.546875
20.96875
def _normalize_orders(self): """Helper: adjust orders based on cursors, where clauses.""" orders = list(self._orders) _has_snapshot_cursor = False if self._start_at: if isinstance(self._start_at[0], document.DocumentSnapshot): _has_snapshot_cursor = True if self._end_at: if isinstance(self._end_at[0], document.DocumentSnapshot): _has_snapshot_cursor = True if _has_snapshot_cursor: should_order = [ _enum_from_op_string(key) for key in _COMPARISON_OPERATORS if key not in (_EQ_OP, "array_contains") ] order_keys = [order.field.field_path for order in orders] for filter_ in self._field_filters: field = filter_.field.field_path if filter_.op in should_order and field not in order_keys: orders.append(self._make_order(field, "ASCENDING")) if not orders: orders.append(self._make_order("__name__", "ASCENDING")) else: order_keys = [order.field.field_path for order in orders] if "__name__" not in order_keys: direction = orders[-1].direction # enum? orders.append(self._make_order("__name__", direction)) return orders
[ "def", "_normalize_orders", "(", "self", ")", ":", "orders", "=", "list", "(", "self", ".", "_orders", ")", "_has_snapshot_cursor", "=", "False", "if", "self", ".", "_start_at", ":", "if", "isinstance", "(", "self", ".", "_start_at", "[", "0", "]", ",", "document", ".", "DocumentSnapshot", ")", ":", "_has_snapshot_cursor", "=", "True", "if", "self", ".", "_end_at", ":", "if", "isinstance", "(", "self", ".", "_end_at", "[", "0", "]", ",", "document", ".", "DocumentSnapshot", ")", ":", "_has_snapshot_cursor", "=", "True", "if", "_has_snapshot_cursor", ":", "should_order", "=", "[", "_enum_from_op_string", "(", "key", ")", "for", "key", "in", "_COMPARISON_OPERATORS", "if", "key", "not", "in", "(", "_EQ_OP", ",", "\"array_contains\"", ")", "]", "order_keys", "=", "[", "order", ".", "field", ".", "field_path", "for", "order", "in", "orders", "]", "for", "filter_", "in", "self", ".", "_field_filters", ":", "field", "=", "filter_", ".", "field", ".", "field_path", "if", "filter_", ".", "op", "in", "should_order", "and", "field", "not", "in", "order_keys", ":", "orders", ".", "append", "(", "self", ".", "_make_order", "(", "field", ",", "\"ASCENDING\"", ")", ")", "if", "not", "orders", ":", "orders", ".", "append", "(", "self", ".", "_make_order", "(", "\"__name__\"", ",", "\"ASCENDING\"", ")", ")", "else", ":", "order_keys", "=", "[", "order", ".", "field", ".", "field_path", "for", "order", "in", "orders", "]", "if", "\"__name__\"", "not", "in", "order_keys", ":", "direction", "=", "orders", "[", "-", "1", "]", ".", "direction", "# enum?", "orders", ".", "append", "(", "self", ".", "_make_order", "(", "\"__name__\"", ",", "direction", ")", ")", "return", "orders" ]
41.363636
19.515152
def close(self) -> None: """Closes all loaded tables.""" while self.wdl: _, wdl = self.wdl.popitem() wdl.close() while self.dtz: _, dtz = self.dtz.popitem() dtz.close() self.lru.clear()
[ "def", "close", "(", "self", ")", "->", "None", ":", "while", "self", ".", "wdl", ":", "_", ",", "wdl", "=", "self", ".", "wdl", ".", "popitem", "(", ")", "wdl", ".", "close", "(", ")", "while", "self", ".", "dtz", ":", "_", ",", "dtz", "=", "self", ".", "dtz", ".", "popitem", "(", ")", "dtz", ".", "close", "(", ")", "self", ".", "lru", ".", "clear", "(", ")" ]
23.363636
16.545455
def set_key(self, key='C'): """Add a key signature event to the track_data.""" if isinstance(key, Key): key = key.name[0] self.track_data += self.key_signature_event(key)
[ "def", "set_key", "(", "self", ",", "key", "=", "'C'", ")", ":", "if", "isinstance", "(", "key", ",", "Key", ")", ":", "key", "=", "key", ".", "name", "[", "0", "]", "self", ".", "track_data", "+=", "self", ".", "key_signature_event", "(", "key", ")" ]
40.4
9.6
def path(self): """HTTP full path constraint. (read-only). """ path = self.__prefix if self is not self.over: path = self.over.path + path return path
[ "def", "path", "(", "self", ")", ":", "path", "=", "self", ".", "__prefix", "if", "self", "is", "not", "self", ".", "over", ":", "path", "=", "self", ".", "over", ".", "path", "+", "path", "return", "path" ]
28
9.285714
def constant(self, val, ty): """ Creates a constant as a VexValue :param val: The value, as an integer :param ty: The type of the resulting VexValue :return: a VexValue """ if isinstance(val, VexValue) and not isinstance(val, IRExpr): raise Exception('Constant cannot be made from VexValue or IRExpr') rdt = self.irsb_c.mkconst(val, ty) return VexValue(self.irsb_c, rdt)
[ "def", "constant", "(", "self", ",", "val", ",", "ty", ")", ":", "if", "isinstance", "(", "val", ",", "VexValue", ")", "and", "not", "isinstance", "(", "val", ",", "IRExpr", ")", ":", "raise", "Exception", "(", "'Constant cannot be made from VexValue or IRExpr'", ")", "rdt", "=", "self", ".", "irsb_c", ".", "mkconst", "(", "val", ",", "ty", ")", "return", "VexValue", "(", "self", ".", "irsb_c", ",", "rdt", ")" ]
37
12.666667
def step(self, compute=True): """Context manager to gradually build a history row, then commit it at the end. To reduce the number of conditionals needed, code can check run.history.compute: with run.history.step(batch_idx % log_interval == 0): run.history.add({"nice": "ok"}) if run.history.compute: # Something expensive here """ if self.batched: # we're already in a context manager raise wandb.Error("Nested History step contexts aren't supported") self.batched = True self.compute = compute yield self if compute: self._write() compute = True
[ "def", "step", "(", "self", ",", "compute", "=", "True", ")", ":", "if", "self", ".", "batched", ":", "# we're already in a context manager", "raise", "wandb", ".", "Error", "(", "\"Nested History step contexts aren't supported\"", ")", "self", ".", "batched", "=", "True", "self", ".", "compute", "=", "compute", "yield", "self", "if", "compute", ":", "self", ".", "_write", "(", ")", "compute", "=", "True" ]
37.611111
18.277778
def WriteHashes(self, arr): """ Write an array of hashes to the stream. Args: arr (list): a list of 32 byte hashes. """ length = len(arr) self.WriteVarInt(length) for item in arr: ba = bytearray(binascii.unhexlify(item)) ba.reverse() # logger.info("WRITING HASH %s " % ba) self.WriteBytes(ba)
[ "def", "WriteHashes", "(", "self", ",", "arr", ")", ":", "length", "=", "len", "(", "arr", ")", "self", ".", "WriteVarInt", "(", "length", ")", "for", "item", "in", "arr", ":", "ba", "=", "bytearray", "(", "binascii", ".", "unhexlify", "(", "item", ")", ")", "ba", ".", "reverse", "(", ")", "# logger.info(\"WRITING HASH %s \" % ba)", "self", ".", "WriteBytes", "(", "ba", ")" ]
29.071429
13.785714
def find_matches_by_sigma(cos_vects, unc_vect, cut_sigma): """Find all the pairs of sources within a given distance of each other. Parameters ---------- cos_vects : np.ndarray(3,nsrc) Directional cosines (i.e., x,y,z component) values of all the sources unc_vect : np.ndarray(nsrc) Uncertainties on the source positions cut_sigma : float Angular cut in positional errors standard deviations that will be used to select pairs by their separation. Returns ------- match_dict : dict((int,int):float) Each entry gives a pair of source indices, and the corresponding sigma """ match_dict = {} sig_2_vect = unc_vect * unc_vect for i, v1 in enumerate(cos_vects.T): cos_t_vect = (v1 * cos_vects.T).sum(1) cos_t_vect[cos_t_vect < -1.0] = -1.0 cos_t_vect[cos_t_vect > 1.0] = 1.0 sig_2_i = sig_2_vect[i] acos_t_vect = np.degrees(np.arccos(cos_t_vect)) total_unc = np.sqrt(sig_2_i + sig_2_vect) sigma_vect = acos_t_vect / total_unc mask = sigma_vect < cut_sigma for j in np.where(mask[:i])[0]: match_dict[(j, i)] = sigma_vect[j] return match_dict
[ "def", "find_matches_by_sigma", "(", "cos_vects", ",", "unc_vect", ",", "cut_sigma", ")", ":", "match_dict", "=", "{", "}", "sig_2_vect", "=", "unc_vect", "*", "unc_vect", "for", "i", ",", "v1", "in", "enumerate", "(", "cos_vects", ".", "T", ")", ":", "cos_t_vect", "=", "(", "v1", "*", "cos_vects", ".", "T", ")", ".", "sum", "(", "1", ")", "cos_t_vect", "[", "cos_t_vect", "<", "-", "1.0", "]", "=", "-", "1.0", "cos_t_vect", "[", "cos_t_vect", ">", "1.0", "]", "=", "1.0", "sig_2_i", "=", "sig_2_vect", "[", "i", "]", "acos_t_vect", "=", "np", ".", "degrees", "(", "np", ".", "arccos", "(", "cos_t_vect", ")", ")", "total_unc", "=", "np", ".", "sqrt", "(", "sig_2_i", "+", "sig_2_vect", ")", "sigma_vect", "=", "acos_t_vect", "/", "total_unc", "mask", "=", "sigma_vect", "<", "cut_sigma", "for", "j", "in", "np", ".", "where", "(", "mask", "[", ":", "i", "]", ")", "[", "0", "]", ":", "match_dict", "[", "(", "j", ",", "i", ")", "]", "=", "sigma_vect", "[", "j", "]", "return", "match_dict" ]
32.621622
16.324324
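A small self-contained check, assuming the function above is importable: two sources about 0.094 deg apart with 0.05 deg errors sit at roughly 1.3 sigma, so only the pair (0, 1) survives a 3-sigma cut.

.. code-block:: python

    import numpy as np

    def radec_to_cos(ra_deg, dec_deg):
        # Directional cosines of an (RA, Dec) position given in degrees.
        ra, dec = np.radians(ra_deg), np.radians(dec_deg)
        return np.array([np.cos(dec) * np.cos(ra),
                         np.cos(dec) * np.sin(ra),
                         np.sin(dec)])

    cos_vects = np.stack([radec_to_cos(10.0, 20.0),
                          radec_to_cos(10.1, 20.0),
                          radec_to_cos(90.0, -30.0)], axis=1)  # shape (3, nsrc)
    unc_vect = np.array([0.05, 0.05, 0.05])  # positional errors, degrees
    matches = find_matches_by_sigma(cos_vects, unc_vect, cut_sigma=3.0)
    assert set(matches) == {(0, 1)}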
def rel_path(filename): """ Function that gets relative path to the filename """ return os.path.join(os.getcwd(), os.path.dirname(__file__), filename)
[ "def", "rel_path", "(", "filename", ")", ":", "return", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "filename", ")" ]
32.4
12.4
def create(clients_num, clients_host, clients_port, people_num, throttle): """ Prepare clients to execute :return: Modules to execute, cmd line function :rtype: list[WrapperClient], (str, object) -> str | None """ res = [] for number in range(clients_num): sc = EchoClient({ 'id': number, 'listen_bind_ip': clients_host, #'multicast_bind_ip': "127.0.0.1", 'listen_port': clients_port + number }) people = [] for person_number in range(people_num): people.append(Person(id=person_number)) wrapper = WrapperEchoClient({ 'client': sc, 'people': people, 'throttle': throttle }) res.append(wrapper) return res, cmd_line
[ "def", "create", "(", "clients_num", ",", "clients_host", ",", "clients_port", ",", "people_num", ",", "throttle", ")", ":", "res", "=", "[", "]", "for", "number", "in", "range", "(", "clients_num", ")", ":", "sc", "=", "EchoClient", "(", "{", "'id'", ":", "number", ",", "'listen_bind_ip'", ":", "clients_host", ",", "#'multicast_bind_ip': \"127.0.0.1\",", "'listen_port'", ":", "clients_port", "+", "number", "}", ")", "people", "=", "[", "]", "for", "person_number", "in", "range", "(", "people_num", ")", ":", "people", ".", "append", "(", "Person", "(", "id", "=", "person_number", ")", ")", "wrapper", "=", "WrapperEchoClient", "(", "{", "'client'", ":", "sc", ",", "'people'", ":", "people", ",", "'throttle'", ":", "throttle", "}", ")", "res", ".", "append", "(", "wrapper", ")", "return", "res", ",", "cmd_line" ]
29.807692
15.269231
def _make_job_id(job_id, prefix=None): """Construct an ID for a new job. :type job_id: str or ``NoneType`` :param job_id: the user-provided job ID :type prefix: str or ``NoneType`` :param prefix: (Optional) the user-provided prefix for a job ID :rtype: str :returns: A job ID """ if job_id is not None: return job_id elif prefix is not None: return str(prefix) + str(uuid.uuid4()) else: return str(uuid.uuid4())
[ "def", "_make_job_id", "(", "job_id", ",", "prefix", "=", "None", ")", ":", "if", "job_id", "is", "not", "None", ":", "return", "job_id", "elif", "prefix", "is", "not", "None", ":", "return", "str", "(", "prefix", ")", "+", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "else", ":", "return", "str", "(", "uuid", ".", "uuid4", "(", ")", ")" ]
25.833333
16.166667
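Illustrative calls for _make_job_id; ``uuid`` must already be imported in the defining module, and the UUID suffixes shown are of course random.

_make_job_id("my-job")             # -> 'my-job' (an explicit ID wins)
_make_job_id(None, prefix="etl-")  # -> 'etl-<random uuid4>'
_make_job_id(None)                 # -> '<random uuid4>'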
def on_redraw_timer(self, event): '''the redraw timer ensures we show new map tiles as they are downloaded''' state = self.state while state.in_queue.qsize(): obj = state.in_queue.get() if isinstance(obj, MPImageData): img = wx.EmptyImage(obj.width, obj.height) img.SetData(obj.data) self.img = img self.need_redraw = True if state.auto_size: client_area = state.frame.GetClientSize() total_area = state.frame.GetSize() bx = max(total_area.x - client_area.x,0) by = max(total_area.y - client_area.y,0) state.frame.SetSize(wx.Size(obj.width+bx, obj.height+by)) if isinstance(obj, MPImageTitle): state.frame.SetTitle(obj.title) if isinstance(obj, MPImageMenu): self.set_menu(obj.menu) if isinstance(obj, MPImagePopupMenu): self.set_popup_menu(obj.menu) if isinstance(obj, MPImageBrightness): state.brightness = obj.brightness self.need_redraw = True if isinstance(obj, MPImageFullSize): self.full_size() if isinstance(obj, MPImageFitToWindow): self.fit_to_window() if self.need_redraw: self.redraw()
[ "def", "on_redraw_timer", "(", "self", ",", "event", ")", ":", "state", "=", "self", ".", "state", "while", "state", ".", "in_queue", ".", "qsize", "(", ")", ":", "obj", "=", "state", ".", "in_queue", ".", "get", "(", ")", "if", "isinstance", "(", "obj", ",", "MPImageData", ")", ":", "img", "=", "wx", ".", "EmptyImage", "(", "obj", ".", "width", ",", "obj", ".", "height", ")", "img", ".", "SetData", "(", "obj", ".", "data", ")", "self", ".", "img", "=", "img", "self", ".", "need_redraw", "=", "True", "if", "state", ".", "auto_size", ":", "client_area", "=", "state", ".", "frame", ".", "GetClientSize", "(", ")", "total_area", "=", "state", ".", "frame", ".", "GetSize", "(", ")", "bx", "=", "max", "(", "total_area", ".", "x", "-", "client_area", ".", "x", ",", "0", ")", "by", "=", "max", "(", "total_area", ".", "y", "-", "client_area", ".", "y", ",", "0", ")", "state", ".", "frame", ".", "SetSize", "(", "wx", ".", "Size", "(", "obj", ".", "width", "+", "bx", ",", "obj", ".", "height", "+", "by", ")", ")", "if", "isinstance", "(", "obj", ",", "MPImageTitle", ")", ":", "state", ".", "frame", ".", "SetTitle", "(", "obj", ".", "title", ")", "if", "isinstance", "(", "obj", ",", "MPImageMenu", ")", ":", "self", ".", "set_menu", "(", "obj", ".", "menu", ")", "if", "isinstance", "(", "obj", ",", "MPImagePopupMenu", ")", ":", "self", ".", "set_popup_menu", "(", "obj", ".", "menu", ")", "if", "isinstance", "(", "obj", ",", "MPImageBrightness", ")", ":", "state", ".", "brightness", "=", "obj", ".", "brightness", "self", ".", "need_redraw", "=", "True", "if", "isinstance", "(", "obj", ",", "MPImageFullSize", ")", ":", "self", ".", "full_size", "(", ")", "if", "isinstance", "(", "obj", ",", "MPImageFitToWindow", ")", ":", "self", ".", "fit_to_window", "(", ")", "if", "self", ".", "need_redraw", ":", "self", ".", "redraw", "(", ")" ]
43.9375
10.25
def _cast_to_pod(val): """Try cast to int, float, bool, str, in that order.""" bools = {"True": True, "False": False} if val in bools: return bools[val] try: return int(val) except ValueError: try: return float(val) except ValueError: return tf.compat.as_text(val)
[ "def", "_cast_to_pod", "(", "val", ")", ":", "bools", "=", "{", "\"True\"", ":", "True", ",", "\"False\"", ":", "False", "}", "if", "val", "in", "bools", ":", "return", "bools", "[", "val", "]", "try", ":", "return", "int", "(", "val", ")", "except", "ValueError", ":", "try", ":", "return", "float", "(", "val", ")", "except", "ValueError", ":", "return", "tf", ".", "compat", ".", "as_text", "(", "val", ")" ]
24.25
17.166667
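A few illustrative conversions; ``tf`` is assumed to be TensorFlow, as the final fallback suggests.

_cast_to_pod("False")  # -> False (bool)
_cast_to_pod("42")     # -> 42 (int)
_cast_to_pod("4.2")    # -> 4.2 (float)
_cast_to_pod("42abc")  # -> '42abc' (left as text via tf.compat.as_text)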
def exponential_map(term): """ Returns a function f(alpha) that constructs the Program corresponding to exp(-1j*alpha*term). :param term: A pauli term to exponentiate :returns: A function that takes an angle parameter and returns a program. :rtype: Function """ if not np.isclose(np.imag(term.coefficient), 0.0): raise TypeError("PauliTerm coefficient must be real") coeff = term.coefficient.real term.coefficient = term.coefficient.real def exp_wrap(param): prog = Program() if is_identity(term): prog.inst(X(0)) prog.inst(PHASE(-param * coeff, 0)) prog.inst(X(0)) prog.inst(PHASE(-param * coeff, 0)) elif is_zero(term): pass else: prog += _exponentiate_general_case(term, param) return prog return exp_wrap
[ "def", "exponential_map", "(", "term", ")", ":", "if", "not", "np", ".", "isclose", "(", "np", ".", "imag", "(", "term", ".", "coefficient", ")", ",", "0.0", ")", ":", "raise", "TypeError", "(", "\"PauliTerm coefficient must be real\"", ")", "coeff", "=", "term", ".", "coefficient", ".", "real", "term", ".", "coefficient", "=", "term", ".", "coefficient", ".", "real", "def", "exp_wrap", "(", "param", ")", ":", "prog", "=", "Program", "(", ")", "if", "is_identity", "(", "term", ")", ":", "prog", ".", "inst", "(", "X", "(", "0", ")", ")", "prog", ".", "inst", "(", "PHASE", "(", "-", "param", "*", "coeff", ",", "0", ")", ")", "prog", ".", "inst", "(", "X", "(", "0", ")", ")", "prog", ".", "inst", "(", "PHASE", "(", "-", "param", "*", "coeff", ",", "0", ")", ")", "elif", "is_zero", "(", "term", ")", ":", "pass", "else", ":", "prog", "+=", "_exponentiate_general_case", "(", "term", ",", "param", ")", "return", "prog", "return", "exp_wrap" ]
30.321429
19.535714
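A usage sketch assuming the pyQuil Pauli-term API that this helper appears to target; the import path is an assumption about the surrounding module.

from pyquil.paulis import PauliTerm  # assumed context for this helper

term = PauliTerm("Z", 0, 0.5)        # 0.5 * Z on qubit 0
param_prog = exponential_map(term)   # returns a function of the angle
prog = param_prog(0.1)               # Program for exp(-1j * 0.1 * 0.5 * Z_0)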
def list_versions(self, bucket_name, prefix='', delimiter='', max_results=1000,
                  starting_key='', starting_version=''):
    '''
        a method for retrieving a list of the versions of records in a bucket

    :param bucket_name: string with name of bucket
    :param prefix: [optional] string with value limiting results to key prefix
    :param delimiter: [optional] string with value limiting results to key delimiter
    :param max_results: [optional] integer with max results to return
    :param starting_key: [optional] string with key value to continue search with
    :param starting_version: [optional] string with version id to continue search with
    :return: list of results with key, size and date, dictionary with key and version id
    '''

    title = '%s.list_versions' % self.__class__.__name__

    from datetime import datetime
    from dateutil.tz import tzutc

    # validate inputs
    input_fields = {
        'bucket_name': bucket_name,
        'prefix': prefix,
        'delimiter': delimiter,
        'max_results': max_results,
        'starting_key': starting_key,
        'starting_version': starting_version
    }
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)

    # validate mutual requirements
    if starting_version or starting_key:
        if not starting_version or not starting_key:
            raise ValueError('%s inputs starting_key and starting_version each require the other.' % title)

    # verify existence of bucket
    if bucket_name not in self.bucket_list:
        if bucket_name not in self.list_buckets():
            raise ValueError('S3 Bucket "%s" does not exist in aws region %s.' % (bucket_name, self.iam.region_name))

    # create keyword argument dictionary
    kw_args = {
        'Bucket': bucket_name
    }
    if starting_key:
        kw_args['KeyMarker'] = starting_key
    if starting_version:
        kw_args['VersionIdMarker'] = starting_version
    if prefix:
        kw_args['Prefix'] = prefix
    if delimiter:
        kw_args['Delimiter'] = delimiter
    if max_results:
        kw_args['MaxKeys'] = max_results

    # send request for objects
    record_list = []
    next_key = {}
    try:
        response = self.connection.list_object_versions(**kw_args)
    except Exception:  # a bare except here would also swallow KeyboardInterrupt
        raise AWSConnectionError(title)

    # add version keys and ids to object list
    if 'Versions' in response:
        for record in response['Versions']:
            details = {
                'key': '',
                'version_id': ''
            }
            details = self.iam.ingest(record, details)
            epoch_zero = datetime.fromtimestamp(0).replace(tzinfo=tzutc())
            details['last_modified'] = (details['last_modified'] - epoch_zero).total_seconds()
            details['current_version'] = details['is_latest']
            del details['is_latest']
            record_list.append(details)

    # add delete markers to object list
    if 'DeleteMarkers' in response:
        for record in response['DeleteMarkers']:
            details = {
                'key': '',
                'version_id': ''
            }
            details = self.iam.ingest(record, details)
            epoch_zero = datetime.fromtimestamp(0).replace(tzinfo=tzutc())
            details['last_modified'] = (details['last_modified'] - epoch_zero).total_seconds()
            details['current_version'] = details['is_latest']
            del details['is_latest']
            if 'size' not in details:
                details['size'] = 0
            record_list.append(details)

    # define next key value
    if response['IsTruncated']:
        next_key = {
            'key': response['NextKeyMarker'],
            'version_id': response['NextVersionIdMarker']
        }

    return record_list, next_key
[ "def", "list_versions", "(", "self", ",", "bucket_name", ",", "prefix", "=", "''", ",", "delimiter", "=", "''", ",", "max_results", "=", "1000", ",", "starting_key", "=", "''", ",", "starting_version", "=", "''", ")", ":", "title", "=", "'%s.list_versions'", "%", "self", ".", "__class__", ".", "__name__", "from", "datetime", "import", "datetime", "from", "dateutil", ".", "tz", "import", "tzutc", "# validate inputs", "input_fields", "=", "{", "'bucket_name'", ":", "bucket_name", ",", "'prefix'", ":", "prefix", ",", "'delimiter'", ":", "delimiter", ",", "'max_results'", ":", "max_results", ",", "'starting_key'", ":", "starting_key", ",", "'starting_version'", ":", "starting_version", "}", "for", "key", ",", "value", "in", "input_fields", ".", "items", "(", ")", ":", "if", "value", ":", "object_title", "=", "'%s(%s=%s)'", "%", "(", "title", ",", "key", ",", "str", "(", "value", ")", ")", "self", ".", "fields", ".", "validate", "(", "value", ",", "'.%s'", "%", "key", ",", "object_title", ")", "# validate mutual requirements", "if", "starting_version", "or", "starting_key", ":", "if", "not", "starting_version", "or", "not", "starting_key", ":", "raise", "ValueError", "(", "'%s inputs starting_key and starting_version each require the other.'", "%", "title", ")", "# verify existence of bucket", "if", "not", "bucket_name", "in", "self", ".", "bucket_list", ":", "if", "not", "bucket_name", "in", "self", ".", "list_buckets", "(", ")", ":", "raise", "ValueError", "(", "'S3 Bucket \"%s\" does not exist in aws region %s.'", "%", "(", "bucket_name", ",", "self", ".", "iam", ".", "region_name", ")", ")", "# create key word argument dictionary", "kw_args", "=", "{", "'Bucket'", ":", "bucket_name", "}", "if", "starting_key", ":", "kw_args", "[", "'KeyMarker'", "]", "=", "starting_key", "if", "starting_version", ":", "kw_args", "[", "'VersionIdMarker'", "]", "=", "starting_version", "if", "prefix", ":", "kw_args", "[", "'Prefix'", "]", "=", "prefix", "if", "delimiter", ":", "kw_args", "[", "'Delimiter'", "]", "=", "delimiter", "if", "max_results", ":", "kw_args", "[", "'MaxKeys'", "]", "=", "max_results", "# send request for objects", "record_list", "=", "[", "]", "next_key", "=", "{", "}", "try", ":", "response", "=", "self", ".", "connection", ".", "list_object_versions", "(", "*", "*", "kw_args", ")", "except", ":", "raise", "AWSConnectionError", "(", "title", ")", "# add version keys and ids to object list", "if", "'Versions'", "in", "response", ":", "for", "record", "in", "response", "[", "'Versions'", "]", ":", "details", "=", "{", "'key'", ":", "''", ",", "'version_id'", ":", "''", "}", "details", "=", "self", ".", "iam", ".", "ingest", "(", "record", ",", "details", ")", "epoch_zero", "=", "datetime", ".", "fromtimestamp", "(", "0", ")", ".", "replace", "(", "tzinfo", "=", "tzutc", "(", ")", ")", "details", "[", "'last_modified'", "]", "=", "(", "details", "[", "'last_modified'", "]", "-", "epoch_zero", ")", ".", "total_seconds", "(", ")", "details", "[", "'current_version'", "]", "=", "details", "[", "'is_latest'", "]", "del", "details", "[", "'is_latest'", "]", "record_list", ".", "append", "(", "details", ")", "# add delete markers to object list", "if", "'DeleteMarkers'", "in", "response", ":", "for", "record", "in", "response", "[", "'DeleteMarkers'", "]", ":", "details", "=", "{", "'key'", ":", "''", ",", "'version_id'", ":", "''", "}", "details", "=", "self", ".", "iam", ".", "ingest", "(", "record", ",", "details", ")", "epoch_zero", "=", "datetime", ".", "fromtimestamp", "(", "0", ")", 
".", "replace", "(", "tzinfo", "=", "tzutc", "(", ")", ")", "details", "[", "'last_modified'", "]", "=", "(", "details", "[", "'last_modified'", "]", "-", "epoch_zero", ")", ".", "total_seconds", "(", ")", "details", "[", "'current_version'", "]", "=", "details", "[", "'is_latest'", "]", "del", "details", "[", "'is_latest'", "]", "if", "not", "'size'", "in", "details", ".", "keys", "(", ")", ":", "details", "[", "'size'", "]", "=", "0", "record_list", ".", "append", "(", "details", ")", "# define next key value", "if", "response", "[", "'IsTruncated'", "]", ":", "next_key", "=", "{", "'key'", ":", "response", "[", "'NextKeyMarker'", "]", ",", "'version_id'", ":", "response", "[", "'NextVersionIdMarker'", "]", "}", "return", "record_list", ",", "next_key" ]
40.038462
21.173077
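A hypothetical pagination loop over list_versions; ``s3`` stands for an instance of the class this method belongs to, and the bucket name is illustrative.

records = []
results, next_key = s3.list_versions("my-bucket", prefix="logs/")
records.extend(results)
while next_key:
    results, next_key = s3.list_versions(
        "my-bucket", prefix="logs/",
        starting_key=next_key["key"],
        starting_version=next_key["version_id"])
    records.extend(results)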
def _get_omim_ids(self):
    '''
    side effect: populate omim_type map from an omim number
    to an ontology term
    the ontology terms' labels as
        - 'gene'
            when they declare it as a gene
        - 'Phenotype'
            Phenotype, molecular basis known
        - 'heritable_phenotypic_marker'
            Phenotype or locus, molecular basis unknown
        - 'obsolete'
            when Removed or moved to another entry
        - 'has_affected_feature'
            when declared as "Gene and phenotype, combined",
            in the hope it can be detected and used as either

    :return a unique list of omim numbers
    '''
    src_key = 'mim2gene'
    omim_nums = set()   # all types
    line_counter = 0
    raw = '/'.join((self.rawdir, self.files[src_key]['file']))
    LOG.info("Obtaining OMIM record identifiers from: %s", raw)
    # TODO check to see if the file is there
    col = self.files[src_key]['columns']
    with open(raw, "r") as reader:
        reader.readline()           # copyright
        reader.readline()           # Generated: YYYY-MM-DD
        reader.readline()           # description
        reader.readline()           # disclaimer
        line = reader.readline()    # column headers
        row = line.strip().split('\t')
        if row != col:  # assert
            LOG.error('Expected %s to have columns: %s', raw, col)
            LOG.error('But Found %s to have columns: %s', raw, row)
            raise AssertionError('Incoming data headers have changed.')
        line_counter = 5

        for line in reader:
            line_counter += 1
            row = line.strip().split('\t')
            if len(row) != len(col):
                LOG.warning(
                    'Unexpected input on line: %i got: %s', line_counter, row)
                continue
            omim_num = row[col.index('MIM Number')]
            mimtype = row[col.index(
                'MIM Entry Type (see FAQ 1.3 at https://omim.org/help/faq)')]
            # ncbigene = row[col.index('Entrez Gene ID (NCBI)')]
            # hgnc = row[col.index('Approved Gene Symbol (HGNC)')]
            # ensembl = row[col.index('Ensembl Gene ID (Ensembl)')]
            omim_nums.update({omim_num})
            self.omim_type[omim_num] = None
            if mimtype == 'gene':
                self.omim_type[omim_num] = self.globaltt['gene']
            # Phenotype, molecular basis known
            elif mimtype == 'phenotype':
                self.omim_type[omim_num] = self.globaltt['Phenotype']
            # Phenotype or locus, molecular basis unknown
            elif mimtype == 'predominantly phenotypes':
                self.omim_type[omim_num] = self.globaltt[
                    'heritable_phenotypic_marker']  # ?
            # Removed or moved to another entry
            elif mimtype == 'moved/removed':
                self.omim_type[omim_num] = self.globaltt['obsolete']
            # "Gene and phenotype, combined" works as both/either.
            elif mimtype == 'gene/phenotype':
                self.omim_type[omim_num] = self.globaltt['has_affected_feature']
            else:
                LOG.warning(
                    'Unknown OMIM TYPE of %s on line %i', mimtype, line_counter)
    LOG.info("Done. found %d omim ids", len(omim_nums))

    return list(omim_nums)
[ "def", "_get_omim_ids", "(", "self", ")", ":", "src_key", "=", "'mim2gene'", "omim_nums", "=", "set", "(", ")", "# all types", "line_counter", "=", "0", "raw", "=", "'/'", ".", "join", "(", "(", "self", ".", "rawdir", ",", "self", ".", "files", "[", "src_key", "]", "[", "'file'", "]", ")", ")", "LOG", ".", "info", "(", "\"Obtaining OMIM record identifiers from: %s\"", ",", "raw", ")", "# TODO check to see if the file is there", "col", "=", "self", ".", "files", "[", "src_key", "]", "[", "'columns'", "]", "with", "open", "(", "raw", ",", "\"r\"", ")", "as", "reader", ":", "reader", ".", "readline", "(", ")", "# copyright", "reader", ".", "readline", "(", ")", "# Generated: YYYY-MM-DD", "reader", ".", "readline", "(", ")", "# discription", "reader", ".", "readline", "(", ")", "# disclaimer", "line", "=", "reader", ".", "readline", "(", ")", "# column headers", "row", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "'\\t'", ")", "if", "row", "!=", "col", ":", "# assert", "LOG", ".", "error", "(", "'Expected %s to have columns: %s'", ",", "raw", ",", "col", ")", "LOG", ".", "error", "(", "'But Found %s to have columns: %s'", ",", "raw", ",", "row", ")", "raise", "AssertionError", "(", "'Incomming data headers have changed.'", ")", "line_counter", "=", "5", "for", "line", "in", "reader", ":", "line_counter", "+=", "1", "row", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "'\\t'", ")", "if", "len", "(", "row", ")", "!=", "len", "(", "col", ")", ":", "LOG", ".", "warning", "(", "'Unexpected input on line: %i got: %s'", ",", "line_counter", ",", "row", ")", "continue", "omim_num", "=", "row", "[", "col", ".", "index", "(", "'MIM Number'", ")", "]", "mimtype", "=", "row", "[", "col", ".", "index", "(", "'MIM Entry Type (see FAQ 1.3 at https://omim.org/help/faq)'", ")", "]", "# ncbigene = row[col.index('Entrez Gene ID (NCBI)')]", "# hgnc = row[col.index('Approved Gene Symbol (HGNC)')]", "# ensembl = row[col.index('Ensembl Gene ID (Ensembl)')]", "omim_nums", ".", "update", "(", "{", "omim_num", "}", ")", "self", ".", "omim_type", "[", "omim_num", "]", "=", "None", "if", "mimtype", "==", "'gene'", ":", "self", ".", "omim_type", "[", "omim_num", "]", "=", "self", ".", "globaltt", "[", "'gene'", "]", "# Phenotype, molecular basis known", "elif", "mimtype", "==", "'phenotype'", ":", "self", ".", "omim_type", "[", "omim_num", "]", "=", "self", ".", "globaltt", "[", "'Phenotype'", "]", "# Phenotype or locus, molecular basis unknown", "elif", "mimtype", "==", "'predominantly phenotypes'", ":", "self", ".", "omim_type", "[", "omim_num", "]", "=", "self", ".", "globaltt", "[", "'heritable_phenotypic_marker'", "]", "# ?", "# Removed or moved to another entry", "elif", "mimtype", "==", "'moved/removed'", ":", "self", ".", "omim_type", "[", "omim_num", "]", "=", "self", ".", "globaltt", "[", "'obsolete'", "]", "# \"Gene and phenotype, combined\" works as both/either.", "elif", "mimtype", "==", "'gene/phenotype'", ":", "self", ".", "omim_type", "[", "omim_num", "]", "=", "self", ".", "globaltt", "[", "'has_affected_feature'", "]", "else", ":", "LOG", ".", "warning", "(", "'Unknown OMIM TYPE of %s on line %i'", ",", "mimtype", ",", "line_counter", ")", "LOG", ".", "info", "(", "\"Done. found %d omim ids\"", ",", "len", "(", "omim_nums", ")", ")", "return", "list", "(", "omim_nums", ")" ]
42.174419
20.895349
def erps(self, erps): '''Set the ERP values for this object's degrees of freedom. Parameters ---------- erps : float or sequence of float An ERP value to set on all degrees of freedom, or a list containing one such value for each degree of freedom. ''' _set_params(self.ode_obj, 'ERP', erps, self.ADOF + self.LDOF)
[ "def", "erps", "(", "self", ",", "erps", ")", ":", "_set_params", "(", "self", ".", "ode_obj", ",", "'ERP'", ",", "erps", ",", "self", ".", "ADOF", "+", "self", ".", "LDOF", ")" ]
37.8
24.2
def rmsd(ref_cds, est_cds): """ Root-mean-squared-difference """ ref_dists = pdist(ref_cds) est_dists = pdist(est_cds) return np.sqrt(((ref_dists - est_dists)**2).mean())
[ "def", "rmsd", "(", "ref_cds", ",", "est_cds", ")", ":", "ref_dists", "=", "pdist", "(", "ref_cds", ")", "est_dists", "=", "pdist", "(", "est_cds", ")", "return", "np", ".", "sqrt", "(", "(", "(", "ref_dists", "-", "est_dists", ")", "**", "2", ")", ".", "mean", "(", ")", ")" ]
26.857143
8
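A quick check of rmsd on a point set and a jittered copy; pdist is assumed to come from scipy.spatial.distance, matching the call signature above.

import numpy as np
from scipy.spatial.distance import pdist  # what the module above appears to use

ref = np.random.rand(10, 2)                               # reference coordinates
est = ref + np.random.normal(scale=0.01, size=ref.shape)  # noisy estimate
print(rmsd(ref, est))   # small value on the order of the noise scale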
def make_account_admin(self, user_id, account_id, role=None, role_id=None, send_confirmation=None): """ Make an account admin. Flag an existing user as an admin within the account. """ path = {} data = {} params = {} # REQUIRED - PATH - account_id """ID""" path["account_id"] = account_id # REQUIRED - user_id """The id of the user to promote.""" data["user_id"] = user_id # OPTIONAL - role """(deprecated) The user's admin relationship with the account will be created with the given role. Defaults to 'AccountAdmin'.""" if role is not None: data["role"] = role # OPTIONAL - role_id """The user's admin relationship with the account will be created with the given role. Defaults to the built-in role for 'AccountAdmin'.""" if role_id is not None: data["role_id"] = role_id # OPTIONAL - send_confirmation """Send a notification email to the new admin if true. Default is true.""" if send_confirmation is not None: data["send_confirmation"] = send_confirmation self.logger.debug("POST /api/v1/accounts/{account_id}/admins with query params: {params} and form data: {data}".format(params=params, data=data, **path)) return self.generic_request("POST", "/api/v1/accounts/{account_id}/admins".format(**path), data=data, params=params, single_item=True)
[ "def", "make_account_admin", "(", "self", ",", "user_id", ",", "account_id", ",", "role", "=", "None", ",", "role_id", "=", "None", ",", "send_confirmation", "=", "None", ")", ":", "path", "=", "{", "}", "data", "=", "{", "}", "params", "=", "{", "}", "# REQUIRED - PATH - account_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"account_id\"", "]", "=", "account_id", "# REQUIRED - user_id\r", "\"\"\"The id of the user to promote.\"\"\"", "data", "[", "\"user_id\"", "]", "=", "user_id", "# OPTIONAL - role\r", "\"\"\"(deprecated)\r\n The user's admin relationship with the account will be created with the\r\n given role. Defaults to 'AccountAdmin'.\"\"\"", "if", "role", "is", "not", "None", ":", "data", "[", "\"role\"", "]", "=", "role", "# OPTIONAL - role_id\r", "\"\"\"The user's admin relationship with the account will be created with the\r\n given role. Defaults to the built-in role for 'AccountAdmin'.\"\"\"", "if", "role_id", "is", "not", "None", ":", "data", "[", "\"role_id\"", "]", "=", "role_id", "# OPTIONAL - send_confirmation\r", "\"\"\"Send a notification email to\r\n the new admin if true. Default is true.\"\"\"", "if", "send_confirmation", "is", "not", "None", ":", "data", "[", "\"send_confirmation\"", "]", "=", "send_confirmation", "self", ".", "logger", ".", "debug", "(", "\"POST /api/v1/accounts/{account_id}/admins with query params: {params} and form data: {data}\"", ".", "format", "(", "params", "=", "params", ",", "data", "=", "data", ",", "*", "*", "path", ")", ")", "return", "self", ".", "generic_request", "(", "\"POST\"", ",", "\"/api/v1/accounts/{account_id}/admins\"", ".", "format", "(", "*", "*", "path", ")", ",", "data", "=", "data", ",", "params", "=", "params", ",", "single_item", "=", "True", ")" ]
38.846154
20.128205
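A hypothetical call through the Canvas client instance this method belongs to; the IDs are illustrative.

admin = canvas.make_account_admin(user_id=42, account_id=1,
                                  send_confirmation=False)
# POSTs to /api/v1/accounts/1/admins with user_id=42 in the form data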
def predict_mhcii_binding(job, peptfile, allele, univ_options, mhcii_options): """ This module will predict MHC:peptide binding for peptides in the files created in node YY to ALLELE. ALLELE represents an MHCII allele. The module returns (PREDFILE, PREDICTOR) where PREDFILE contains the predictions and PREDICTOR is the predictor used (Consensus, NetMHCIIpan, or Sturniolo). This module corresponds to node 19 on the tree """ job.fileStore.logToMaster('Running mhcii on %s:%s' % (univ_options['patient'], allele)) work_dir = job.fileStore.getLocalTempDir() input_files = { 'peptfile.faa': peptfile} input_files = get_files_from_filestore(job, input_files, work_dir, docker=True) parameters = [mhcii_options['pred'], allele, input_files['peptfile.faa']] with open('/'.join([work_dir, 'predictions.tsv']), 'w') as predfile: docker_call(tool='mhcii', tool_parameters=parameters, work_dir=work_dir, dockerhub=univ_options['dockerhub'], outfile=predfile, interactive=True) run_netMHCIIpan = True with open(predfile.name, 'r') as predfile: for line in predfile: if not line.startswith('HLA'): continue if line.strip().split('\t')[5] == 'NetMHCIIpan': break # If the predictor type is sturniolo then it needs to be processed differently elif line.strip().split('\t')[5] == 'Sturniolo': predictor = 'Sturniolo' else: predictor = 'Consensus' run_netMHCIIpan = False break if run_netMHCIIpan: NetMHCIIpan = job.addChildJobFn(predict_netmhcii_binding, peptfile, allele, univ_options, disk='10G') return NetMHCIIpan.rv() else: output_file = job.fileStore.writeGlobalFile(predfile.name) return output_file, predictor
[ "def", "predict_mhcii_binding", "(", "job", ",", "peptfile", ",", "allele", ",", "univ_options", ",", "mhcii_options", ")", ":", "job", ".", "fileStore", ".", "logToMaster", "(", "'Running mhcii on %s:%s'", "%", "(", "univ_options", "[", "'patient'", "]", ",", "allele", ")", ")", "work_dir", "=", "job", ".", "fileStore", ".", "getLocalTempDir", "(", ")", "input_files", "=", "{", "'peptfile.faa'", ":", "peptfile", "}", "input_files", "=", "get_files_from_filestore", "(", "job", ",", "input_files", ",", "work_dir", ",", "docker", "=", "True", ")", "parameters", "=", "[", "mhcii_options", "[", "'pred'", "]", ",", "allele", ",", "input_files", "[", "'peptfile.faa'", "]", "]", "with", "open", "(", "'/'", ".", "join", "(", "[", "work_dir", ",", "'predictions.tsv'", "]", ")", ",", "'w'", ")", "as", "predfile", ":", "docker_call", "(", "tool", "=", "'mhcii'", ",", "tool_parameters", "=", "parameters", ",", "work_dir", "=", "work_dir", ",", "dockerhub", "=", "univ_options", "[", "'dockerhub'", "]", ",", "outfile", "=", "predfile", ",", "interactive", "=", "True", ")", "run_netMHCIIpan", "=", "True", "with", "open", "(", "predfile", ".", "name", ",", "'r'", ")", "as", "predfile", ":", "for", "line", "in", "predfile", ":", "if", "not", "line", ".", "startswith", "(", "'HLA'", ")", ":", "continue", "if", "line", ".", "strip", "(", ")", ".", "split", "(", "'\\t'", ")", "[", "5", "]", "==", "'NetMHCIIpan'", ":", "break", "# If the predictor type is sturniolo then it needs to be processed differently", "elif", "line", ".", "strip", "(", ")", ".", "split", "(", "'\\t'", ")", "[", "5", "]", "==", "'Sturniolo'", ":", "predictor", "=", "'Sturniolo'", "else", ":", "predictor", "=", "'Consensus'", "run_netMHCIIpan", "=", "False", "break", "if", "run_netMHCIIpan", ":", "NetMHCIIpan", "=", "job", ".", "addChildJobFn", "(", "predict_netmhcii_binding", ",", "peptfile", ",", "allele", ",", "univ_options", ",", "disk", "=", "'10G'", ")", "return", "NetMHCIIpan", ".", "rv", "(", ")", "else", ":", "output_file", "=", "job", ".", "fileStore", ".", "writeGlobalFile", "(", "predfile", ".", "name", ")", "return", "output_file", ",", "predictor" ]
46.047619
21.714286
def format(self, dt, fmt, locale=None): """ Formats a DateTime instance with a given format and locale. :param dt: The instance to format :type dt: pendulum.DateTime :param fmt: The format to use :type fmt: str :param locale: The locale to use :type locale: str or Locale or None :rtype: str """ if not locale: locale = pendulum.get_locale() locale = Locale.load(locale) result = self._FORMAT_RE.sub( lambda m: m.group(1) if m.group(1) else m.group(2) if m.group(2) else self._format_token(dt, m.group(3), locale), fmt, ) return decode(result)
[ "def", "format", "(", "self", ",", "dt", ",", "fmt", ",", "locale", "=", "None", ")", ":", "if", "not", "locale", ":", "locale", "=", "pendulum", ".", "get_locale", "(", ")", "locale", "=", "Locale", ".", "load", "(", "locale", ")", "result", "=", "self", ".", "_FORMAT_RE", ".", "sub", "(", "lambda", "m", ":", "m", ".", "group", "(", "1", ")", "if", "m", ".", "group", "(", "1", ")", "else", "m", ".", "group", "(", "2", ")", "if", "m", ".", "group", "(", "2", ")", "else", "self", ".", "_format_token", "(", "dt", ",", "m", ".", "group", "(", "3", ")", ",", "locale", ")", ",", "fmt", ",", ")", "return", "decode", "(", "result", ")" ]
24.166667
17.433333
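This formatter is normally reached through pendulum's public API; a short example assuming pendulum's documented format tokens (bracketed text is passed through literally).

import pendulum

dt = pendulum.datetime(2019, 3, 7, 14, 30)
print(dt.format("YYYY-MM-DD HH:mm"))        # -> '2019-03-07 14:30'
print(dt.format("[on] dddd", locale="en"))  # -> 'on Thursday'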
def _multiprocessing_to_asyncio(in_queue, out_queue, loop): """Bridge between a synchronous multiprocessing queue and an asynchronous asyncio queue. Args: in_queue (multiprocessing.Queue): input queue out_queue (asyncio.Queue): output queue """ while True: value = in_queue.get() loop.call_soon_threadsafe(out_queue.put_nowait, value)
[ "def", "_multiprocessing_to_asyncio", "(", "in_queue", ",", "out_queue", ",", "loop", ")", ":", "while", "True", ":", "value", "=", "in_queue", ".", "get", "(", ")", "loop", ".", "call_soon_threadsafe", "(", "out_queue", ".", "put_nowait", ",", "value", ")" ]
31.416667
17.416667
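A wiring sketch for the bridge: a daemon thread drains the multiprocessing queue into the asyncio queue owned by the running loop. The producer side is omitted, and all names are illustrative.

import asyncio
import multiprocessing
import threading

in_q = multiprocessing.Queue()
out_q = asyncio.Queue()
loop = asyncio.new_event_loop()

threading.Thread(target=_multiprocessing_to_asyncio,
                 args=(in_q, out_q, loop), daemon=True).start()

async def consume():
    while True:
        value = await out_q.get()   # values put into in_q elsewhere arrive here
        print("received", value)

loop.create_task(consume())
loop.run_forever()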
def NearestNeighborLearner(dataset, k=1): "k-NearestNeighbor: the k nearest neighbors vote." def predict(example): "Find the k closest, and have them vote for the best." best = heapq.nsmallest(k, ((dataset.distance(e, example), e) for e in dataset.examples)) return mode(e[dataset.target] for (d, e) in best) return predict
[ "def", "NearestNeighborLearner", "(", "dataset", ",", "k", "=", "1", ")", ":", "def", "predict", "(", "example", ")", ":", "\"Find the k closest, and have them vote for the best.\"", "best", "=", "heapq", ".", "nsmallest", "(", "k", ",", "(", "(", "dataset", ".", "distance", "(", "e", ",", "example", ")", ",", "e", ")", "for", "e", "in", "dataset", ".", "examples", ")", ")", "return", "mode", "(", "e", "[", "dataset", ".", "target", "]", "for", "(", "d", ",", "e", ")", "in", "best", ")", "return", "predict" ]
48.375
17.625
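Hypothetical usage against an AIMA-style DataSet (which supplies .examples, .target and .distance); the iris name and query vector are illustrative.

iris = DataSet(name="iris")                 # assumes the module's DataSet class
predict = NearestNeighborLearner(iris, k=3)
print(predict([5.1, 3.0, 1.1, 0.1]))        # majority class among 3 neighbours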
def make_app(config=None): """ Factory function that creates a new `CoolmagicApplication` object. Optional WSGI middlewares should be applied here. """ config = config or {} app = CoolMagicApplication(config) # static stuff app = SharedDataMiddleware( app, {"/public": path.join(path.dirname(__file__), "public")} ) # clean up locals app = local_manager.make_middleware(app) return app
[ "def", "make_app", "(", "config", "=", "None", ")", ":", "config", "=", "config", "or", "{", "}", "app", "=", "CoolMagicApplication", "(", "config", ")", "# static stuff", "app", "=", "SharedDataMiddleware", "(", "app", ",", "{", "\"/public\"", ":", "path", ".", "join", "(", "path", ".", "dirname", "(", "__file__", ")", ",", "\"public\"", ")", "}", ")", "# clean up locals", "app", "=", "local_manager", ".", "make_middleware", "(", "app", ")", "return", "app" ]
25.176471
19.882353
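The factory would typically be served with Werkzeug's development server in a project like this (an assumption about the surrounding code):

from werkzeug.serving import run_simple

run_simple("localhost", 5000, make_app(), use_reloader=True)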
def dbsource(dbname, var, resolution=None, tscale=None):
    """Return which file(s) to use according to dbname, var, etc
    """
    db_cfg = {}
    cfg_dir = 'datasource'
    cfg_files = pkg_resources.resource_listdir('oceansdb', cfg_dir)
    cfg_files = [f for f in cfg_files if f[-5:] == '.json']
    for src_cfg in cfg_files:
        text = pkg_resources.resource_string(
            'oceansdb', os.path.join(cfg_dir, src_cfg))
        text = text.decode('UTF-8', 'replace')
        cfg = json.loads(text)
        for c in cfg:
            # the message was missing its "% c" interpolation
            assert c not in db_cfg, "Trying to overwrite %s" % c
            db_cfg[c] = cfg[c]

    dbpath = oceansdb_dir()
    datafiles = []
    cfg = db_cfg[dbname]

    if (resolution is None):
        resolution = cfg['vars'][var]['default_resolution']

    if (tscale is None):
        tscale = cfg['vars'][var][resolution]["default_tscale"]

    for c in cfg['vars'][var][resolution][tscale]:
        download_file(outputdir=dbpath, **c)

        if 'filename' in c:
            filename = os.path.join(dbpath, c['filename'])
        else:
            filename = os.path.join(dbpath,
                                    os.path.basename(urlparse(c['url']).path))

        if 'varnames' in cfg['vars'][var][resolution]:
            datafiles.append(Dataset_flex(
                filename, aliases=cfg['vars'][var][resolution]['varnames']))
        else:
            datafiles.append(Dataset_flex(filename))

    return datafiles
[ "def", "dbsource", "(", "dbname", ",", "var", ",", "resolution", "=", "None", ",", "tscale", "=", "None", ")", ":", "db_cfg", "=", "{", "}", "cfg_dir", "=", "'datasource'", "cfg_files", "=", "pkg_resources", ".", "resource_listdir", "(", "'oceansdb'", ",", "cfg_dir", ")", "cfg_files", "=", "[", "f", "for", "f", "in", "cfg_files", "if", "f", "[", "-", "5", ":", "]", "==", "'.json'", "]", "for", "src_cfg", "in", "cfg_files", ":", "text", "=", "pkg_resources", ".", "resource_string", "(", "'oceansdb'", ",", "os", ".", "path", ".", "join", "(", "cfg_dir", ",", "src_cfg", ")", ")", "text", "=", "text", ".", "decode", "(", "'UTF-8'", ",", "'replace'", ")", "cfg", "=", "json", ".", "loads", "(", "text", ")", "for", "c", "in", "cfg", ":", "assert", "c", "not", "in", "db_cfg", ",", "\"Trying to overwrite %s\"", "db_cfg", "[", "c", "]", "=", "cfg", "[", "c", "]", "dbpath", "=", "oceansdb_dir", "(", ")", "datafiles", "=", "[", "]", "cfg", "=", "db_cfg", "[", "dbname", "]", "if", "(", "resolution", "is", "None", ")", ":", "resolution", "=", "cfg", "[", "'vars'", "]", "[", "var", "]", "[", "'default_resolution'", "]", "if", "(", "tscale", "is", "None", ")", ":", "tscale", "=", "cfg", "[", "'vars'", "]", "[", "var", "]", "[", "resolution", "]", "[", "\"default_tscale\"", "]", "for", "c", "in", "cfg", "[", "'vars'", "]", "[", "var", "]", "[", "resolution", "]", "[", "tscale", "]", ":", "download_file", "(", "outputdir", "=", "dbpath", ",", "*", "*", "c", ")", "if", "'filename'", "in", "c", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "dbpath", ",", "c", "[", "'filename'", "]", ")", "else", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "dbpath", ",", "os", ".", "path", ".", "basename", "(", "urlparse", "(", "c", "[", "'url'", "]", ")", ".", "path", ")", ")", "if", "'varnames'", "in", "cfg", "[", "'vars'", "]", "[", "var", "]", "[", "resolution", "]", ":", "datafiles", ".", "append", "(", "Dataset_flex", "(", "filename", ",", "aliases", "=", "cfg", "[", "'vars'", "]", "[", "var", "]", "[", "resolution", "]", "[", "'varnames'", "]", ")", ")", "else", ":", "datafiles", ".", "append", "(", "Dataset_flex", "(", "filename", ")", ")", "return", "datafiles" ]
33.571429
19.261905
def cursor_to_data_header(cursor):
    """Fetches all rows from query ("cursor") and returns a pair (data, header)

    Returns:
        (data, header), where
          - data is a [num_rows]x[num_cols] sequence of sequences;
          - header is a [num_cols] list containing the field names
    """
    n = 0
    data, header = [], []
    for row in cursor:
        if n == 0:
            header = row.keys()
        n += 1  # the counter was never incremented, so the header was re-read on every row
        data.append(row.values())
    return data, list(header)
[ "def", "cursor_to_data_header", "(", "cursor", ")", ":", "n", "=", "0", "data", ",", "header", "=", "[", "]", ",", "{", "}", "for", "row", "in", "cursor", ":", "if", "n", "==", "0", ":", "header", "=", "row", ".", "keys", "(", ")", "data", ".", "append", "(", "row", ".", "values", "(", ")", ")", "return", "data", ",", "list", "(", "header", ")" ]
33
14.714286
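An illustrative call; the cursor's rows must be mapping-like (exposing .keys() and .values()), e.g. rows from a DB-API wrapper with a dict row factory. The connection and query are assumptions.

cursor = conn.execute("SELECT id, name FROM users")   # hypothetical connection
data, header = cursor_to_data_header(cursor)
# header -> ['id', 'name']; data -> [[1, 'alice'], [2, 'bob'], ...]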
def simple_peak_find(s, init_slope=500, start_slope=500, end_slope=200, min_peak_height=50, max_peak_width=1.5): """ Given a Series, return a list of tuples indicating when peaks start and stop and what their baseline is. [(t_start, t_end, hints) ...] """ point_gap = 10 def slid_win(itr, size=2): """Returns a sliding window of size 'size' along itr.""" itr, buf = iter(itr), [] for _ in range(size): buf += [next(itr)] for l in itr: yield buf buf = buf[1:] + [l] yield buf # TODO: check these smoothing defaults y, t = s.values, s.index.astype(float) smooth_y = movingaverage(y, 9) dxdt = np.gradient(smooth_y) / np.gradient(t) # dxdt = -savitzkygolay(ts, 5, 3, deriv=1).y / np.gradient(t) init_slopes = np.arange(len(dxdt))[dxdt > init_slope] if len(init_slopes) == 0: return [] # get the first points of any "runs" as a peak start # runs can have a gap of up to 10 points in them peak_sts = [init_slopes[0]] peak_sts += [j for i, j in slid_win(init_slopes, 2) if j - i > 10] peak_sts.sort() en_slopes = np.arange(len(dxdt))[dxdt < -end_slope] if len(en_slopes) == 0: return [] # filter out any lone points farther than 10 away from their neighbors en_slopes = [en_slopes[0]] en_slopes += [i[1] for i in slid_win(en_slopes, 3) if i[1] - i[0] < point_gap or i[2] - i[1] < point_gap] en_slopes += [en_slopes[-1]] # get the last points of any "runs" as a peak end peak_ens = [j for i, j in slid_win(en_slopes[::-1], 2) if i - j > point_gap] + [en_slopes[-1]] peak_ens.sort() # avals = np.arange(len(t))[np.abs(t - 0.675) < 0.25] # print([i for i in en_slopes if i in avals]) # print([(t[i], i) for i in peak_ens if i in avals]) peak_list = [] pk2 = 0 for pk in peak_sts: # don't allow overlapping peaks if pk < pk2: continue # track backwards to find the true start while dxdt[pk] > start_slope and pk > 0: pk -= 1 # now find where the peak ends dist_to_end = np.array(peak_ens) - pk pos_end = pk + dist_to_end[dist_to_end > 0] for pk2 in pos_end: if (y[pk2] - y[pk]) / (t[pk2] - t[pk]) > start_slope: # if the baseline beneath the peak is too large, let's # keep going to the next dip peak_list.append({'t0': t[pk], 't1': t[pk2]}) pk = pk2 elif t[pk2] - t[pk] > max_peak_width: # make sure that peak is short enough pk2 = pk + np.abs(t[pk:] - t[pk] - max_peak_width).argmin() break else: break else: # if no end point is found, the end point # is the end of the timeseries pk2 = len(t) - 1 if pk == pk2: continue pk_hgt = max(y[pk:pk2]) - min(y[pk:pk2]) if pk_hgt < min_peak_height: continue peak_list.append({'t0': t[pk], 't1': t[pk2]}) return peak_list
[ "def", "simple_peak_find", "(", "s", ",", "init_slope", "=", "500", ",", "start_slope", "=", "500", ",", "end_slope", "=", "200", ",", "min_peak_height", "=", "50", ",", "max_peak_width", "=", "1.5", ")", ":", "point_gap", "=", "10", "def", "slid_win", "(", "itr", ",", "size", "=", "2", ")", ":", "\"\"\"Returns a sliding window of size 'size' along itr.\"\"\"", "itr", ",", "buf", "=", "iter", "(", "itr", ")", ",", "[", "]", "for", "_", "in", "range", "(", "size", ")", ":", "buf", "+=", "[", "next", "(", "itr", ")", "]", "for", "l", "in", "itr", ":", "yield", "buf", "buf", "=", "buf", "[", "1", ":", "]", "+", "[", "l", "]", "yield", "buf", "# TODO: check these smoothing defaults", "y", ",", "t", "=", "s", ".", "values", ",", "s", ".", "index", ".", "astype", "(", "float", ")", "smooth_y", "=", "movingaverage", "(", "y", ",", "9", ")", "dxdt", "=", "np", ".", "gradient", "(", "smooth_y", ")", "/", "np", ".", "gradient", "(", "t", ")", "# dxdt = -savitzkygolay(ts, 5, 3, deriv=1).y / np.gradient(t)", "init_slopes", "=", "np", ".", "arange", "(", "len", "(", "dxdt", ")", ")", "[", "dxdt", ">", "init_slope", "]", "if", "len", "(", "init_slopes", ")", "==", "0", ":", "return", "[", "]", "# get the first points of any \"runs\" as a peak start", "# runs can have a gap of up to 10 points in them", "peak_sts", "=", "[", "init_slopes", "[", "0", "]", "]", "peak_sts", "+=", "[", "j", "for", "i", ",", "j", "in", "slid_win", "(", "init_slopes", ",", "2", ")", "if", "j", "-", "i", ">", "10", "]", "peak_sts", ".", "sort", "(", ")", "en_slopes", "=", "np", ".", "arange", "(", "len", "(", "dxdt", ")", ")", "[", "dxdt", "<", "-", "end_slope", "]", "if", "len", "(", "en_slopes", ")", "==", "0", ":", "return", "[", "]", "# filter out any lone points farther than 10 away from their neighbors", "en_slopes", "=", "[", "en_slopes", "[", "0", "]", "]", "en_slopes", "+=", "[", "i", "[", "1", "]", "for", "i", "in", "slid_win", "(", "en_slopes", ",", "3", ")", "if", "i", "[", "1", "]", "-", "i", "[", "0", "]", "<", "point_gap", "or", "i", "[", "2", "]", "-", "i", "[", "1", "]", "<", "point_gap", "]", "en_slopes", "+=", "[", "en_slopes", "[", "-", "1", "]", "]", "# get the last points of any \"runs\" as a peak end", "peak_ens", "=", "[", "j", "for", "i", ",", "j", "in", "slid_win", "(", "en_slopes", "[", ":", ":", "-", "1", "]", ",", "2", ")", "if", "i", "-", "j", ">", "point_gap", "]", "+", "[", "en_slopes", "[", "-", "1", "]", "]", "peak_ens", ".", "sort", "(", ")", "# avals = np.arange(len(t))[np.abs(t - 0.675) < 0.25]", "# print([i for i in en_slopes if i in avals])", "# print([(t[i], i) for i in peak_ens if i in avals])", "peak_list", "=", "[", "]", "pk2", "=", "0", "for", "pk", "in", "peak_sts", ":", "# don't allow overlapping peaks", "if", "pk", "<", "pk2", ":", "continue", "# track backwards to find the true start", "while", "dxdt", "[", "pk", "]", ">", "start_slope", "and", "pk", ">", "0", ":", "pk", "-=", "1", "# now find where the peak ends", "dist_to_end", "=", "np", ".", "array", "(", "peak_ens", ")", "-", "pk", "pos_end", "=", "pk", "+", "dist_to_end", "[", "dist_to_end", ">", "0", "]", "for", "pk2", "in", "pos_end", ":", "if", "(", "y", "[", "pk2", "]", "-", "y", "[", "pk", "]", ")", "/", "(", "t", "[", "pk2", "]", "-", "t", "[", "pk", "]", ")", ">", "start_slope", ":", "# if the baseline beneath the peak is too large, let's", "# keep going to the next dip", "peak_list", ".", "append", "(", "{", "'t0'", ":", "t", "[", "pk", "]", ",", "'t1'", ":", "t", "[", "pk2", "]", "}", ")", "pk", "=", "pk2", "elif", 
"t", "[", "pk2", "]", "-", "t", "[", "pk", "]", ">", "max_peak_width", ":", "# make sure that peak is short enough", "pk2", "=", "pk", "+", "np", ".", "abs", "(", "t", "[", "pk", ":", "]", "-", "t", "[", "pk", "]", "-", "max_peak_width", ")", ".", "argmin", "(", ")", "break", "else", ":", "break", "else", ":", "# if no end point is found, the end point", "# is the end of the timeseries", "pk2", "=", "len", "(", "t", ")", "-", "1", "if", "pk", "==", "pk2", ":", "continue", "pk_hgt", "=", "max", "(", "y", "[", "pk", ":", "pk2", "]", ")", "-", "min", "(", "y", "[", "pk", ":", "pk2", "]", ")", "if", "pk_hgt", "<", "min_peak_height", ":", "continue", "peak_list", ".", "append", "(", "{", "'t0'", ":", "t", "[", "pk", "]", ",", "'t1'", ":", "t", "[", "pk2", "]", "}", ")", "return", "peak_list" ]
35.397727
17.397727
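A synthetic example for simple_peak_find: one sharp Gaussian peak in a pandas Series indexed by time. movingaverage is a helper the module above evidently provides; the exact boundaries returned will depend on the smoothing.

import numpy as np
import pandas as pd

t = np.linspace(0, 5, 500)
y = 1000 * np.exp(-((t - 2.0) / 0.05) ** 2)   # sharp peak at t = 2
peaks = simple_peak_find(pd.Series(y, index=t))
# -> e.g. [{'t0': 1.9..., 't1': 2.0...}]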
def save(self): """ :return: save this environment on Ariane server (create or update) """ LOGGER.debug("Environment.save") post_payload = {} consolidated_osi_id = [] if self.id is not None: post_payload['environmentID'] = self.id if self.name is not None: post_payload['environmentName'] = self.name if self.description is not None: post_payload['environmentDescription'] = self.description if self.color_code is not None: post_payload['environmentColorCode'] = self.color_code if self.osi_ids is not None: consolidated_osi_id = copy.deepcopy(self.osi_ids) if self.osi_2_rm is not None: for osi_2_rm in self.osi_2_rm: if osi_2_rm.id is None: osi_2_rm.sync() consolidated_osi_id.remove(osi_2_rm.id) if self.osi_2_add is not None: for osi_id_2_add in self.osi_2_add: if osi_id_2_add.id is None: osi_id_2_add.save() consolidated_osi_id.append(osi_id_2_add.id) post_payload['environmentOSInstancesID'] = consolidated_osi_id args = {'http_operation': 'POST', 'operation_path': '', 'parameters': {'payload': json.dumps(post_payload)}} response = EnvironmentService.requester.call(args) if response.rc != 0: LOGGER.warning( 'Environment.save - Problem while saving environment ' + self.name + '. Reason: ' + str(response.response_content) + '-' + str(response.error_message) + " (" + str(response.rc) + ")" ) else: self.id = response.response_content['environmentID'] if self.osi_2_add is not None: for osi_2_add in self.osi_2_add: osi_2_add.sync() if self.osi_2_rm is not None: for osi_2_rm in self.osi_2_rm: osi_2_rm.sync() self.osi_2_add.clear() self.osi_2_rm.clear() self.sync() return self
[ "def", "save", "(", "self", ")", ":", "LOGGER", ".", "debug", "(", "\"Environment.save\"", ")", "post_payload", "=", "{", "}", "consolidated_osi_id", "=", "[", "]", "if", "self", ".", "id", "is", "not", "None", ":", "post_payload", "[", "'environmentID'", "]", "=", "self", ".", "id", "if", "self", ".", "name", "is", "not", "None", ":", "post_payload", "[", "'environmentName'", "]", "=", "self", ".", "name", "if", "self", ".", "description", "is", "not", "None", ":", "post_payload", "[", "'environmentDescription'", "]", "=", "self", ".", "description", "if", "self", ".", "color_code", "is", "not", "None", ":", "post_payload", "[", "'environmentColorCode'", "]", "=", "self", ".", "color_code", "if", "self", ".", "osi_ids", "is", "not", "None", ":", "consolidated_osi_id", "=", "copy", ".", "deepcopy", "(", "self", ".", "osi_ids", ")", "if", "self", ".", "osi_2_rm", "is", "not", "None", ":", "for", "osi_2_rm", "in", "self", ".", "osi_2_rm", ":", "if", "osi_2_rm", ".", "id", "is", "None", ":", "osi_2_rm", ".", "sync", "(", ")", "consolidated_osi_id", ".", "remove", "(", "osi_2_rm", ".", "id", ")", "if", "self", ".", "osi_2_add", "is", "not", "None", ":", "for", "osi_id_2_add", "in", "self", ".", "osi_2_add", ":", "if", "osi_id_2_add", ".", "id", "is", "None", ":", "osi_id_2_add", ".", "save", "(", ")", "consolidated_osi_id", ".", "append", "(", "osi_id_2_add", ".", "id", ")", "post_payload", "[", "'environmentOSInstancesID'", "]", "=", "consolidated_osi_id", "args", "=", "{", "'http_operation'", ":", "'POST'", ",", "'operation_path'", ":", "''", ",", "'parameters'", ":", "{", "'payload'", ":", "json", ".", "dumps", "(", "post_payload", ")", "}", "}", "response", "=", "EnvironmentService", ".", "requester", ".", "call", "(", "args", ")", "if", "response", ".", "rc", "!=", "0", ":", "LOGGER", ".", "warning", "(", "'Environment.save - Problem while saving environment '", "+", "self", ".", "name", "+", "'. Reason: '", "+", "str", "(", "response", ".", "response_content", ")", "+", "'-'", "+", "str", "(", "response", ".", "error_message", ")", "+", "\" (\"", "+", "str", "(", "response", ".", "rc", ")", "+", "\")\"", ")", "else", ":", "self", ".", "id", "=", "response", ".", "response_content", "[", "'environmentID'", "]", "if", "self", ".", "osi_2_add", "is", "not", "None", ":", "for", "osi_2_add", "in", "self", ".", "osi_2_add", ":", "osi_2_add", ".", "sync", "(", ")", "if", "self", ".", "osi_2_rm", "is", "not", "None", ":", "for", "osi_2_rm", "in", "self", ".", "osi_2_rm", ":", "osi_2_rm", ".", "sync", "(", ")", "self", ".", "osi_2_add", ".", "clear", "(", ")", "self", ".", "osi_2_rm", ".", "clear", "(", ")", "self", ".", "sync", "(", ")", "return", "self" ]
38.611111
17.166667
def require_server(fn): """ Checks if the user has called the task with a server name. Fabric tasks decorated with this decorator must be called like so:: fab <server name> <task name> If no server name is given, the task will not be executed. """ @wraps(fn) def wrapper(*args, **kwargs): if env.machine is None: abort(red('ERROR: You must provide a server name to call this' ' task!')) return fn(*args, **kwargs) return wrapper
[ "def", "require_server", "(", "fn", ")", ":", "@", "wraps", "(", "fn", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "env", ".", "machine", "is", "None", ":", "abort", "(", "red", "(", "'ERROR: You must provide a server name to call this'", "' task!'", ")", ")", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
28.055556
20.388889
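A hypothetical fabfile using the decorator; ``env.machine`` would be set by a separate server-selection task, per the docstring's ``fab <server name> <task name>`` convention.

from fabric.api import env, run, task

@task
@require_server
def deploy():
    run("git pull && touch wsgi.py")   # only runs once a server was chosen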
def iterate_nodes(self, string_key, distinct=True):
    """Given a string key, returns (as a generator) the nodes that can
    hold the key.

    The generator iterates one time through the ring
    starting at the correct position.

    if `distinct` is set, then the nodes returned will be unique,
    i.e. no virtual copies will be returned.
    """
    if not self.ring:
        yield None, None
        return  # nothing else to walk on an empty ring

    returned_values = set()

    def distinct_filter(value):
        if str(value) not in returned_values:
            returned_values.add(str(value))
            return value

    pos = self.get_node_pos(string_key)
    for key in self._sorted_keys[pos:]:
        val = distinct_filter(self.ring[key])
        if val:
            yield val

    for i, key in enumerate(self._sorted_keys):
        if i < pos:
            val = distinct_filter(self.ring[key])
            if val:
                yield val
[ "def", "iterate_nodes", "(", "self", ",", "string_key", ",", "distinct", "=", "True", ")", ":", "if", "not", "self", ".", "ring", ":", "yield", "None", ",", "None", "returned_values", "=", "set", "(", ")", "def", "distinct_filter", "(", "value", ")", ":", "if", "str", "(", "value", ")", "not", "in", "returned_values", ":", "returned_values", ".", "add", "(", "str", "(", "value", ")", ")", "return", "value", "pos", "=", "self", ".", "get_node_pos", "(", "string_key", ")", "for", "key", "in", "self", ".", "_sorted_keys", "[", "pos", ":", "]", ":", "val", "=", "distinct_filter", "(", "self", ".", "ring", "[", "key", "]", ")", "if", "val", ":", "yield", "val", "for", "i", ",", "key", "in", "enumerate", "(", "self", ".", "_sorted_keys", ")", ":", "if", "i", "<", "pos", ":", "val", "=", "distinct_filter", "(", "self", ".", "ring", "[", "key", "]", ")", "if", "val", ":", "yield", "val" ]
32.166667
16.466667
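A sketch of walking the consistent-hash ring; ``ring`` stands for an instance of the class this method belongs to, and try_store is a hypothetical helper.

# assumes a non-empty ring; an empty one yields a (None, None) sentinel
for node in ring.iterate_nodes("my-key"):
    if try_store(node, "my-key"):   # hypothetical: first node that accepts wins
        break                       # stop at the first node that holds the key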
def remove_null_proxy_kwarg(func): """decorator, to remove a 'proxy' keyword argument. For wrapping certain Manager methods""" def wrapper(*args, **kwargs): if 'proxy' in kwargs: # if kwargs['proxy'] is None: del kwargs['proxy'] # else: # raise InvalidArgument('Manager sessions cannot be called with Proxies. Use ProxyManager instead') return func(*args, **kwargs) return wrapper
[ "def", "remove_null_proxy_kwarg", "(", "func", ")", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "'proxy'", "in", "kwargs", ":", "# if kwargs['proxy'] is None:", "del", "kwargs", "[", "'proxy'", "]", "# else:", "# raise InvalidArgument('Manager sessions cannot be called with Proxies. Use ProxyManager instead')", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
45
15.5
def _ann_store_annotations(self, item_with_annotations, node, overwrite=False): """Stores annotations into an hdf5 file.""" # If we overwrite delete all annotations first if overwrite is True or overwrite == 'v_annotations': annotated = self._all_get_from_attrs(node, HDF5StorageService.ANNOTATED) if annotated: current_attrs = node._v_attrs for attr_name in current_attrs._v_attrnames: if attr_name.startswith(HDF5StorageService.ANNOTATION_PREFIX): delattr(current_attrs, attr_name) delattr(current_attrs, HDF5StorageService.ANNOTATED) self._hdf5file.flush() # Only store annotations if the item has some if not item_with_annotations.v_annotations.f_is_empty(): anno_dict = item_with_annotations.v_annotations._dict current_attrs = node._v_attrs changed = False for field_name in anno_dict: val = anno_dict[field_name] field_name_with_prefix = HDF5StorageService.ANNOTATION_PREFIX + field_name if field_name_with_prefix not in current_attrs: # Only store *new* annotations, if they already exist on disk, skip storage setattr(current_attrs, field_name_with_prefix, val) changed = True if changed: setattr(current_attrs, HDF5StorageService.ANNOTATED, True) self._hdf5file.flush()
[ "def", "_ann_store_annotations", "(", "self", ",", "item_with_annotations", ",", "node", ",", "overwrite", "=", "False", ")", ":", "# If we overwrite delete all annotations first", "if", "overwrite", "is", "True", "or", "overwrite", "==", "'v_annotations'", ":", "annotated", "=", "self", ".", "_all_get_from_attrs", "(", "node", ",", "HDF5StorageService", ".", "ANNOTATED", ")", "if", "annotated", ":", "current_attrs", "=", "node", ".", "_v_attrs", "for", "attr_name", "in", "current_attrs", ".", "_v_attrnames", ":", "if", "attr_name", ".", "startswith", "(", "HDF5StorageService", ".", "ANNOTATION_PREFIX", ")", ":", "delattr", "(", "current_attrs", ",", "attr_name", ")", "delattr", "(", "current_attrs", ",", "HDF5StorageService", ".", "ANNOTATED", ")", "self", ".", "_hdf5file", ".", "flush", "(", ")", "# Only store annotations if the item has some", "if", "not", "item_with_annotations", ".", "v_annotations", ".", "f_is_empty", "(", ")", ":", "anno_dict", "=", "item_with_annotations", ".", "v_annotations", ".", "_dict", "current_attrs", "=", "node", ".", "_v_attrs", "changed", "=", "False", "for", "field_name", "in", "anno_dict", ":", "val", "=", "anno_dict", "[", "field_name", "]", "field_name_with_prefix", "=", "HDF5StorageService", ".", "ANNOTATION_PREFIX", "+", "field_name", "if", "field_name_with_prefix", "not", "in", "current_attrs", ":", "# Only store *new* annotations, if they already exist on disk, skip storage", "setattr", "(", "current_attrs", ",", "field_name_with_prefix", ",", "val", ")", "changed", "=", "True", "if", "changed", ":", "setattr", "(", "current_attrs", ",", "HDF5StorageService", ".", "ANNOTATED", ",", "True", ")", "self", ".", "_hdf5file", ".", "flush", "(", ")" ]
43.571429
24.685714
def font(self): """Font object controlling text format defaults for this chart.""" defRPr = ( self._chartSpace .get_or_add_txPr() .p_lst[0] .get_or_add_pPr() .get_or_add_defRPr() ) return Font(defRPr)
[ "def", "font", "(", "self", ")", ":", "defRPr", "=", "(", "self", ".", "_chartSpace", ".", "get_or_add_txPr", "(", ")", ".", "p_lst", "[", "0", "]", ".", "get_or_add_pPr", "(", ")", ".", "get_or_add_defRPr", "(", ")", ")", "return", "Font", "(", "defRPr", ")" ]
29.9
13.5
def fail(self, tup): """Indicate that processing of a Tuple has failed. :param tup: the Tuple to fail (its ``id`` if ``str``). :type tup: :class:`str` or :class:`pystorm.component.Tuple` """ tup_id = tup.id if isinstance(tup, Tuple) else tup self.send_message({"command": "fail", "id": tup_id})
[ "def", "fail", "(", "self", ",", "tup", ")", ":", "tup_id", "=", "tup", ".", "id", "if", "isinstance", "(", "tup", ",", "Tuple", ")", "else", "tup", "self", ".", "send_message", "(", "{", "\"command\"", ":", "\"fail\"", ",", "\"id\"", ":", "tup_id", "}", ")" ]
42
18.375
def _Backward3_sat_v_P(P, T, x): """Backward equation for region 3 for saturated state, vs=f(P,x) Parameters ---------- T : float Temperature, [K] P : float Pressure, [MPa] x : integer Vapor quality, [-] Returns ------- v : float Specific volume, [m³/kg] Notes ----- The vapor quality (x) can be 0 (saturated liquid) or 1 (saturated vapour) """ if x == 0: if P < 19.00881189: region = "c" elif P < 21.0434: region = "s" elif P < 21.9316: region = "u" else: region = "y" else: if P < 20.5: region = "t" elif P < 21.0434: region = "r" elif P < 21.9009: region = "x" else: region = "z" return _Backward3x_v_PT(T, P, region)
[ "def", "_Backward3_sat_v_P", "(", "P", ",", "T", ",", "x", ")", ":", "if", "x", "==", "0", ":", "if", "P", "<", "19.00881189", ":", "region", "=", "\"c\"", "elif", "P", "<", "21.0434", ":", "region", "=", "\"s\"", "elif", "P", "<", "21.9316", ":", "region", "=", "\"u\"", "else", ":", "region", "=", "\"y\"", "else", ":", "if", "P", "<", "20.5", ":", "region", "=", "\"t\"", "elif", "P", "<", "21.0434", ":", "region", "=", "\"r\"", "elif", "P", "<", "21.9009", ":", "region", "=", "\"x\"", "else", ":", "region", "=", "\"z\"", "return", "_Backward3x_v_PT", "(", "T", ",", "P", ",", "region", ")" ]
20.487805
21.243902
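An illustrative call near the IF97 saturation line; the (P, T) pair below approximates saturation at 20 MPa (Tsat ≈ 638.9 K) and is for demonstration only.

v_liq = _Backward3_sat_v_P(20.0, 638.9, 0)   # saturated liquid volume [m³/kg]
v_vap = _Backward3_sat_v_P(20.0, 638.9, 1)   # saturated vapour volume [m³/kg]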
def escore(self): """(property) Returns the E-score associated with the result.""" hg_pval_thresh = self.escore_pval_thresh or self.pval escore_tol = self.escore_tol or mhg_cython.get_default_tol() es = mhg_cython.get_xlmhg_escore( self.indices, self.N, self.K, self.X, self.L, hg_pval_thresh, escore_tol) return es
[ "def", "escore", "(", "self", ")", ":", "hg_pval_thresh", "=", "self", ".", "escore_pval_thresh", "or", "self", ".", "pval", "escore_tol", "=", "self", ".", "escore_tol", "or", "mhg_cython", ".", "get_default_tol", "(", ")", "es", "=", "mhg_cython", ".", "get_xlmhg_escore", "(", "self", ".", "indices", ",", "self", ".", "N", ",", "self", ".", "K", ",", "self", ".", "X", ",", "self", ".", "L", ",", "hg_pval_thresh", ",", "escore_tol", ")", "return", "es" ]
46.5
14.25
def Beach(fm, linewidth=2, facecolor='b', bgcolor='w', edgecolor='k',
          alpha=1.0, xy=(0, 0), width=200, size=100, nofill=False,
          zorder=100, axes=None):
    """
    Return a beach ball as a collection which can be connected to a
    current matplotlib axes instance (ax.add_collection).

    S1, D1, and R1, the strike, dip and rake of one of the focal planes, can
    be vectors of multiple focal mechanisms.

    :param fm: Focal mechanism that is either number of mechanisms (NM) by 3
        (strike, dip, and rake) or NM x 6 (M11, M22, M33, M12, M13, M23 - the
        six independent components of the moment tensor, where the coordinate
        system is 1,2,3 = Up,South,East which equals r,theta,phi). The strike
        is of the first plane, clockwise relative to north. The dip is of the
        first plane, defined clockwise and perpendicular to strike, relative
        to horizontal such that 0 is horizontal and 90 is vertical. The rake
        is of the first focal plane solution. 90 moves the hanging wall up-dip
        (thrust), 0 moves it in the strike direction (left-lateral), -90 moves
        it down-dip (normal), and 180 moves it opposite to strike
        (right-lateral).
    :param facecolor: Color to use for quadrants of tension; can be a string,
        e.g. ``'r'``, ``'b'`` or three component color vector, [R G B].
        Defaults to ``'b'`` (blue).
    :param bgcolor: The background color. Defaults to ``'w'`` (white).
    :param edgecolor: Color of the edges. Defaults to ``'k'`` (black).
    :param alpha: The alpha level of the beach ball. Defaults to ``1.0``
        (opaque).
    :param xy: Origin position of the beach ball as tuple. Defaults to
        ``(0, 0)``.
    :type width: int or tuple
    :param width: Symbol size of beach ball, or tuple for elliptically
        shaped patches. Defaults to size ``200``.
    :param size: Controls the number of interpolation points for the curves.
        Minimum is automatically set to ``100``.
    :param nofill: Do not fill the beach ball, but only plot the planes.
    :param zorder: Set zorder. Artists with lower zorder values are drawn
        first.
    :type axes: :class:`matplotlib.axes.Axes`
    :param axes: Used to make beach balls circular on non-scaled axes. Also
        maintains the aspect ratio when resizing the figure. Will not add
        the returned collection to the axes instance.
    """
    # check if one or two widths are specified (Circle or Ellipse)
    try:
        assert(len(width) == 2)
    except TypeError:
        width = (width, width)
    mt = None
    np1 = None
    if isinstance(fm, MomentTensor):
        mt = fm
        np1 = MT2Plane(mt)
    elif isinstance(fm, NodalPlane):
        np1 = fm
    elif len(fm) == 6:
        mt = MomentTensor(fm[0], fm[1], fm[2], fm[3], fm[4], fm[5], 0)
        np1 = MT2Plane(mt)
    elif len(fm) == 3:
        np1 = NodalPlane(fm[0], fm[1], fm[2])
    else:
        raise TypeError("Wrong input value for 'fm'.")

    # enforce a minimum size of 100, i.e. at least 100 points in the matrix
    if size < 100:
        size = 100

    # Return as collection
    if mt:
        (T, N, P) = MT2Axes(mt)
        if np.fabs(N.val) < EPSILON and np.fabs(T.val + P.val) < EPSILON:
            colors, p = plotDC(np1, size, xy=xy, width=width)
        else:
            colors, p = plotMT(T, N, P, size,
                               plot_zerotrace=True, xy=xy, width=width)
    else:
        colors, p = plotDC(np1, size=size, xy=xy, width=width)

    if nofill:
        # XXX: not tested with plotMT
        col = collections.PatchCollection([p[1]], match_original=False)
        col.set_facecolor('none')
    else:
        col = collections.PatchCollection(p, match_original=False)
        # Replace color dummies 'b' and 'w' by face and bgcolor
        fc = [facecolor if c == 'b' else bgcolor for c in colors]
        col.set_facecolors(fc)

    # Use the given axes to maintain the aspect ratio of beachballs on figure
    # resize.
    if axes is not None:
        # This is what holds the aspect ratio (but breaks the positioning)
        col.set_transform(transforms.IdentityTransform())
        # Next is a dirty hack to fix the positioning:
        # 1. Need to bring all the patches to the origin (0, 0).
        for p in col._paths:
            p.vertices -= xy
        # 2. Then use the offset property of the collection to position the
        # patches
        col.set_offsets(xy)
        col._transOffset = axes.transData
    col.set_edgecolor(edgecolor)
    col.set_alpha(alpha)
    col.set_linewidth(linewidth)
    col.set_zorder(zorder)
    return col
[ "def", "Beach", "(", "fm", ",", "linewidth", "=", "2", ",", "facecolor", "=", "'b'", ",", "bgcolor", "=", "'w'", ",", "edgecolor", "=", "'k'", ",", "alpha", "=", "1.0", ",", "xy", "=", "(", "0", ",", "0", ")", ",", "width", "=", "200", ",", "size", "=", "100", ",", "nofill", "=", "False", ",", "zorder", "=", "100", ",", "axes", "=", "None", ")", ":", "# check if one or two widths are specified (Circle or Ellipse)", "try", ":", "assert", "(", "len", "(", "width", ")", "==", "2", ")", "except", "TypeError", ":", "width", "=", "(", "width", ",", "width", ")", "mt", "=", "None", "np1", "=", "None", "if", "isinstance", "(", "fm", ",", "MomentTensor", ")", ":", "mt", "=", "fm", "np1", "=", "MT2Plane", "(", "mt", ")", "elif", "isinstance", "(", "fm", ",", "NodalPlane", ")", ":", "np1", "=", "fm", "elif", "len", "(", "fm", ")", "==", "6", ":", "mt", "=", "MomentTensor", "(", "fm", "[", "0", "]", ",", "fm", "[", "1", "]", ",", "fm", "[", "2", "]", ",", "fm", "[", "3", "]", ",", "fm", "[", "4", "]", ",", "fm", "[", "5", "]", ",", "0", ")", "np1", "=", "MT2Plane", "(", "mt", ")", "elif", "len", "(", "fm", ")", "==", "3", ":", "np1", "=", "NodalPlane", "(", "fm", "[", "0", "]", ",", "fm", "[", "1", "]", ",", "fm", "[", "2", "]", ")", "else", ":", "raise", "TypeError", "(", "\"Wrong input value for 'fm'.\"", ")", "# Only at least size 100, i.e. 100 points in the matrix are allowed", "if", "size", "<", "100", ":", "size", "=", "100", "# Return as collection", "if", "mt", ":", "(", "T", ",", "N", ",", "P", ")", "=", "MT2Axes", "(", "mt", ")", "if", "np", ".", "fabs", "(", "N", ".", "val", ")", "<", "EPSILON", "and", "np", ".", "fabs", "(", "T", ".", "val", "+", "P", ".", "val", ")", "<", "EPSILON", ":", "colors", ",", "p", "=", "plotDC", "(", "np1", ",", "size", ",", "xy", "=", "xy", ",", "width", "=", "width", ")", "else", ":", "colors", ",", "p", "=", "plotMT", "(", "T", ",", "N", ",", "P", ",", "size", ",", "plot_zerotrace", "=", "True", ",", "xy", "=", "xy", ",", "width", "=", "width", ")", "else", ":", "colors", ",", "p", "=", "plotDC", "(", "np1", ",", "size", "=", "size", ",", "xy", "=", "xy", ",", "width", "=", "width", ")", "if", "nofill", ":", "# XXX: not tested with plotMT", "col", "=", "collections", ".", "PatchCollection", "(", "[", "p", "[", "1", "]", "]", ",", "match_original", "=", "False", ")", "col", ".", "set_facecolor", "(", "'none'", ")", "else", ":", "col", "=", "collections", ".", "PatchCollection", "(", "p", ",", "match_original", "=", "False", ")", "# Replace color dummies 'b' and 'w' by face and bgcolor", "fc", "=", "[", "facecolor", "if", "c", "==", "'b'", "else", "bgcolor", "for", "c", "in", "colors", "]", "col", ".", "set_facecolors", "(", "fc", ")", "# Use the given axes to maintain the aspect ratio of beachballs on figure", "# resize.", "if", "axes", "is", "not", "None", ":", "# This is what holds the aspect ratio (but breaks the positioning)", "col", ".", "set_transform", "(", "transforms", ".", "IdentityTransform", "(", ")", ")", "# Next is a dirty hack to fix the positioning:", "# 1. Need to bring the all patches to the origin (0, 0).", "for", "p", "in", "col", ".", "_paths", ":", "p", ".", "vertices", "-=", "xy", "# 2. 
Then use the offset property of the collection to position the", "# patches", "col", ".", "set_offsets", "(", "xy", ")", "col", ".", "_transOffset", "=", "axes", ".", "transData", "col", ".", "set_edgecolor", "(", "edgecolor", ")", "col", ".", "set_alpha", "(", "alpha", ")", "col", ".", "set_linewidth", "(", "linewidth", ")", "col", ".", "set_zorder", "(", "zorder", ")", "return", "col" ]
42.542056
22.82243
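For orientation, a minimal usage sketch for the Beach entry above. It follows the docstring's contract (the returned collection is attached via ax.add_collection); the import path obspy.imaging.beachball is an assumption about where this function ships, and newer obspy releases rename the helper to beach:

import matplotlib.pyplot as plt
from obspy.imaging.beachball import Beach  # assumed location; `beach` in newer obspy

fig, ax = plt.subplots()
ax.set_xlim(-120, 120)
ax.set_ylim(-120, 120)
ax.set_aspect('equal')  # keeps the ball circular without passing `axes`

# Strike/dip/rake of the first nodal plane; `width` is in data units.
fm = (150, 87, 1)
ball = Beach(fm, xy=(0, 0), width=100, facecolor='r', linewidth=1)
ax.add_collection(ball)
plt.show()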
def gen_keys(keysize=2048): ''' Generate Salt minion keys and return them as PEM file strings ''' # Mandate that keys are at least 2048 in size if keysize < 2048: keysize = 2048 tdir = tempfile.mkdtemp() salt.crypt.gen_keys(tdir, 'minion', keysize) priv_path = os.path.join(tdir, 'minion.pem') pub_path = os.path.join(tdir, 'minion.pub') with salt.utils.files.fopen(priv_path) as fp_: priv = salt.utils.stringutils.to_unicode(fp_.read()) with salt.utils.files.fopen(pub_path) as fp_: pub = salt.utils.stringutils.to_unicode(fp_.read()) shutil.rmtree(tdir) return priv, pub
[ "def", "gen_keys", "(", "keysize", "=", "2048", ")", ":", "# Mandate that keys are at least 2048 in size", "if", "keysize", "<", "2048", ":", "keysize", "=", "2048", "tdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "salt", ".", "crypt", ".", "gen_keys", "(", "tdir", ",", "'minion'", ",", "keysize", ")", "priv_path", "=", "os", ".", "path", ".", "join", "(", "tdir", ",", "'minion.pem'", ")", "pub_path", "=", "os", ".", "path", ".", "join", "(", "tdir", ",", "'minion.pub'", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "priv_path", ")", "as", "fp_", ":", "priv", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "fp_", ".", "read", "(", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "pub_path", ")", "as", "fp_", ":", "pub", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "fp_", ".", "read", "(", ")", ")", "shutil", ".", "rmtree", "(", "tdir", ")", "return", "priv", ",", "pub" ]
35.111111
17.666667
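A short usage sketch for the gen_keys entry above, hedged: the import path salt.utils.cloud is an assumption about where this helper lives in the Salt tree, and the output filenames are illustrative only:

from salt.utils.cloud import gen_keys  # assumed module path

# Keysizes below 2048 are silently bumped to 2048 by the function itself.
priv_pem, pub_pem = gen_keys(keysize=4096)

# The returned PEM strings can be written out to pre-seed a minion:
with open('minion.pem', 'w') as fh:
    fh.write(priv_pem)
with open('minion.pub', 'w') as fh:
    fh.write(pub_pem)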
def match(self, f, *args): """Match grammar function 'f' against the next token and set 'self.matched'. Arguments: f: A grammar function - see efilter.parsers.common.grammar. Must return TokenMatch or None. args: Passed to 'f', if any. Returns: Instance of efilter.parsers.common.grammar.TokenMatch or None. Comment: If a match is returned, it will also be stored in self.matched. """ try: match = f(self.tokenizer, *args) except StopIteration: # The grammar function might have tried to access more tokens than # are available. That's not really an error, it just means it didn't # match. return if match is None: return if not isinstance(match, grammar.TokenMatch): raise TypeError("Invalid grammar function %r returned %r." % (f, match)) self.matched = match return match
[ "def", "match", "(", "self", ",", "f", ",", "*", "args", ")", ":", "try", ":", "match", "=", "f", "(", "self", ".", "tokenizer", ",", "*", "args", ")", "except", "StopIteration", ":", "# The grammar function might have tried to access more tokens than", "# are available. That's not really an error, it just means it didn't", "# match.", "return", "if", "match", "is", "None", ":", "return", "if", "not", "isinstance", "(", "match", ",", "grammar", ".", "TokenMatch", ")", ":", "raise", "TypeError", "(", "\"Invalid grammar function %r returned %r.\"", "%", "(", "f", ",", "match", ")", ")", "self", ".", "matched", "=", "match", "return", "match" ]
32.580645
22.806452
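To make the match() contract above concrete, here is a self-contained mimic (toy classes, not the real efilter types): a grammar function receives the tokenizer plus extra arguments and returns a TokenMatch or None, and a successful match is cached on self.matched:

from collections import namedtuple

TokenMatch = namedtuple('TokenMatch', ['tokens'])  # stand-in for grammar.TokenMatch

def literal(tokenizer, expected):
    # Toy grammar function: match if the next token equals `expected`.
    token = next(tokenizer)  # may raise StopIteration when tokens run out
    return TokenMatch([token]) if token == expected else None

class ToyParser:
    def __init__(self, tokens):
        self.tokenizer = iter(tokens)
        self.matched = None

    def match(self, f, *args):
        # Same shape as the entry above, minus the isinstance type check.
        try:
            match = f(self.tokenizer, *args)
        except StopIteration:
            return None  # ran out of tokens: not an error, just no match
        if match is not None:
            self.matched = match
        return match

parser = ToyParser(['select', '*'])
print(parser.match(literal, 'select'))  # TokenMatch(tokens=['select'])
print(parser.matched)                   # the cached match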