Schema (per-row columns):

    text            string     lengths 89 to 104k
    code_tokens     list
    avg_line_len    float64    7.91 to 980
    score           float64    0 to 630
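Each row below is one record with these four fields. As a quick orientation, here is a minimal sketch of how the derived columns can be recomputed from a record; the exact definitions of avg_line_len and score are not documented in this dump, so the per-line average used below is an assumption:

    def describe(record: dict) -> dict:
        # record is assumed to carry the four columns listed above
        lines = record["text"].splitlines()
        return {
            "text_length": len(record["text"]),      # matches the string-length stats
            "n_tokens": len(record["code_tokens"]),  # size of the lexer token list
            # assumed: mean character count per source line
            "avg_line_len": sum(len(l) for l in lines) / max(len(lines), 1),
        }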
def ensureKeysAreSetup(self):
    """ Check whether the keys are setup in the local STP keep.
    Raises KeysNotFoundException if not found. """
    if not areKeysSetup(self.name, self.keys_dir):
        raise REx(REx.reason.format(self.name) + self.keygenScript)
[ "def", "ensureKeysAreSetup", "(", "self", ")", ":", "if", "not", "areKeysSetup", "(", "self", ".", "name", ",", "self", ".", "keys_dir", ")", ":", "raise", "REx", "(", "REx", ".", "reason", ".", "format", "(", "self", ".", "name", ")", "+", "self", ".", "keygenScript", ")" ]
41.285714
12.714286
def toggle_use_font_background_sensitivity(self, chk):
    """If the user chooses to use the gnome default font
    configuration it means that he will not be able to use the
    font selector.
    """
    self.get_widget('palette_16').set_sensitive(chk.get_active())
    self.get_widget('palette_17').set_sensitive(chk.get_active())
[ "def", "toggle_use_font_background_sensitivity", "(", "self", ",", "chk", ")", ":", "self", ".", "get_widget", "(", "'palette_16'", ")", ".", "set_sensitive", "(", "chk", ".", "get_active", "(", ")", ")", "self", ".", "get_widget", "(", "'palette_17'", ")", ".", "set_sensitive", "(", "chk", ".", "get_active", "(", ")", ")" ]
50.142857
16.571429
def p_continue_statement_1(self, p):
    """continue_statement : CONTINUE SEMI
                          | CONTINUE AUTOSEMI
    """
    p[0] = self.asttypes.Continue()
    p[0].setpos(p)
[ "def", "p_continue_statement_1", "(", "self", ",", "p", ")", ":", "p", "[", "0", "]", "=", "self", ".", "asttypes", ".", "Continue", "(", ")", "p", "[", "0", "]", ".", "setpos", "(", "p", ")" ]
33.666667
5.333333
def setTopLevelItems(self, items):
    """
    Initializes the navigation system to start with the inputted root
    item.

    :param      item | <XNavigationItem>
    """
    if (not self._navigationModel):
        self.setNavigationModel(XNavigationModel(self))

    self._navigationModel.setTopLevelItems(items)
[ "def", "setTopLevelItems", "(", "self", ",", "items", ")", ":", "if", "(", "not", "self", ".", "_navigationModel", ")", ":", "self", ".", "setNavigationModel", "(", "XNavigationModel", "(", "self", ")", ")", "self", ".", "_navigationModel", ".", "setTopLevelItems", "(", "items", ")" ]
32.545455
15.090909
def search_memories(self):
    """ Search and return list of 1-wire memories. """
    if not self.connected:
        raise NotConnected()
    return self._cf.mem.get_mems(MemoryElement.TYPE_1W)
[ "def", "search_memories", "(", "self", ")", ":", "if", "not", "self", ".", "connected", ":", "raise", "NotConnected", "(", ")", "return", "self", ".", "_cf", ".", "mem", ".", "get_mems", "(", "MemoryElement", ".", "TYPE_1W", ")" ]
31.285714
8.714286
def invoke(
    self,
    cli,
    args=None,
    prog_name=None,
    input=None,
    terminate_input=False,
    env=None,
    _output_lines=None,
    **extra
):
    """Like :meth:`CliRunner.invoke` but displays what the user
    would enter in the terminal for env vars, command args, and
    prompts.

    :param terminate_input: Whether to display "^D" after a list of
        input.
    :param _output_lines: A list used internally to collect lines to
        be displayed.
    """
    output_lines = _output_lines if _output_lines is not None else []

    if env:
        for key, value in sorted(env.items()):
            value = shlex.quote(value)
            output_lines.append("$ export {}={}".format(key, value))

    args = args or []

    if prog_name is None:
        prog_name = cli.name.replace("_", "-")

    output_lines.append(
        "$ {} {}".format(prog_name, " ".join(shlex.quote(x) for x in args)).rstrip()
    )
    # remove "python" from command
    prog_name = prog_name.rsplit(" ", 1)[-1]

    if isinstance(input, (tuple, list)):
        input = "\n".join(input) + "\n"

        if terminate_input:
            input += "\x04"

    result = super(ExampleRunner, self).invoke(
        cli=cli, args=args, input=input, env=env, prog_name=prog_name, **extra
    )
    output_lines.extend(result.output.splitlines())
    return result
[ "def", "invoke", "(", "self", ",", "cli", ",", "args", "=", "None", ",", "prog_name", "=", "None", ",", "input", "=", "None", ",", "terminate_input", "=", "False", ",", "env", "=", "None", ",", "_output_lines", "=", "None", ",", "*", "*", "extra", ")", ":", "output_lines", "=", "_output_lines", "if", "_output_lines", "is", "not", "None", "else", "[", "]", "if", "env", ":", "for", "key", ",", "value", "in", "sorted", "(", "env", ".", "items", "(", ")", ")", ":", "value", "=", "shlex", ".", "quote", "(", "value", ")", "output_lines", ".", "append", "(", "\"$ export {}={}\"", ".", "format", "(", "key", ",", "value", ")", ")", "args", "=", "args", "or", "[", "]", "if", "prog_name", "is", "None", ":", "prog_name", "=", "cli", ".", "name", ".", "replace", "(", "\"_\"", ",", "\"-\"", ")", "output_lines", ".", "append", "(", "\"$ {} {}\"", ".", "format", "(", "prog_name", ",", "\" \"", ".", "join", "(", "shlex", ".", "quote", "(", "x", ")", "for", "x", "in", "args", ")", ")", ".", "rstrip", "(", ")", ")", "# remove \"python\" from command", "prog_name", "=", "prog_name", ".", "rsplit", "(", "\" \"", ",", "1", ")", "[", "-", "1", "]", "if", "isinstance", "(", "input", ",", "(", "tuple", ",", "list", ")", ")", ":", "input", "=", "\"\\n\"", ".", "join", "(", "input", ")", "+", "\"\\n\"", "if", "terminate_input", ":", "input", "+=", "\"\\x04\"", "result", "=", "super", "(", "ExampleRunner", ",", "self", ")", ".", "invoke", "(", "cli", "=", "cli", ",", "args", "=", "args", ",", "input", "=", "input", ",", "env", "=", "env", ",", "prog_name", "=", "prog_name", ",", "*", "*", "extra", ")", "output_lines", ".", "extend", "(", "result", ".", "output", ".", "splitlines", "(", ")", ")", "return", "result" ]
30.040816
22.489796
def source_channels(self):
    """ Returns a set describing the source channels on which the
    gate is defined. """
    source_channels = [v.coordinates.keys() for v in self.verts]
    return set(itertools.chain(*source_channels))
[ "def", "source_channels", "(", "self", ")", ":", "source_channels", "=", "[", "v", ".", "coordinates", ".", "keys", "(", ")", "for", "v", "in", "self", ".", "verts", "]", "return", "set", "(", "itertools", ".", "chain", "(", "*", "source_channels", ")", ")" ]
59.25
13.75
def compute_duration_measures(self):
    """ Helper function for computing measures derived from timing
    information. These are only computed if the response is a
    TextGrid with timing information. All times are in seconds.
    """
    prefix = "TIMING_" + self.current_similarity_measure + "_" + self.current_collection_type + "_"
    if self.response_format == 'TextGrid':
        self.compute_response_vowel_duration("TIMING_")  # prefixes don't need collection or measure type
        self.compute_response_continuant_duration("TIMING_")
        self.compute_between_collection_interval_duration(prefix)
        self.compute_within_collection_interval_duration(prefix)
        # these give different values depending on whether singleton clusters are counted or not
        self.compute_within_collection_vowel_duration(prefix, no_singletons=True)
        self.compute_within_collection_continuant_duration(prefix, no_singletons=True)
        self.compute_within_collection_vowel_duration(prefix, no_singletons=False)
        self.compute_within_collection_continuant_duration(prefix, no_singletons=False)
[ "def", "compute_duration_measures", "(", "self", ")", ":", "prefix", "=", "\"TIMING_\"", "+", "self", ".", "current_similarity_measure", "+", "\"_\"", "+", "self", ".", "current_collection_type", "+", "\"_\"", "if", "self", ".", "response_format", "==", "'TextGrid'", ":", "self", ".", "compute_response_vowel_duration", "(", "\"TIMING_\"", ")", "#prefixes don't need collection or measure type", "self", ".", "compute_response_continuant_duration", "(", "\"TIMING_\"", ")", "self", ".", "compute_between_collection_interval_duration", "(", "prefix", ")", "self", ".", "compute_within_collection_interval_duration", "(", "prefix", ")", "#these give different values depending on whether singleton clusters are counted or not", "self", ".", "compute_within_collection_vowel_duration", "(", "prefix", ",", "no_singletons", "=", "True", ")", "self", ".", "compute_within_collection_continuant_duration", "(", "prefix", ",", "no_singletons", "=", "True", ")", "self", ".", "compute_within_collection_vowel_duration", "(", "prefix", ",", "no_singletons", "=", "False", ")", "self", ".", "compute_within_collection_continuant_duration", "(", "prefix", ",", "no_singletons", "=", "False", ")" ]
52.909091
35.090909
def set_quantity(self, twig=None, value=None, **kwargs):
    """
    TODO: add documentation
    """
    # TODO: handle twig having parameter key (value@, default_unit@, adjust@, etc)
    # TODO: does this return anything (update the docstring)?
    return self.get_parameter(twig=twig, **kwargs).set_quantity(value=value, **kwargs)
[ "def", "set_quantity", "(", "self", ",", "twig", "=", "None", ",", "value", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# TODO: handle twig having parameter key (value@, default_unit@, adjust@, etc)", "# TODO: does this return anything (update the docstring)?", "return", "self", ".", "get_parameter", "(", "twig", "=", "twig", ",", "*", "*", "kwargs", ")", ".", "set_quantity", "(", "value", "=", "value", ",", "*", "*", "kwargs", ")" ]
50
20.857143
def assignrepr(self, prefix) -> str:
    """Return a string representation of the actual |anntools.ANN|
    object that is prefixed with the given string."""
    prefix = '%s%s(' % (prefix, self.name)
    blanks = len(prefix)*' '
    lines = [
        objecttools.assignrepr_value(
            self.nmb_inputs, '%snmb_inputs=' % prefix)+',',
        objecttools.assignrepr_tuple(
            self.nmb_neurons, '%snmb_neurons=' % blanks)+',',
        objecttools.assignrepr_value(
            self.nmb_outputs, '%snmb_outputs=' % blanks)+',',
        objecttools.assignrepr_list2(
            self.weights_input, '%sweights_input=' % blanks)+',']
    if self.nmb_layers > 1:
        lines.append(objecttools.assignrepr_list3(
            self.weights_hidden, '%sweights_hidden=' % blanks)+',')
    lines.append(objecttools.assignrepr_list2(
        self.weights_output, '%sweights_output=' % blanks)+',')
    lines.append(objecttools.assignrepr_list2(
        self.intercepts_hidden, '%sintercepts_hidden=' % blanks)+',')
    lines.append(objecttools.assignrepr_list(
        self.intercepts_output, '%sintercepts_output=' % blanks)+')')
    return '\n'.join(lines)
[ "def", "assignrepr", "(", "self", ",", "prefix", ")", "->", "str", ":", "prefix", "=", "'%s%s('", "%", "(", "prefix", ",", "self", ".", "name", ")", "blanks", "=", "len", "(", "prefix", ")", "*", "' '", "lines", "=", "[", "objecttools", ".", "assignrepr_value", "(", "self", ".", "nmb_inputs", ",", "'%snmb_inputs='", "%", "prefix", ")", "+", "','", ",", "objecttools", ".", "assignrepr_tuple", "(", "self", ".", "nmb_neurons", ",", "'%snmb_neurons='", "%", "blanks", ")", "+", "','", ",", "objecttools", ".", "assignrepr_value", "(", "self", ".", "nmb_outputs", ",", "'%snmb_outputs='", "%", "blanks", ")", "+", "','", ",", "objecttools", ".", "assignrepr_list2", "(", "self", ".", "weights_input", ",", "'%sweights_input='", "%", "blanks", ")", "+", "','", "]", "if", "self", ".", "nmb_layers", ">", "1", ":", "lines", ".", "append", "(", "objecttools", ".", "assignrepr_list3", "(", "self", ".", "weights_hidden", ",", "'%sweights_hidden='", "%", "blanks", ")", "+", "','", ")", "lines", ".", "append", "(", "objecttools", ".", "assignrepr_list2", "(", "self", ".", "weights_output", ",", "'%sweights_output='", "%", "blanks", ")", "+", "','", ")", "lines", ".", "append", "(", "objecttools", ".", "assignrepr_list2", "(", "self", ".", "intercepts_hidden", ",", "'%sintercepts_hidden='", "%", "blanks", ")", "+", "','", ")", "lines", ".", "append", "(", "objecttools", ".", "assignrepr_list", "(", "self", ".", "intercepts_output", ",", "'%sintercepts_output='", "%", "blanks", ")", "+", "')'", ")", "return", "'\\n'", ".", "join", "(", "lines", ")" ]
51.375
13.833333
def get_postgres_encoding(python_encoding: str) -> str:
    """Python to postgres encoding map."""
    encoding = normalize_encoding(python_encoding.lower())
    encoding_ = aliases.aliases[encoding.replace('_', '', 1)].upper()
    pg_encoding = PG_ENCODING_MAP[encoding_.replace('_', '')]
    return pg_encoding
[ "def", "get_postgres_encoding", "(", "python_encoding", ":", "str", ")", "->", "str", ":", "encoding", "=", "normalize_encoding", "(", "python_encoding", ".", "lower", "(", ")", ")", "encoding_", "=", "aliases", ".", "aliases", "[", "encoding", ".", "replace", "(", "'_'", ",", "''", ",", "1", ")", "]", ".", "upper", "(", ")", "pg_encoding", "=", "PG_ENCODING_MAP", "[", "encoding_", ".", "replace", "(", "'_'", ",", "''", ")", "]", "return", "pg_encoding" ]
38.375
22.625
def make_doc_id_range(doc_id):
    '''Construct a tuple(begin, end) of one-tuple kvlayer keys from a
    hexdigest doc_id.
    '''
    assert len(doc_id) == 32, 'expecting 32 hex string, not: %r' % doc_id
    bin_docid = base64.b16decode(doc_id.upper())
    doc_id_range = ((bin_docid,), (bin_docid,))
    return doc_id_range
[ "def", "make_doc_id_range", "(", "doc_id", ")", ":", "assert", "len", "(", "doc_id", ")", "==", "32", ",", "'expecting 32 hex string, not: %r'", "%", "doc_id", "bin_docid", "=", "base64", ".", "b16decode", "(", "doc_id", ".", "upper", "(", ")", ")", "doc_id_range", "=", "(", "(", "bin_docid", ",", ")", ",", "(", "bin_docid", ",", ")", ")", "return", "doc_id_range" ]
35.333333
21.777778
def returner(ret):
    '''
    Return data to an odbc server
    '''
    conn = _get_conn(ret)
    cur = conn.cursor()
    sql = '''INSERT INTO salt_returns
             (fun, jid, retval, id, success, full_ret)
             VALUES (?, ?, ?, ?, ?, ?)'''
    cur.execute(
        sql, (
            ret['fun'],
            ret['jid'],
            salt.utils.json.dumps(ret['return']),
            ret['id'],
            ret['success'],
            salt.utils.json.dumps(ret)
        )
    )
    _close_conn(conn)
[ "def", "returner", "(", "ret", ")", ":", "conn", "=", "_get_conn", "(", "ret", ")", "cur", "=", "conn", ".", "cursor", "(", ")", "sql", "=", "'''INSERT INTO salt_returns\n (fun, jid, retval, id, success, full_ret)\n VALUES (?, ?, ?, ?, ?, ?)'''", "cur", ".", "execute", "(", "sql", ",", "(", "ret", "[", "'fun'", "]", ",", "ret", "[", "'jid'", "]", ",", "salt", ".", "utils", ".", "json", ".", "dumps", "(", "ret", "[", "'return'", "]", ")", ",", "ret", "[", "'id'", "]", ",", "ret", "[", "'success'", "]", ",", "salt", ".", "utils", ".", "json", ".", "dumps", "(", "ret", ")", ")", ")", "_close_conn", "(", "conn", ")" ]
24.5
17.7
def encode(raw):
    """Encode SLIP message."""
    return raw \
        .replace(bytes([SLIP_ESC]), bytes([SLIP_ESC, SLIP_ESC_ESC])) \
        .replace(bytes([SLIP_END]), bytes([SLIP_ESC, SLIP_ESC_END]))
[ "def", "encode", "(", "raw", ")", ":", "return", "raw", ".", "replace", "(", "bytes", "(", "[", "SLIP_ESC", "]", ")", ",", "bytes", "(", "[", "SLIP_ESC", ",", "SLIP_ESC_ESC", "]", ")", ")", ".", "replace", "(", "bytes", "(", "[", "SLIP_END", "]", ")", ",", "bytes", "(", "[", "SLIP_ESC", ",", "SLIP_ESC_END", "]", ")", ")" ]
40
21.2
def run_sambamba_markdup(job, bam):
    """
    Marks reads as PCR duplicates using Sambamba

    :param JobFunctionWrappingJob job: passed automatically by Toil
    :param str bam: FileStoreID for BAM file
    :return: FileStoreID for sorted BAM file
    :rtype: str
    """
    work_dir = job.fileStore.getLocalTempDir()
    job.fileStore.readGlobalFile(bam, os.path.join(work_dir, 'input.bam'))
    command = ['/usr/local/bin/sambamba',
               'markdup',
               '-t', str(int(job.cores)),
               '/data/input.bam',
               '/data/output.bam']
    start_time = time.time()
    dockerCall(job=job, workDir=work_dir, parameters=command,
               tool='quay.io/biocontainers/sambamba:0.6.6--0')
    end_time = time.time()
    _log_runtime(job, start_time, end_time, "sambamba mkdup")
    return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'output.bam'))
[ "def", "run_sambamba_markdup", "(", "job", ",", "bam", ")", ":", "work_dir", "=", "job", ".", "fileStore", ".", "getLocalTempDir", "(", ")", "job", ".", "fileStore", ".", "readGlobalFile", "(", "bam", ",", "os", ".", "path", ".", "join", "(", "work_dir", ",", "'input.bam'", ")", ")", "command", "=", "[", "'/usr/local/bin/sambamba'", ",", "'markdup'", ",", "'-t'", ",", "str", "(", "int", "(", "job", ".", "cores", ")", ")", ",", "'/data/input.bam'", ",", "'/data/output.bam'", "]", "start_time", "=", "time", ".", "time", "(", ")", "dockerCall", "(", "job", "=", "job", ",", "workDir", "=", "work_dir", ",", "parameters", "=", "command", ",", "tool", "=", "'quay.io/biocontainers/sambamba:0.6.6--0'", ")", "end_time", "=", "time", ".", "time", "(", ")", "_log_runtime", "(", "job", ",", "start_time", ",", "end_time", ",", "\"sambamba mkdup\"", ")", "return", "job", ".", "fileStore", ".", "writeGlobalFile", "(", "os", ".", "path", ".", "join", "(", "work_dir", ",", "'output.bam'", ")", ")" ]
37.125
14.041667
def html_overall_stat(
        overall_stat,
        digit=5,
        overall_param=None,
        recommended_list=()):
    """
    Return HTML report file overall stat.

    :param overall_stat: overall stat
    :type overall_stat : dict
    :param digit: scale (the number of digits to the right of the decimal point in a number.)
    :type digit : int
    :param overall_param : Overall parameters list for print, Example : ["Kappa", "Scott PI"]
    :type overall_param : list
    :param recommended_list: recommended statistics list
    :type recommended_list : list or tuple
    :return: html_overall_stat as str
    """
    result = ""
    result += "<h2>Overall Statistics : </h2>\n"
    result += '<table style="border:1px solid black;border-collapse: collapse;">\n'
    overall_stat_keys = sorted(overall_stat.keys())
    if isinstance(overall_param, list):
        if set(overall_param) <= set(overall_stat_keys):
            overall_stat_keys = sorted(overall_param)
    if len(overall_stat_keys) < 1:
        return ""
    for i in overall_stat_keys:
        background_color = DEFAULT_BACKGROUND_COLOR
        if i in recommended_list:
            background_color = RECOMMEND_BACKGROUND_COLOR
        result += '<tr align="center">\n'
        result += '<td style="border:1px solid black;padding:4px;text-align:left;background-color:{};"><a href="'.format(
            background_color) + DOCUMENT_ADR + PARAMS_LINK[i] + '" style="text-decoration:None;">' + str(i) + '</a></td>\n'
        if i in BENCHMARK_LIST:
            background_color = BENCHMARK_COLOR[overall_stat[i]]
            result += '<td style="border:1px solid black;padding:4px;background-color:{};">'.format(
                background_color)
        else:
            result += '<td style="border:1px solid black;padding:4px;">'
        result += rounder(overall_stat[i], digit) + '</td>\n'
        result += "</tr>\n"
    result += "</table>\n"
    return result
[ "def", "html_overall_stat", "(", "overall_stat", ",", "digit", "=", "5", ",", "overall_param", "=", "None", ",", "recommended_list", "=", "(", ")", ")", ":", "result", "=", "\"\"", "result", "+=", "\"<h2>Overall Statistics : </h2>\\n\"", "result", "+=", "'<table style=\"border:1px solid black;border-collapse: collapse;\">\\n'", "overall_stat_keys", "=", "sorted", "(", "overall_stat", ".", "keys", "(", ")", ")", "if", "isinstance", "(", "overall_param", ",", "list", ")", ":", "if", "set", "(", "overall_param", ")", "<=", "set", "(", "overall_stat_keys", ")", ":", "overall_stat_keys", "=", "sorted", "(", "overall_param", ")", "if", "len", "(", "overall_stat_keys", ")", "<", "1", ":", "return", "\"\"", "for", "i", "in", "overall_stat_keys", ":", "background_color", "=", "DEFAULT_BACKGROUND_COLOR", "if", "i", "in", "recommended_list", ":", "background_color", "=", "RECOMMEND_BACKGROUND_COLOR", "result", "+=", "'<tr align=\"center\">\\n'", "result", "+=", "'<td style=\"border:1px solid black;padding:4px;text-align:left;background-color:{};\"><a href=\"'", ".", "format", "(", "background_color", ")", "+", "DOCUMENT_ADR", "+", "PARAMS_LINK", "[", "i", "]", "+", "'\" style=\"text-decoration:None;\">'", "+", "str", "(", "i", ")", "+", "'</a></td>\\n'", "if", "i", "in", "BENCHMARK_LIST", ":", "background_color", "=", "BENCHMARK_COLOR", "[", "overall_stat", "[", "i", "]", "]", "result", "+=", "'<td style=\"border:1px solid black;padding:4px;background-color:{};\">'", ".", "format", "(", "background_color", ")", "else", ":", "result", "+=", "'<td style=\"border:1px solid black;padding:4px;\">'", "result", "+=", "rounder", "(", "overall_stat", "[", "i", "]", ",", "digit", ")", "+", "'</td>\\n'", "result", "+=", "\"</tr>\\n\"", "result", "+=", "\"</table>\\n\"", "return", "result" ]
43.227273
19.954545
def resample(self, inds):
    """Returns copy of constraint, with mask rearranged according to indices
    """
    new = copy.deepcopy(self)
    for arr in self.arrays:
        x = getattr(new, arr)
        setattr(new, arr, x[inds])
    return new
[ "def", "resample", "(", "self", ",", "inds", ")", ":", "new", "=", "copy", ".", "deepcopy", "(", "self", ")", "for", "arr", "in", "self", ".", "arrays", ":", "x", "=", "getattr", "(", "new", ",", "arr", ")", "setattr", "(", "new", ",", "arr", ",", "x", "[", "inds", "]", ")", "return", "new" ]
33.625
7.75
def RegisterArtifact(self,
                     artifact_rdfvalue,
                     source="datastore",
                     overwrite_if_exists=False,
                     overwrite_system_artifacts=False):
    """Registers a new artifact."""
    artifact_name = artifact_rdfvalue.name
    if artifact_name in self._artifacts:
        if not overwrite_if_exists:
            details = "artifact already exists and `overwrite_if_exists` is unset"
            raise rdf_artifacts.ArtifactDefinitionError(artifact_name, details)
        elif not overwrite_system_artifacts:
            artifact_obj = self._artifacts[artifact_name]
            if not artifact_obj.loaded_from.startswith("datastore:"):
                # This artifact was not uploaded to the datastore but came from a
                # file, refuse to overwrite.
                details = "system artifact cannot be overwritten"
                raise rdf_artifacts.ArtifactDefinitionError(artifact_name, details)

    # Preserve where the artifact was loaded from to help debugging.
    artifact_rdfvalue.loaded_from = source
    # Clear any stale errors.
    artifact_rdfvalue.error_message = None
    self._artifacts[artifact_rdfvalue.name] = artifact_rdfvalue
[ "def", "RegisterArtifact", "(", "self", ",", "artifact_rdfvalue", ",", "source", "=", "\"datastore\"", ",", "overwrite_if_exists", "=", "False", ",", "overwrite_system_artifacts", "=", "False", ")", ":", "artifact_name", "=", "artifact_rdfvalue", ".", "name", "if", "artifact_name", "in", "self", ".", "_artifacts", ":", "if", "not", "overwrite_if_exists", ":", "details", "=", "\"artifact already exists and `overwrite_if_exists` is unset\"", "raise", "rdf_artifacts", ".", "ArtifactDefinitionError", "(", "artifact_name", ",", "details", ")", "elif", "not", "overwrite_system_artifacts", ":", "artifact_obj", "=", "self", ".", "_artifacts", "[", "artifact_name", "]", "if", "not", "artifact_obj", ".", "loaded_from", ".", "startswith", "(", "\"datastore:\"", ")", ":", "# This artifact was not uploaded to the datastore but came from a", "# file, refuse to overwrite.", "details", "=", "\"system artifact cannot be overwritten\"", "raise", "rdf_artifacts", ".", "ArtifactDefinitionError", "(", "artifact_name", ",", "details", ")", "# Preserve where the artifact was loaded from to help debugging.", "artifact_rdfvalue", ".", "loaded_from", "=", "source", "# Clear any stale errors.", "artifact_rdfvalue", ".", "error_message", "=", "None", "self", ".", "_artifacts", "[", "artifact_rdfvalue", ".", "name", "]", "=", "artifact_rdfvalue" ]
48.791667
15.166667
def permissions(cls, instance, db_session=None):
    """
    returns all non-resource permissions based on what groups user
    belongs and directly set ones for this user

    :param instance:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session, instance)
    query = db_session.query(
        cls.models_proxy.GroupPermission.group_id.label("owner_id"),
        cls.models_proxy.GroupPermission.perm_name.label("perm_name"),
        sa.literal("group").label("type"),
    )
    query = query.filter(
        cls.models_proxy.GroupPermission.group_id
        == cls.models_proxy.UserGroup.group_id
    )
    query = query.filter(
        cls.models_proxy.User.id == cls.models_proxy.UserGroup.user_id
    )
    query = query.filter(cls.models_proxy.User.id == instance.id)

    query2 = db_session.query(
        cls.models_proxy.UserPermission.user_id.label("owner_id"),
        cls.models_proxy.UserPermission.perm_name.label("perm_name"),
        sa.literal("user").label("type"),
    )
    query2 = query2.filter(cls.models_proxy.UserPermission.user_id == instance.id)
    query = query.union(query2)
    groups_dict = dict([(g.id, g) for g in instance.groups])
    return [
        PermissionTuple(
            instance,
            row.perm_name,
            row.type,
            groups_dict.get(row.owner_id) if row.type == "group" else None,
            None,
            False,
            True,
        )
        for row in query
    ]
[ "def", "permissions", "(", "cls", ",", "instance", ",", "db_session", "=", "None", ")", ":", "db_session", "=", "get_db_session", "(", "db_session", ",", "instance", ")", "query", "=", "db_session", ".", "query", "(", "cls", ".", "models_proxy", ".", "GroupPermission", ".", "group_id", ".", "label", "(", "\"owner_id\"", ")", ",", "cls", ".", "models_proxy", ".", "GroupPermission", ".", "perm_name", ".", "label", "(", "\"perm_name\"", ")", ",", "sa", ".", "literal", "(", "\"group\"", ")", ".", "label", "(", "\"type\"", ")", ",", ")", "query", "=", "query", ".", "filter", "(", "cls", ".", "models_proxy", ".", "GroupPermission", ".", "group_id", "==", "cls", ".", "models_proxy", ".", "UserGroup", ".", "group_id", ")", "query", "=", "query", ".", "filter", "(", "cls", ".", "models_proxy", ".", "User", ".", "id", "==", "cls", ".", "models_proxy", ".", "UserGroup", ".", "user_id", ")", "query", "=", "query", ".", "filter", "(", "cls", ".", "models_proxy", ".", "User", ".", "id", "==", "instance", ".", "id", ")", "query2", "=", "db_session", ".", "query", "(", "cls", ".", "models_proxy", ".", "UserPermission", ".", "user_id", ".", "label", "(", "\"owner_id\"", ")", ",", "cls", ".", "models_proxy", ".", "UserPermission", ".", "perm_name", ".", "label", "(", "\"perm_name\"", ")", ",", "sa", ".", "literal", "(", "\"user\"", ")", ".", "label", "(", "\"type\"", ")", ",", ")", "query2", "=", "query2", ".", "filter", "(", "cls", ".", "models_proxy", ".", "UserPermission", ".", "user_id", "==", "instance", ".", "id", ")", "query", "=", "query", ".", "union", "(", "query2", ")", "groups_dict", "=", "dict", "(", "[", "(", "g", ".", "id", ",", "g", ")", "for", "g", "in", "instance", ".", "groups", "]", ")", "return", "[", "PermissionTuple", "(", "instance", ",", "row", ".", "perm_name", ",", "row", ".", "type", ",", "groups_dict", ".", "get", "(", "row", ".", "owner_id", ")", "if", "row", ".", "type", "==", "\"group\"", "else", "None", ",", "None", ",", "False", ",", "True", ",", ")", "for", "row", "in", "query", "]" ]
36.522727
20.568182
def find_by_id(cls, id):
    """
    Finds a single document by its ID. Throws a NotFoundException
    if the document does not exist (the assumption being if you've
    got an id you should be pretty certain the thing exists)
    """
    obj = cls.find_one(cls._id_spec(id))
    if not obj:
        raise NotFoundException(cls.collection, id)
    return obj
[ "def", "find_by_id", "(", "cls", ",", "id", ")", ":", "obj", "=", "cls", ".", "find_one", "(", "cls", ".", "_id_spec", "(", "id", ")", ")", "if", "not", "obj", ":", "raise", "NotFoundException", "(", "cls", ".", "collection", ",", "id", ")", "return", "obj" ]
35.636364
11.636364
def save(self):
    """Saves all model instances in the batch as model.
    """
    saved = 0
    if not self.objects:
        raise BatchError("Save failed. Batch is empty")
    for deserialized_tx in self.objects:
        try:
            self.model.objects.get(pk=deserialized_tx.pk)
        except self.model.DoesNotExist:
            data = {}
            for field in self.model._meta.get_fields():
                try:
                    data.update({field.name: getattr(deserialized_tx, field.name)})
                except AttributeError:
                    pass
            self.model.objects.create(**data)
            saved += 1
    return saved
[ "def", "save", "(", "self", ")", ":", "saved", "=", "0", "if", "not", "self", ".", "objects", ":", "raise", "BatchError", "(", "\"Save failed. Batch is empty\"", ")", "for", "deserialized_tx", "in", "self", ".", "objects", ":", "try", ":", "self", ".", "model", ".", "objects", ".", "get", "(", "pk", "=", "deserialized_tx", ".", "pk", ")", "except", "self", ".", "model", ".", "DoesNotExist", ":", "data", "=", "{", "}", "for", "field", "in", "self", ".", "model", ".", "_meta", ".", "get_fields", "(", ")", ":", "try", ":", "data", ".", "update", "(", "{", "field", ".", "name", ":", "getattr", "(", "deserialized_tx", ",", "field", ".", "name", ")", "}", ")", "except", "AttributeError", ":", "pass", "self", ".", "model", ".", "objects", ".", "create", "(", "*", "*", "data", ")", "saved", "+=", "1", "return", "saved" ]
37.526316
15
def load_config(settings):
    '''Load settings from configfile'''
    config = ConfigParser()
    section = 'pgdocgen'
    try:
        config.read(settings['configfile'])
    except Exception as e:
        sys.stderr.write('Failed to read config: ' + str(e))
        sys.exit(1)
    for option in config.options(section):
        settings[option] = config.get(section, option)
    return settings
[ "def", "load_config", "(", "settings", ")", ":", "config", "=", "ConfigParser", "(", ")", "section", "=", "'pgdocgen'", "try", ":", "config", ".", "read", "(", "settings", "[", "'configfile'", "]", ")", "except", "Exception", "as", "e", ":", "sys", ".", "stderr", ".", "write", "(", "'Failed to read config: '", "+", "str", "(", "e", ")", ")", "sys", ".", "exit", "(", "1", ")", "for", "option", "in", "config", ".", "options", "(", "section", ")", ":", "settings", "[", "option", "]", "=", "config", ".", "get", "(", "section", ",", "option", ")", "return", "settings" ]
32.25
14.25
def remove(self, entity):
    """
    Remove entity from the MatchBox.

    :param object entity:
    """
    empty_traits = set()
    self.mismatch_unknown.discard(entity)
    for trait, entities in self.index.items():
        entities.discard(entity)
        if not entities:
            empty_traits.add(trait)

    for empty_trait in empty_traits:
        del self.index[empty_trait]
[ "def", "remove", "(", "self", ",", "entity", ")", ":", "empty_traits", "=", "set", "(", ")", "self", ".", "mismatch_unknown", ".", "discard", "(", "entity", ")", "for", "trait", ",", "entities", "in", "self", ".", "index", ".", "items", "(", ")", ":", "entities", ".", "discard", "(", "entity", ")", "if", "not", "entities", ":", "empty_traits", ".", "add", "(", "trait", ")", "for", "empty_trait", "in", "empty_traits", ":", "del", "self", ".", "index", "[", "empty_trait", "]" ]
28.066667
10.066667
def cformat(msg, reset=True, template=ColorTemplate):
    """
    Transform msg so that colors e.g. #RED;, #BLUE;, etc are mapped to the
    corresponding ANSI escape codes.

    e.g.
    >>> cformat("This is #RED;a red string.")
    'This is \\x1b[31ma red string.\\x1b[0m'

    >>> cformat("This is #BLUE;a blue string.", reset=False)
    'This is \\x1b[34ma blue string.'

    >>> cformat("This is #xBLUE;a blue background.", reset=False)
    'This is \\x1b[44ma blue background.'

    The returned string is escaped unless reset=False
    """
    ct = ColorTemplate(msg)
    m = ct.safe_substitute(ANSI_STRING_MAP)
    if reset:
        m += ANSI_STRING_MAP['RESET;']
    return m
[ "def", "cformat", "(", "msg", ",", "reset", "=", "True", ",", "template", "=", "ColorTemplate", ")", ":", "ct", "=", "ColorTemplate", "(", "msg", ")", "m", "=", "ct", ".", "safe_substitute", "(", "ANSI_STRING_MAP", ")", "if", "reset", ":", "m", "+=", "ANSI_STRING_MAP", "[", "'RESET;'", "]", "return", "m" ]
28.695652
18.782609
def _render_trajectories(self,
                         trajectories: Tuple[NonFluents, Fluents, Fluents, Fluents, np.array]) -> None:
    '''Prints the first batch of simulated `trajectories`.

    Args:
        trajectories: NonFluents, states, actions, interms and rewards.
    '''
    if self._verbose:
        non_fluents, initial_state, states, actions, interms, rewards = trajectories
        shape = states[0][1].shape
        batch_size, horizon, = shape[0], shape[1]
        states = [(s[0], s[1][0]) for s in states]
        interms = [(f[0], f[1][0]) for f in interms]
        actions = [(a[0], a[1][0]) for a in actions]
        rewards = np.reshape(rewards, [batch_size, horizon])[0]
        self._render_batch(non_fluents, states, actions, interms, rewards)
[ "def", "_render_trajectories", "(", "self", ",", "trajectories", ":", "Tuple", "[", "NonFluents", ",", "Fluents", ",", "Fluents", ",", "Fluents", ",", "np", ".", "array", "]", ")", "->", "None", ":", "if", "self", ".", "_verbose", ":", "non_fluents", ",", "initial_state", ",", "states", ",", "actions", ",", "interms", ",", "rewards", "=", "trajectories", "shape", "=", "states", "[", "0", "]", "[", "1", "]", ".", "shape", "batch_size", ",", "horizon", ",", "=", "shape", "[", "0", "]", ",", "shape", "[", "1", "]", "states", "=", "[", "(", "s", "[", "0", "]", ",", "s", "[", "1", "]", "[", "0", "]", ")", "for", "s", "in", "states", "]", "interms", "=", "[", "(", "f", "[", "0", "]", ",", "f", "[", "1", "]", "[", "0", "]", ")", "for", "f", "in", "interms", "]", "actions", "=", "[", "(", "a", "[", "0", "]", ",", "a", "[", "1", "]", "[", "0", "]", ")", "for", "a", "in", "actions", "]", "rewards", "=", "np", ".", "reshape", "(", "rewards", ",", "[", "batch_size", ",", "horizon", "]", ")", "[", "0", "]", "self", ".", "_render_batch", "(", "non_fluents", ",", "states", ",", "actions", ",", "interms", ",", "rewards", ")" ]
49.75
25.125
def nearest(self):
    '''
    Get the next state jump.

    The next jump is calculated looking at :attr:`current` state
    and its possible :attr:`jumps` to find the nearest and bigger
    option in :attr:`pending` data.

    If none is found, the returned next state label will be None.

    :returns: tuple with index, substring and next state label
    :rtype: tuple
    '''
    try:
        options = self.jumps[self.current]
    except KeyError:
        raise KeyError(
            'Current state %r not defined in %s.jumps.'
            % (self.current, self.__class__)
        )
    offset = len(self.start)
    index = len(self.pending)
    if self.streaming:
        index -= max(map(len, options))
    key = (index, 1)
    result = (index, '', None)
    for amark, anext in options.items():
        asize = len(amark)
        aindex = self.pending.find(amark, offset, index + asize)
        if aindex > -1:
            index = aindex
            akey = (aindex, -asize)
            if akey < key:
                key = akey
                result = (aindex, amark, anext)
    return result
[ "def", "nearest", "(", "self", ")", ":", "try", ":", "options", "=", "self", ".", "jumps", "[", "self", ".", "current", "]", "except", "KeyError", ":", "raise", "KeyError", "(", "'Current state %r not defined in %s.jumps.'", "%", "(", "self", ".", "current", ",", "self", ".", "__class__", ")", ")", "offset", "=", "len", "(", "self", ".", "start", ")", "index", "=", "len", "(", "self", ".", "pending", ")", "if", "self", ".", "streaming", ":", "index", "-=", "max", "(", "map", "(", "len", ",", "options", ")", ")", "key", "=", "(", "index", ",", "1", ")", "result", "=", "(", "index", ",", "''", ",", "None", ")", "for", "amark", ",", "anext", "in", "options", ".", "items", "(", ")", ":", "asize", "=", "len", "(", "amark", ")", "aindex", "=", "self", ".", "pending", ".", "find", "(", "amark", ",", "offset", ",", "index", "+", "asize", ")", "if", "aindex", ">", "-", "1", ":", "index", "=", "aindex", "akey", "=", "(", "aindex", ",", "-", "asize", ")", "if", "akey", "<", "key", ":", "key", "=", "akey", "result", "=", "(", "aindex", ",", "amark", ",", "anext", ")", "return", "result" ]
33.305556
17.305556
def _smooth(values: List[float], beta: float) -> List[float]:
    """ Exponential smoothing of values """
    avg_value = 0.
    smoothed = []
    for i, value in enumerate(values):
        avg_value = beta * avg_value + (1 - beta) * value
        smoothed.append(avg_value / (1 - beta ** (i + 1)))
    return smoothed
[ "def", "_smooth", "(", "values", ":", "List", "[", "float", "]", ",", "beta", ":", "float", ")", "->", "List", "[", "float", "]", ":", "avg_value", "=", "0.", "smoothed", "=", "[", "]", "for", "i", ",", "value", "in", "enumerate", "(", "values", ")", ":", "avg_value", "=", "beta", "*", "avg_value", "+", "(", "1", "-", "beta", ")", "*", "value", "smoothed", ".", "append", "(", "avg_value", "/", "(", "1", "-", "beta", "**", "(", "i", "+", "1", ")", ")", ")", "return", "smoothed" ]
38.875
15.5
def is_spam(request, form, url):
    """
    Main entry point for spam handling - called from the comment view and
    page processor for ``yacms.forms``, to check if posted content is spam.
    Spam filters are configured via the ``SPAM_FILTERS`` setting.
    """
    for spam_filter_path in settings.SPAM_FILTERS:
        spam_filter = import_dotted_path(spam_filter_path)
        if spam_filter(request, form, url):
            return True
[ "def", "is_spam", "(", "request", ",", "form", ",", "url", ")", ":", "for", "spam_filter_path", "in", "settings", ".", "SPAM_FILTERS", ":", "spam_filter", "=", "import_dotted_path", "(", "spam_filter_path", ")", "if", "spam_filter", "(", "request", ",", "form", ",", "url", ")", ":", "return", "True" ]
43.3
14.9
def srepr(expr, indented=False, cache=None):
    """Render the given expression into a string that can be evaluated in an
    appropriate context to re-instantiate an identical expression. If
    `indented` is False (default), the resulting string is a single line.
    Otherwise, the result is a multiline string, and each positional and
    keyword argument of each `Expression` is on a separate line, recursively
    indented to produce a tree-like output. The `cache` may be used to
    generate more readable expressions.

    Example:

        >>> hs = LocalSpace('1')
        >>> A = OperatorSymbol('A', hs=hs); B = OperatorSymbol('B', hs=hs)
        >>> expr = A + B
        >>> srepr(expr)
        "OperatorPlus(OperatorSymbol('A', hs=LocalSpace('1')), OperatorSymbol('B', hs=LocalSpace('1')))"
        >>> eval(srepr(expr)) == expr
        True
        >>> srepr(expr, cache={hs:'hs'})
        "OperatorPlus(OperatorSymbol('A', hs=hs), OperatorSymbol('B', hs=hs))"
        >>> eval(srepr(expr, cache={hs:'hs'})) == expr
        True
        >>> print(srepr(expr, indented=True))
        OperatorPlus(
            OperatorSymbol(
                'A',
                hs=LocalSpace(
                    '1')),
            OperatorSymbol(
                'B',
                hs=LocalSpace(
                    '1')))
        >>> eval(srepr(expr, indented=True)) == expr
        True

    See also:
        :func:`~qnet.printing.tree.print_tree`, respectively
        :func:`qnet.printing.tree.tree`, produces an output similar to the
        indented :func:`srepr`, for interactive use. Their result cannot be
        evaluated and the exact output depends on :func:`init_printing`.

        :func:`~qnet.printing.dot.dotprint` provides a way to graphically
        explore the tree structure of an expression.
    """
    if indented:
        printer = IndentedSReprPrinter(cache=cache)
    else:
        printer = QnetSReprPrinter(cache=cache)
    return printer.doprint(expr)
[ "def", "srepr", "(", "expr", ",", "indented", "=", "False", ",", "cache", "=", "None", ")", ":", "if", "indented", ":", "printer", "=", "IndentedSReprPrinter", "(", "cache", "=", "cache", ")", "else", ":", "printer", "=", "QnetSReprPrinter", "(", "cache", "=", "cache", ")", "return", "printer", ".", "doprint", "(", "expr", ")" ]
38.9
21.2
async def _write_ssl(self):
    """
    Flush outgoing data which OpenSSL put in our BIO to the transport.
    """
    pending = lib.BIO_ctrl_pending(self.write_bio)
    if pending > 0:
        result = lib.BIO_read(self.write_bio, self.write_cdata,
                              len(self.write_cdata))
        await self.transport._send(ffi.buffer(self.write_cdata)[0:result])
        self.__tx_bytes += result
        self.__tx_packets += 1
[ "async", "def", "_write_ssl", "(", "self", ")", ":", "pending", "=", "lib", ".", "BIO_ctrl_pending", "(", "self", ".", "write_bio", ")", "if", "pending", ">", "0", ":", "result", "=", "lib", ".", "BIO_read", "(", "self", ".", "write_bio", ",", "self", ".", "write_cdata", ",", "len", "(", "self", ".", "write_cdata", ")", ")", "await", "self", ".", "transport", ".", "_send", "(", "ffi", ".", "buffer", "(", "self", ".", "write_cdata", ")", "[", "0", ":", "result", "]", ")", "self", ".", "__tx_bytes", "+=", "result", "self", ".", "__tx_packets", "+=", "1" ]
43.9
17.5
def get_dead_hosting_devices_info(self):
    """
    Get a list of hosting devices that have been marked dead

    :return: List of dead hosting device ids
    """
    res = []
    for hd_id in self.hosting_devices_backlog:
        hd = self.hosting_devices_backlog[hd_id]['hd']
        if hd['hd_state'] == cc.HD_DEAD:
            res.append(hd['id'])
    return res
[ "def", "get_dead_hosting_devices_info", "(", "self", ")", ":", "res", "=", "[", "]", "for", "hd_id", "in", "self", ".", "hosting_devices_backlog", ":", "hd", "=", "self", ".", "hosting_devices_backlog", "[", "hd_id", "]", "[", "'hd'", "]", "if", "hd", "[", "'hd_state'", "]", "==", "cc", ".", "HD_DEAD", ":", "res", ".", "append", "(", "hd", "[", "'id'", "]", ")", "return", "res" ]
36
10.363636
def is_valid_pid_to_be_updated(did):
    """Assert that ``did`` is the PID of an object that can be updated
    (obsoleted) with MNStorage.update()"""
    if not d1_gmn.app.did.is_valid_pid_to_be_updated(did):
        raise d1_common.types.exceptions.InvalidRequest(
            0,
            'Object cannot be updated because the identifier for the object to be '
            'updated is {}. did="{}"'.format(
                d1_gmn.app.did.classify_identifier(did), did
            ),
            identifier=did,
        )
[ "def", "is_valid_pid_to_be_updated", "(", "did", ")", ":", "if", "not", "d1_gmn", ".", "app", ".", "did", ".", "is_valid_pid_to_be_updated", "(", "did", ")", ":", "raise", "d1_common", ".", "types", ".", "exceptions", ".", "InvalidRequest", "(", "0", ",", "'Object cannot be updated because the identifier for the object to be '", "'updated is {}. did=\"{}\"'", ".", "format", "(", "d1_gmn", ".", "app", ".", "did", ".", "classify_identifier", "(", "did", ")", ",", "did", ")", ",", "identifier", "=", "did", ",", ")" ]
42.833333
16.833333
def check_valid(msg_type, attr, value, func, exec_info):
    """
    Checker function all validate_* functions below will call.
    Raises InvalidMessageInputError if input is not valid as per
    given func.
    """
    if value is not None:
        if isinstance(value, MutableSequence):
            for v in value:
                if not func(v):
                    raise InvalidMessageInputError(msg_type, attr, value, exec_info)
        else:
            if not func(value):
                raise InvalidMessageInputError(msg_type, attr, value, exec_info)
[ "def", "check_valid", "(", "msg_type", ",", "attr", ",", "value", ",", "func", ",", "exec_info", ")", ":", "if", "value", "is", "not", "None", ":", "if", "isinstance", "(", "value", ",", "MutableSequence", ")", ":", "for", "v", "in", "value", ":", "if", "not", "func", "(", "v", ")", ":", "raise", "InvalidMessageInputError", "(", "msg_type", ",", "attr", ",", "value", ",", "exec_info", ")", "else", ":", "if", "not", "func", "(", "value", ")", ":", "raise", "InvalidMessageInputError", "(", "msg_type", ",", "attr", ",", "value", ",", "exec_info", ")" ]
39.142857
17.857143
def interval(coro, interval=1, times=None, loop=None):
    """
    Schedules the execution of a coroutine function every `x` amount of
    seconds.

    The function returns an `asyncio.Task`, which implements also an
    `asyncio.Future` interface, allowing the user to cancel the execution
    cycle.

    This function can be used as decorator.

    Arguments:
        coro (coroutinefunction): coroutine function to defer.
        interval (int/float): number of seconds to repeat the coroutine
            execution.
        times (int): optional maximum time of executions. Infinite by default.
        loop (asyncio.BaseEventLoop, optional): loop to run.
            Defaults to asyncio.get_event_loop().

    Raises:
        TypeError: if coro argument is not a coroutine function.

    Returns:
        future (asyncio.Task): coroutine wrapped as task future.
            Useful for cancellation and state checking.

    Usage::

        # Usage as function
        future = paco.interval(coro, 1)

        # Cancel it after a while...
        await asyncio.sleep(5)
        future.cancel()

        # Usage as decorator
        @paco.interval(10)
        async def metrics():
            await send_metrics()

        future = await metrics()
    """
    assert_corofunction(coro=coro)

    # Store maximum allowed number of calls
    times = int(times or 0) or float('inf')

    @asyncio.coroutine
    def schedule(times, *args, **kw):
        while times > 0:
            # Decrement times counter
            times -= 1

            # Schedule coroutine
            yield from coro(*args, **kw)
            yield from asyncio.sleep(interval)

    def wrapper(*args, **kw):
        return ensure_future(schedule(times, *args, **kw), loop=loop)

    return wrapper
[ "def", "interval", "(", "coro", ",", "interval", "=", "1", ",", "times", "=", "None", ",", "loop", "=", "None", ")", ":", "assert_corofunction", "(", "coro", "=", "coro", ")", "# Store maximum allowed number of calls", "times", "=", "int", "(", "times", "or", "0", ")", "or", "float", "(", "'inf'", ")", "@", "asyncio", ".", "coroutine", "def", "schedule", "(", "times", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "while", "times", ">", "0", ":", "# Decrement times counter", "times", "-=", "1", "# Schedule coroutine", "yield", "from", "coro", "(", "*", "args", ",", "*", "*", "kw", ")", "yield", "from", "asyncio", ".", "sleep", "(", "interval", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kw", ")", ":", "return", "ensure_future", "(", "schedule", "(", "times", ",", "*", "args", ",", "*", "*", "kw", ")", ",", "loop", "=", "loop", ")", "return", "wrapper" ]
27.66129
22.016129
def timedeltaToString(delta):
    """
    Convert timedelta to an ical DURATION.
    """
    if delta.days == 0:
        sign = 1
    else:
        sign = delta.days / abs(delta.days)
    delta = abs(delta)
    days = delta.days
    hours = int(delta.seconds / 3600)
    minutes = int((delta.seconds % 3600) / 60)
    seconds = int(delta.seconds % 60)

    output = ''
    if sign == -1:
        output += '-'
    output += 'P'
    if days:
        output += '{}D'.format(days)
    if hours or minutes or seconds:
        output += 'T'
    elif not days:  # Deal with zero duration
        output += 'T0S'
    if hours:
        output += '{}H'.format(hours)
    if minutes:
        output += '{}M'.format(minutes)
    if seconds:
        output += '{}S'.format(seconds)
    return output
[ "def", "timedeltaToString", "(", "delta", ")", ":", "if", "delta", ".", "days", "==", "0", ":", "sign", "=", "1", "else", ":", "sign", "=", "delta", ".", "days", "/", "abs", "(", "delta", ".", "days", ")", "delta", "=", "abs", "(", "delta", ")", "days", "=", "delta", ".", "days", "hours", "=", "int", "(", "delta", ".", "seconds", "/", "3600", ")", "minutes", "=", "int", "(", "(", "delta", ".", "seconds", "%", "3600", ")", "/", "60", ")", "seconds", "=", "int", "(", "delta", ".", "seconds", "%", "60", ")", "output", "=", "''", "if", "sign", "==", "-", "1", ":", "output", "+=", "'-'", "output", "+=", "'P'", "if", "days", ":", "output", "+=", "'{}D'", ".", "format", "(", "days", ")", "if", "hours", "or", "minutes", "or", "seconds", ":", "output", "+=", "'T'", "elif", "not", "days", ":", "# Deal with zero duration", "output", "+=", "'T0S'", "if", "hours", ":", "output", "+=", "'{}H'", ".", "format", "(", "hours", ")", "if", "minutes", ":", "output", "+=", "'{}M'", ".", "format", "(", "minutes", ")", "if", "seconds", ":", "output", "+=", "'{}S'", ".", "format", "(", "seconds", ")", "return", "output" ]
24.419355
14.483871
def interpolate_exe(self, testString):
    """
    Replace testString with a path to an executable based on the format.

    If this looks like ${which:lalapps_tmpltbank} it will return the
    equivalent of which(lalapps_tmpltbank)

    Otherwise it will return an unchanged string.

    Parameters
    -----------
    testString : string
        The input string

    Returns
    --------
    newString : string
        The output string.
    """
    # First check if any interpolation is needed and abort if not
    testString = testString.strip()
    if not (testString.startswith('${') and testString.endswith('}')):
        return testString

    # This may not be an exe interpolation, so even if it has ${XXX} form
    # I may not have to do anything
    newString = testString

    # Strip the ${ and }
    testString = testString[2:-1]
    testList = testString.split(':')

    # Maybe we can add a few different possibilities for substitution
    if len(testList) == 2:
        if testList[0] == 'which':
            newString = distutils.spawn.find_executable(testList[1])
            if not newString:
                errmsg = "Cannot find exe %s in your path " % (testList[1])
                errmsg += "and you specified ${which:%s}." % (testList[1])
                raise ValueError(errmsg)

    return newString
[ "def", "interpolate_exe", "(", "self", ",", "testString", ")", ":", "# First check if any interpolation is needed and abort if not", "testString", "=", "testString", ".", "strip", "(", ")", "if", "not", "(", "testString", ".", "startswith", "(", "'${'", ")", "and", "testString", ".", "endswith", "(", "'}'", ")", ")", ":", "return", "testString", "# This may not be an exe interpolation, so even if it has ${XXX} form", "# I may not have to do anything", "newString", "=", "testString", "# Strip the ${ and }", "testString", "=", "testString", "[", "2", ":", "-", "1", "]", "testList", "=", "testString", ".", "split", "(", "':'", ")", "# Maybe we can add a few different possibilities for substitution", "if", "len", "(", "testList", ")", "==", "2", ":", "if", "testList", "[", "0", "]", "==", "'which'", ":", "newString", "=", "distutils", ".", "spawn", ".", "find_executable", "(", "testList", "[", "1", "]", ")", "if", "not", "newString", ":", "errmsg", "=", "\"Cannot find exe %s in your path \"", "%", "(", "testList", "[", "1", "]", ")", "errmsg", "+=", "\"and you specified ${which:%s}.\"", "%", "(", "testList", "[", "1", "]", ")", "raise", "ValueError", "(", "errmsg", ")", "return", "newString" ]
30.956522
21.608696
def build(self, filenames, cl_args=None, link_args=None, x64=False, out_dir=''):
    """
    Compile source files and link object files.
    """
    if not cl_args:
        cl_args = []
    if not link_args:
        link_args = []
    msvc, lib = self.vc.get_bin_and_lib(x64)
    lib = self.make_lib(lib)
    if out_dir:
        cl_args.append('/Fo:' + out_dir + '\\')
    include = self.make_inc(self.vc.inc + self.sdk.inc)
    cl_args.extend(include + filenames)
    try:
        msvc.run_cl('/c', *cl_args)
    except CalledProcessError as error:
        logging.error(_('failed to compile: %s'), filenames)
        logging.error(_('cl.exe returned:\n%s'), error.output)
        return False
    link_args.extend(lib + self.make_objs(filenames, out_dir))
    try:
        msvc.run_link(*link_args)
    except CalledProcessError as error:
        logging.error(_('failed to link: %s'), filenames)
        logging.error(_('link.exe returned:\n%s'), error.output)
        return False
    return True
[ "def", "build", "(", "self", ",", "filenames", ",", "cl_args", "=", "None", ",", "link_args", "=", "None", ",", "x64", "=", "False", ",", "out_dir", "=", "''", ")", ":", "if", "not", "cl_args", ":", "cl_args", "=", "[", "]", "if", "not", "link_args", ":", "link_args", "=", "[", "]", "msvc", ",", "lib", "=", "self", ".", "vc", ".", "get_bin_and_lib", "(", "x64", ")", "lib", "=", "self", ".", "make_lib", "(", "lib", ")", "if", "out_dir", ":", "cl_args", ".", "append", "(", "'/Fo:'", "+", "out_dir", "+", "'\\\\'", ")", "include", "=", "self", ".", "make_inc", "(", "self", ".", "vc", ".", "inc", "+", "self", ".", "sdk", ".", "inc", ")", "cl_args", ".", "extend", "(", "include", "+", "filenames", ")", "try", ":", "msvc", ".", "run_cl", "(", "'/c'", ",", "*", "cl_args", ")", "except", "CalledProcessError", "as", "error", ":", "logging", ".", "error", "(", "_", "(", "'failed to compile: %s'", ")", ",", "filenames", ")", "logging", ".", "error", "(", "_", "(", "'cl.exe returned:\\n%s'", ")", ",", "error", ".", "output", ")", "return", "False", "link_args", ".", "extend", "(", "lib", "+", "self", ".", "make_objs", "(", "filenames", ",", "out_dir", ")", ")", "try", ":", "msvc", ".", "run_link", "(", "*", "link_args", ")", "except", "CalledProcessError", "as", "error", ":", "logging", ".", "error", "(", "_", "(", "'failed to link: %s'", ")", ",", "filenames", ")", "logging", ".", "error", "(", "_", "(", "'link.exe returned:\\n%s'", ")", ",", "error", ".", "output", ")", "return", "False", "return", "True" ]
37.724138
14
def set_annotation(self):
    """Appends the context's ``pending_symbol`` to its ``annotations``
    sequence."""
    assert self.pending_symbol is not None
    assert not self.value
    annotations = (_as_symbol(self.pending_symbol, is_symbol_value=False),)  # pending_symbol becomes an annotation
    self.annotations = annotations if not self.annotations else self.annotations + annotations
    self.ion_type = None
    self.pending_symbol = None  # reset pending symbol
    self.quoted_text = False
    self.line_comment = False
    self.is_self_delimiting = False
    return self
[ "def", "set_annotation", "(", "self", ")", ":", "assert", "self", ".", "pending_symbol", "is", "not", "None", "assert", "not", "self", ".", "value", "annotations", "=", "(", "_as_symbol", "(", "self", ".", "pending_symbol", ",", "is_symbol_value", "=", "False", ")", ",", ")", "# pending_symbol becomes an annotation", "self", ".", "annotations", "=", "annotations", "if", "not", "self", ".", "annotations", "else", "self", ".", "annotations", "+", "annotations", "self", ".", "ion_type", "=", "None", "self", ".", "pending_symbol", "=", "None", "# reset pending symbol", "self", ".", "quoted_text", "=", "False", "self", ".", "line_comment", "=", "False", "self", ".", "is_self_delimiting", "=", "False", "return", "self" ]
51.083333
19.666667
def get_run_as_identifiers_stack(self):
    """
    :returns: an IdentifierCollection
    """
    session = self.get_session(False)
    try:
        return session.get_internal_attribute(self.run_as_identifiers_session_key)
    except AttributeError:
        return None
[ "def", "get_run_as_identifiers_stack", "(", "self", ")", ":", "session", "=", "self", ".", "get_session", "(", "False", ")", "try", ":", "return", "session", ".", "get_internal_attribute", "(", "self", ".", "run_as_identifiers_session_key", ")", "except", "AttributeError", ":", "return", "None" ]
29.4
14.4
def toint(number):
    """
    Helper to return rounded int for a float or just the int itself.
    """
    if isinstance(number, float):
        if number > 1:
            number = round(number, 0)
        else:
            # The following solves when image has small dimensions (like 1x54)
            # then scale factor 1 * 0.296296 and `number` will store `0`
            # that will later raise ZeroDivisionError.
            number = round(math.ceil(number), 0)
    return int(number)
[ "def", "toint", "(", "number", ")", ":", "if", "isinstance", "(", "number", ",", "float", ")", ":", "if", "number", ">", "1", ":", "number", "=", "round", "(", "number", ",", "0", ")", "else", ":", "# The following solves when image has small dimensions (like 1x54)", "# then scale factor 1 * 0.296296 and `number` will store `0`", "# that will later raise ZeroDivisionError.", "number", "=", "round", "(", "math", ".", "ceil", "(", "number", ")", ",", "0", ")", "return", "int", "(", "number", ")" ]
36.923077
16.615385
def get_nlp_base(self):
    ''' getter '''
    if isinstance(self.__nlp_base, NlpBase) is False:
        raise TypeError("The type of self.__nlp_base must be NlpBase.")
    return self.__nlp_base
[ "def", "get_nlp_base", "(", "self", ")", ":", "if", "isinstance", "(", "self", ".", "__nlp_base", ",", "NlpBase", ")", "is", "False", ":", "raise", "TypeError", "(", "\"The type of self.__nlp_base must be NlpBase.\"", ")", "return", "self", ".", "__nlp_base" ]
34.5
22.833333
def from_zenity_tuple_str(zenity_tuple_str: str):
    """
    Parser for Zenity output, which outputs a named tuple-like string:
    "rgb(R, G, B)", where R, G, B are base10 integers.

    @param zenity_tuple_str: tuple-like string: "rgb(r, g, b)", where
        r, g, b are base10 integers.
    @return: ColourData instance
    @rtype: ColourData
    """
    components = zenity_tuple_str.strip("rgb()").split(",")
    return ColourData(*map(int, components))
[ "def", "from_zenity_tuple_str", "(", "zenity_tuple_str", ":", "str", ")", ":", "components", "=", "zenity_tuple_str", ".", "strip", "(", "\"rgb()\"", ")", ".", "split", "(", "\",\"", ")", "return", "ColourData", "(", "*", "map", "(", "int", ",", "components", ")", ")" ]
47.7
21.7
def register_library_type(name, type_):
    """
    Register an Arctic Library Type handler
    """
    if name in LIBRARY_TYPES:
        raise ArcticException("Library %s already registered as %s" % (name, LIBRARY_TYPES[name]))
    LIBRARY_TYPES[name] = type_
[ "def", "register_library_type", "(", "name", ",", "type_", ")", ":", "if", "name", "in", "LIBRARY_TYPES", ":", "raise", "ArcticException", "(", "\"Library %s already registered as %s\"", "%", "(", "name", ",", "LIBRARY_TYPES", "[", "name", "]", ")", ")", "LIBRARY_TYPES", "[", "name", "]", "=", "type_" ]
36.142857
11.571429
def option_changed(self, option, value):
    """Option has changed"""
    setattr(self, to_text_string(option), value)
    self.shellwidget.set_namespace_view_settings()
    self.refresh_table()
[ "def", "option_changed", "(", "self", ",", "option", ",", "value", ")", ":", "setattr", "(", "self", ",", "to_text_string", "(", "option", ")", ",", "value", ")", "self", ".", "shellwidget", ".", "set_namespace_view_settings", "(", ")", "self", ".", "refresh_table", "(", ")" ]
42
8.2
def truncate(message, limit=500):
    """
    Truncates the message to the given limit length. The beginning and the
    end of the message are left untouched.
    """
    if len(message) > limit:
        trc_msg = ''.join([message[:limit // 2 - 2],
                           ' .. ',
                           message[len(message) - limit // 2 + 2:]])
    else:
        trc_msg = message
    return trc_msg
[ "def", "truncate", "(", "message", ",", "limit", "=", "500", ")", ":", "if", "len", "(", "message", ")", ">", "limit", ":", "trc_msg", "=", "''", ".", "join", "(", "[", "message", "[", ":", "limit", "//", "2", "-", "2", "]", ",", "' .. '", ",", "message", "[", "len", "(", "message", ")", "-", "limit", "//", "2", "+", "2", ":", "]", "]", ")", "else", ":", "trc_msg", "=", "message", "return", "trc_msg" ]
33.083333
14.083333
def fourier_fit_magseries(times, mags, errs, period, fourierorder=None, fourierparams=None, sigclip=3.0, magsarefluxes=False, plotfit=False, ignoreinitfail=True, verbose=True): '''This fits a Fourier series to a mag/flux time series. Parameters ---------- times,mags,errs : np.array The input mag/flux time-series to fit a Fourier cosine series to. period : float The period to use for the Fourier fit. fourierorder : None or int If this is an int, will be interpreted as the Fourier order of the series to fit to the input mag/flux time-series. If this is None and `fourierparams` is specified, `fourierparams` will be used directly to generate the fit Fourier series. If `fourierparams` is also None, this function will try to fit a Fourier cosine series of order 3 to the mag/flux time-series. fourierparams : list of floats or None If this is specified as a list of floats, it must be of the form below:: [fourier_amp1, fourier_amp2, fourier_amp3,...,fourier_ampN, fourier_phase1, fourier_phase2, fourier_phase3,...,fourier_phaseN] to specify a Fourier cosine series of order N. If this is None and `fourierorder` is specified, the Fourier order specified there will be used to construct the Fourier cosine series used to fit the input mag/flux time-series. If both are None, this function will try to fit a Fourier cosine series of order 3 to the input mag/flux time-series. sigclip : float or int or sequence of two floats/ints or None If a single float or int, a symmetric sigma-clip will be performed using the number provided as the sigma-multiplier to cut out from the input time-series. If a list of two ints/floats is provided, the function will perform an 'asymmetric' sigma-clip. The first element in this list is the sigma value to use for fainter flux/mag values; the second element in this list is the sigma value to use for brighter flux/mag values. For example, `sigclip=[10., 3.]`, will sigclip out greater than 10-sigma dimmings and greater than 3-sigma brightenings. Here the meaning of "dimming" and "brightening" is set by *physics* (not the magnitude system), which is why the `magsarefluxes` kwarg must be correctly set. If `sigclip` is None, no sigma-clipping will be performed, and the time-series (with non-finite elems removed) will be passed through to the output. magsarefluxes : bool If True, will treat the input values of `mags` as fluxes for purposes of plotting the fit and sig-clipping. plotfit : str or False If this is a string, this function will make a plot for the fit to the mag/flux time-series and write the plot to the path specified here. ignoreinitfail : bool If this is True, ignores the initial failure to find a set of optimized Fourier parameters using the global optimization function and proceeds to do a least-squares fit anyway. verbose : bool If True, will indicate progress and warn of any problems. Returns ------- dict This function returns a dict containing the model fit parameters, the minimized chi-sq value and the reduced chi-sq value. The form of this dict is mostly standardized across all functions in this module:: { 'fittype':'fourier', 'fitinfo':{ 'finalparams': the list of final model fit params, 'leastsqfit':the full tuple returned by scipy.leastsq, 'fitmags': the model fit mags, 'fitepoch': the epoch of minimum light for the fit, ... other fit function specific keys ... 
}, 'fitchisq': the minimized value of the fit's chi-sq, 'fitredchisq':the reduced chi-sq value, 'fitplotfile': the output fit plot if plotfit is not None, 'magseries':{ 'times':input times in phase order of the model, 'phase':the phases of the model mags, 'mags':input mags/fluxes in the phase order of the model, 'errs':errs in the phase order of the model, 'magsarefluxes':input value of magsarefluxes kwarg } } NOTE: the returned value of 'fitepoch' in the 'fitinfo' dict returned by this function is the time value of the first observation since this is where the LC is folded for the fit procedure. To get the actual time of minimum epoch as calculated by a spline fit to the phased LC, use the key 'actual_fitepoch' in the 'fitinfo' dict. ''' stimes, smags, serrs = sigclip_magseries(times, mags, errs, sigclip=sigclip, magsarefluxes=magsarefluxes) # get rid of zero errs nzind = npnonzero(serrs) stimes, smags, serrs = stimes[nzind], smags[nzind], serrs[nzind] phase, pmags, perrs, ptimes, mintime = ( get_phased_quantities(stimes, smags, serrs, period) ) # get the fourier order either from the scalar order kwarg... if fourierorder and fourierorder > 0 and not fourierparams: fourieramps = [0.6] + [0.2]*(fourierorder - 1) fourierphas = [0.1] + [0.1]*(fourierorder - 1) fourierparams = fourieramps + fourierphas # or from the fully specified coeffs vector elif not fourierorder and fourierparams: fourierorder = int(len(fourierparams)/2) else: LOGWARNING('specified both/neither Fourier order AND Fourier coeffs, ' 'using default Fourier order of 3') fourierorder = 3 fourieramps = [0.6] + [0.2]*(fourierorder - 1) fourierphas = [0.1] + [0.1]*(fourierorder - 1) fourierparams = fourieramps + fourierphas if verbose: LOGINFO('fitting Fourier series of order %s to ' 'mag series with %s observations, ' 'using period %.6f, folded at %.6f' % (fourierorder, len(phase), period, mintime)) # initial minimize call to find global minimum in chi-sq initialfit = spminimize(_fourier_chisq, fourierparams, method='BFGS', args=(phase, pmags, perrs)) # make sure this initial fit succeeds before proceeding if initialfit.success or ignoreinitfail: if verbose: LOGINFO('initial fit done, refining...') leastsqparams = initialfit.x try: leastsqfit = spleastsq(_fourier_residual, leastsqparams, args=(phase, pmags)) except Exception as e: leastsqfit = None # if the fit succeeded, then we can return the final parameters if leastsqfit and leastsqfit[-1] in (1,2,3,4): finalparams = leastsqfit[0] # calculate the chisq and reduced chisq fitmags = _fourier_func(finalparams, phase, pmags) fitchisq = npsum( ((fitmags - pmags)*(fitmags - pmags)) / (perrs*perrs) ) fitredchisq = fitchisq/(len(pmags) - len(finalparams) - 1) if verbose: LOGINFO( 'final fit done. chisq = %.5f, reduced chisq = %.5f' % (fitchisq,fitredchisq) ) # figure out the time of light curve minimum (i.e. the fit epoch) # this is when the fit mag is maximum (i.e. 
the faintest) # or if magsarefluxes = True, then this is when fit flux is minimum if not magsarefluxes: fitmagminind = npwhere(fitmags == npmax(fitmags)) else: fitmagminind = npwhere(fitmags == npmin(fitmags)) if len(fitmagminind[0]) > 1: fitmagminind = (fitmagminind[0][0],) # assemble the returndict returndict = { 'fittype':'fourier', 'fitinfo':{ 'fourierorder':fourierorder, 'finalparams':finalparams, 'initialfit':initialfit, 'leastsqfit':leastsqfit, 'fitmags':fitmags, 'fitepoch':mintime, 'actual_fitepoch':ptimes[fitmagminind] }, 'fitchisq':fitchisq, 'fitredchisq':fitredchisq, 'fitplotfile':None, 'magseries':{ 'times':ptimes, 'phase':phase, 'mags':pmags, 'errs':perrs, 'magsarefluxes':magsarefluxes }, } # make the fit plot if required if plotfit and isinstance(plotfit, str): make_fit_plot(phase, pmags, perrs, fitmags, period, mintime, mintime, plotfit, magsarefluxes=magsarefluxes) returndict['fitplotfile'] = plotfit return returndict # if the leastsq fit did not succeed, return Nothing else: LOGERROR('fourier-fit: least-squared fit to the light curve failed') return { 'fittype':'fourier', 'fitinfo':{ 'fourierorder':fourierorder, 'finalparams':None, 'initialfit':initialfit, 'leastsqfit':None, 'fitmags':None, 'fitepoch':None }, 'fitchisq':npnan, 'fitredchisq':npnan, 'fitplotfile':None, 'magseries':{ 'times':ptimes, 'phase':phase, 'mags':pmags, 'errs':perrs, 'magsarefluxes':magsarefluxes } } # if the fit didn't succeed, we can't proceed else: LOGERROR('initial Fourier fit did not succeed, ' 'reason: %s, returning scipy OptimizeResult' % initialfit.message) return { 'fittype':'fourier', 'fitinfo':{ 'fourierorder':fourierorder, 'finalparams':None, 'initialfit':initialfit, 'leastsqfit':None, 'fitmags':None, 'fitepoch':None }, 'fitchisq':npnan, 'fitredchisq':npnan, 'fitplotfile':None, 'magseries':{ 'times':ptimes, 'phase':phase, 'mags':pmags, 'errs':perrs, 'magsarefluxes':magsarefluxes } }
[ "def", "fourier_fit_magseries", "(", "times", ",", "mags", ",", "errs", ",", "period", ",", "fourierorder", "=", "None", ",", "fourierparams", "=", "None", ",", "sigclip", "=", "3.0", ",", "magsarefluxes", "=", "False", ",", "plotfit", "=", "False", ",", "ignoreinitfail", "=", "True", ",", "verbose", "=", "True", ")", ":", "stimes", ",", "smags", ",", "serrs", "=", "sigclip_magseries", "(", "times", ",", "mags", ",", "errs", ",", "sigclip", "=", "sigclip", ",", "magsarefluxes", "=", "magsarefluxes", ")", "# get rid of zero errs", "nzind", "=", "npnonzero", "(", "serrs", ")", "stimes", ",", "smags", ",", "serrs", "=", "stimes", "[", "nzind", "]", ",", "smags", "[", "nzind", "]", ",", "serrs", "[", "nzind", "]", "phase", ",", "pmags", ",", "perrs", ",", "ptimes", ",", "mintime", "=", "(", "get_phased_quantities", "(", "stimes", ",", "smags", ",", "serrs", ",", "period", ")", ")", "# get the fourier order either from the scalar order kwarg...", "if", "fourierorder", "and", "fourierorder", ">", "0", "and", "not", "fourierparams", ":", "fourieramps", "=", "[", "0.6", "]", "+", "[", "0.2", "]", "*", "(", "fourierorder", "-", "1", ")", "fourierphas", "=", "[", "0.1", "]", "+", "[", "0.1", "]", "*", "(", "fourierorder", "-", "1", ")", "fourierparams", "=", "fourieramps", "+", "fourierphas", "# or from the fully specified coeffs vector", "elif", "not", "fourierorder", "and", "fourierparams", ":", "fourierorder", "=", "int", "(", "len", "(", "fourierparams", ")", "/", "2", ")", "else", ":", "LOGWARNING", "(", "'specified both/neither Fourier order AND Fourier coeffs, '", "'using default Fourier order of 3'", ")", "fourierorder", "=", "3", "fourieramps", "=", "[", "0.6", "]", "+", "[", "0.2", "]", "*", "(", "fourierorder", "-", "1", ")", "fourierphas", "=", "[", "0.1", "]", "+", "[", "0.1", "]", "*", "(", "fourierorder", "-", "1", ")", "fourierparams", "=", "fourieramps", "+", "fourierphas", "if", "verbose", ":", "LOGINFO", "(", "'fitting Fourier series of order %s to '", "'mag series with %s observations, '", "'using period %.6f, folded at %.6f'", "%", "(", "fourierorder", ",", "len", "(", "phase", ")", ",", "period", ",", "mintime", ")", ")", "# initial minimize call to find global minimum in chi-sq", "initialfit", "=", "spminimize", "(", "_fourier_chisq", ",", "fourierparams", ",", "method", "=", "'BFGS'", ",", "args", "=", "(", "phase", ",", "pmags", ",", "perrs", ")", ")", "# make sure this initial fit succeeds before proceeding", "if", "initialfit", ".", "success", "or", "ignoreinitfail", ":", "if", "verbose", ":", "LOGINFO", "(", "'initial fit done, refining...'", ")", "leastsqparams", "=", "initialfit", ".", "x", "try", ":", "leastsqfit", "=", "spleastsq", "(", "_fourier_residual", ",", "leastsqparams", ",", "args", "=", "(", "phase", ",", "pmags", ")", ")", "except", "Exception", "as", "e", ":", "leastsqfit", "=", "None", "# if the fit succeeded, then we can return the final parameters", "if", "leastsqfit", "and", "leastsqfit", "[", "-", "1", "]", "in", "(", "1", ",", "2", ",", "3", ",", "4", ")", ":", "finalparams", "=", "leastsqfit", "[", "0", "]", "# calculate the chisq and reduced chisq", "fitmags", "=", "_fourier_func", "(", "finalparams", ",", "phase", ",", "pmags", ")", "fitchisq", "=", "npsum", "(", "(", "(", "fitmags", "-", "pmags", ")", "*", "(", "fitmags", "-", "pmags", ")", ")", "/", "(", "perrs", "*", "perrs", ")", ")", "fitredchisq", "=", "fitchisq", "/", "(", "len", "(", "pmags", ")", "-", "len", "(", "finalparams", ")", "-", "1", ")", "if", "verbose", ":", "LOGINFO", "(", 
"'final fit done. chisq = %.5f, reduced chisq = %.5f'", "%", "(", "fitchisq", ",", "fitredchisq", ")", ")", "# figure out the time of light curve minimum (i.e. the fit epoch)", "# this is when the fit mag is maximum (i.e. the faintest)", "# or if magsarefluxes = True, then this is when fit flux is minimum", "if", "not", "magsarefluxes", ":", "fitmagminind", "=", "npwhere", "(", "fitmags", "==", "npmax", "(", "fitmags", ")", ")", "else", ":", "fitmagminind", "=", "npwhere", "(", "fitmags", "==", "npmin", "(", "fitmags", ")", ")", "if", "len", "(", "fitmagminind", "[", "0", "]", ")", ">", "1", ":", "fitmagminind", "=", "(", "fitmagminind", "[", "0", "]", "[", "0", "]", ",", ")", "# assemble the returndict", "returndict", "=", "{", "'fittype'", ":", "'fourier'", ",", "'fitinfo'", ":", "{", "'fourierorder'", ":", "fourierorder", ",", "'finalparams'", ":", "finalparams", ",", "'initialfit'", ":", "initialfit", ",", "'leastsqfit'", ":", "leastsqfit", ",", "'fitmags'", ":", "fitmags", ",", "'fitepoch'", ":", "mintime", ",", "'actual_fitepoch'", ":", "ptimes", "[", "fitmagminind", "]", "}", ",", "'fitchisq'", ":", "fitchisq", ",", "'fitredchisq'", ":", "fitredchisq", ",", "'fitplotfile'", ":", "None", ",", "'magseries'", ":", "{", "'times'", ":", "ptimes", ",", "'phase'", ":", "phase", ",", "'mags'", ":", "pmags", ",", "'errs'", ":", "perrs", ",", "'magsarefluxes'", ":", "magsarefluxes", "}", ",", "}", "# make the fit plot if required", "if", "plotfit", "and", "isinstance", "(", "plotfit", ",", "str", ")", ":", "make_fit_plot", "(", "phase", ",", "pmags", ",", "perrs", ",", "fitmags", ",", "period", ",", "mintime", ",", "mintime", ",", "plotfit", ",", "magsarefluxes", "=", "magsarefluxes", ")", "returndict", "[", "'fitplotfile'", "]", "=", "plotfit", "return", "returndict", "# if the leastsq fit did not succeed, return Nothing", "else", ":", "LOGERROR", "(", "'fourier-fit: least-squared fit to the light curve failed'", ")", "return", "{", "'fittype'", ":", "'fourier'", ",", "'fitinfo'", ":", "{", "'fourierorder'", ":", "fourierorder", ",", "'finalparams'", ":", "None", ",", "'initialfit'", ":", "initialfit", ",", "'leastsqfit'", ":", "None", ",", "'fitmags'", ":", "None", ",", "'fitepoch'", ":", "None", "}", ",", "'fitchisq'", ":", "npnan", ",", "'fitredchisq'", ":", "npnan", ",", "'fitplotfile'", ":", "None", ",", "'magseries'", ":", "{", "'times'", ":", "ptimes", ",", "'phase'", ":", "phase", ",", "'mags'", ":", "pmags", ",", "'errs'", ":", "perrs", ",", "'magsarefluxes'", ":", "magsarefluxes", "}", "}", "# if the fit didn't succeed, we can't proceed", "else", ":", "LOGERROR", "(", "'initial Fourier fit did not succeed, '", "'reason: %s, returning scipy OptimizeResult'", "%", "initialfit", ".", "message", ")", "return", "{", "'fittype'", ":", "'fourier'", ",", "'fitinfo'", ":", "{", "'fourierorder'", ":", "fourierorder", ",", "'finalparams'", ":", "None", ",", "'initialfit'", ":", "initialfit", ",", "'leastsqfit'", ":", "None", ",", "'fitmags'", ":", "None", ",", "'fitepoch'", ":", "None", "}", ",", "'fitchisq'", ":", "npnan", ",", "'fitredchisq'", ":", "npnan", ",", "'fitplotfile'", ":", "None", ",", "'magseries'", ":", "{", "'times'", ":", "ptimes", ",", "'phase'", ":", "phase", ",", "'mags'", ":", "pmags", ",", "'errs'", ":", "perrs", ",", "'magsarefluxes'", ":", "magsarefluxes", "}", "}" ]
38.410959
22.267123
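The `fourierparams` layout documented above, `[A_1..A_N, p_1..p_N]`, corresponds to an order-N cosine series. A minimal sketch of that model follows; note that astrobase's internal `_fourier_func` may also add a zero-point term (e.g. the median magnitude), which is an assumption not shown in the record:

import numpy as np

def fourier_cosine_series(params, phase):
    # m(phi) = sum over k=1..N of A_k * cos(2*pi*k*phi + p_k),
    # with params = [A_1..A_N, p_1..p_N] as in `fourierparams`.
    order = len(params) // 2
    amps, phases = params[:order], params[order:]
    return sum(a * np.cos(2.0 * np.pi * (k + 1) * phase + p)
               for k, (a, p) in enumerate(zip(amps, phases)))

# The default initial guess the function builds for order 3:
# amps [0.6, 0.2, 0.2] and phases [0.1, 0.1, 0.1].
model = fourier_cosine_series([0.6, 0.2, 0.2, 0.1, 0.1, 0.1],
                              np.linspace(0.0, 1.0, 100))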
def SetBackingStore(cls, backing): """Set the global backing type used by the ComponentRegistry from this point forward This function must be called before any operations that use the registry are initiated otherwise they will work from different registries that will likely contain different data """ if backing not in ['json', 'sqlite', 'memory']: raise ArgumentError("Unknown backing store type that is not json or sqlite", backing=backing) if backing == 'json': cls.BackingType = JSONKVStore cls.BackingFileName = 'component_registry.json' elif backing == 'memory': cls.BackingType = InMemoryKVStore cls.BackingFileName = None else: cls.BackingType = SQLiteKVStore cls.BackingFileName = 'component_registry.db'
[ "def", "SetBackingStore", "(", "cls", ",", "backing", ")", ":", "if", "backing", "not", "in", "[", "'json'", ",", "'sqlite'", ",", "'memory'", "]", ":", "raise", "ArgumentError", "(", "\"Unknown backing store type that is not json or sqlite\"", ",", "backing", "=", "backing", ")", "if", "backing", "==", "'json'", ":", "cls", ".", "BackingType", "=", "JSONKVStore", "cls", ".", "BackingFileName", "=", "'component_registry.json'", "elif", "backing", "==", "'memory'", ":", "cls", ".", "BackingType", "=", "InMemoryKVStore", "cls", ".", "BackingFileName", "=", "None", "else", ":", "cls", ".", "BackingType", "=", "SQLiteKVStore", "cls", ".", "BackingFileName", "=", "'component_registry.db'" ]
44.578947
21.578947
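A brief usage sketch; `SetBackingStore` takes `cls`, so it reads like a classmethod that must be invoked before the registry is first used (the surrounding lines are illustrative, not from the record):

# Hypothetical usage -- run before any other registry operation:
ComponentRegistry.SetBackingStore('memory')   # one of 'json', 'sqlite', 'memory'
registry = ComponentRegistry()                # now backed by InMemoryKVStore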
def remove_organisation_from_all(cls, organisation_id): """Remove an organisation from all users""" users = yield views.organisation_members.get(key=organisation_id, include_docs=True) users = [x['doc'] for x in users['rows']] for user in users: user['organisations'][organisation_id]['state'] = State.deactivated.name db = cls.db_client() yield db.save_docs(users)
[ "def", "remove_organisation_from_all", "(", "cls", ",", "organisation_id", ")", ":", "users", "=", "yield", "views", ".", "organisation_members", ".", "get", "(", "key", "=", "organisation_id", ",", "include_docs", "=", "True", ")", "users", "=", "[", "x", "[", "'doc'", "]", "for", "x", "in", "users", "[", "'rows'", "]", "]", "for", "user", "in", "users", ":", "user", "[", "'organisations'", "]", "[", "organisation_id", "]", "[", "'state'", "]", "=", "State", ".", "deactivated", ".", "name", "db", "=", "cls", ".", "db_client", "(", ")", "yield", "db", ".", "save_docs", "(", "users", ")" ]
47
20.5
def calc(path): ''' Takes a path as an argument and returns the total size in bytes of the file or directory. If the path is a directory the size will be calculated recursively. ''' total = 0 err = None if os.path.isdir(path): try: for entry in os.scandir(path): try: is_dir = entry.is_dir(follow_symlinks=False) except (PermissionError, FileNotFoundError): err = "!" return total, err if is_dir: result = calc(entry.path) total += result[0] err = result[1] if err: return total, err else: try: total += entry.stat(follow_symlinks=False).st_size except (PermissionError, FileNotFoundError): err = "!" return total, err except (PermissionError, FileNotFoundError): err = "!" return total, err else: total += os.path.getsize(path) return total, err
[ "def", "calc", "(", "path", ")", ":", "total", "=", "0", "err", "=", "None", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "try", ":", "for", "entry", "in", "os", ".", "scandir", "(", "path", ")", ":", "try", ":", "is_dir", "=", "entry", ".", "is_dir", "(", "follow_symlinks", "=", "False", ")", "except", "(", "PermissionError", ",", "FileNotFoundError", ")", ":", "err", "=", "\"!\"", "return", "total", ",", "err", "if", "is_dir", ":", "result", "=", "calc", "(", "entry", ".", "path", ")", "total", "+=", "result", "[", "0", "]", "err", "=", "result", "[", "1", "]", "if", "err", ":", "return", "total", ",", "err", "else", ":", "try", ":", "total", "+=", "entry", ".", "stat", "(", "follow_symlinks", "=", "False", ")", ".", "st_size", "except", "(", "PermissionError", ",", "FileNotFoundError", ")", ":", "err", "=", "\"!\"", "return", "total", ",", "err", "except", "(", "PermissionError", ",", "FileNotFoundError", ")", ":", "err", "=", "\"!\"", "return", "total", ",", "err", "else", ":", "total", "+=", "os", ".", "path", ".", "getsize", "(", "path", ")", "return", "total", ",", "err" ]
33.911765
17.5
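Usage sketch for `calc` above (assumes the function is in scope; the path is made up). It returns a `(total_bytes, err)` pair, with `err` set to "!" at the first permission or missing-file problem:

import os

total, err = calc(os.path.expanduser('~/projects'))
if err == '!':
    print('scan stopped early: permission denied or a file vanished mid-scan')
else:
    print('total size: %d bytes' % total)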
def load_model(self, name=None): ''' Loads a saved version of the model. ''' if self.clobber: return False if name is None: name = self.name file = os.path.join(self.dir, '%s.npz' % name) if os.path.exists(file): if not self.is_parent: log.info("Loading '%s.npz'..." % name) try: data = np.load(file) for key in data.keys(): try: setattr(self, key, data[key][()]) except NotImplementedError: pass # HACK: Backwards compatibility. Previous version stored # the CDPP in the `cdpp6` # and `cdpp6_arr` attributes. Let's move them over. if hasattr(self, 'cdpp6'): self.cdpp = self.cdpp6 del self.cdpp6 if hasattr(self, 'cdpp6_arr'): self.cdpp_arr = np.array(self.cdpp6_arr) del self.cdpp6_arr if hasattr(self, 'gppp'): self.cdppg = self.gppp del self.gppp # HACK: At one point we were saving the figure instances, # so loading the .npz # opened a plotting window. I don't think this is the case # any more, so this # next line should be removed in the future... pl.close() return True except: log.warn("Error loading '%s.npz'." % name) exctype, value, tb = sys.exc_info() for line in traceback.format_exception_only(exctype, value): ln = line.replace('\n', '') log.warn(ln) os.rename(file, file + '.bad') if self.is_parent: raise Exception( 'Unable to load `%s` model for target %d.' % (self.name, self.ID)) return False
[ "def", "load_model", "(", "self", ",", "name", "=", "None", ")", ":", "if", "self", ".", "clobber", ":", "return", "False", "if", "name", "is", "None", ":", "name", "=", "self", ".", "name", "file", "=", "os", ".", "path", ".", "join", "(", "self", ".", "dir", ",", "'%s.npz'", "%", "name", ")", "if", "os", ".", "path", ".", "exists", "(", "file", ")", ":", "if", "not", "self", ".", "is_parent", ":", "log", ".", "info", "(", "\"Loading '%s.npz'...\"", "%", "name", ")", "try", ":", "data", "=", "np", ".", "load", "(", "file", ")", "for", "key", "in", "data", ".", "keys", "(", ")", ":", "try", ":", "setattr", "(", "self", ",", "key", ",", "data", "[", "key", "]", "[", "(", ")", "]", ")", "except", "NotImplementedError", ":", "pass", "# HACK: Backwards compatibility. Previous version stored", "# the CDPP in the `cdpp6`", "# and `cdpp6_arr` attributes. Let's move them over.", "if", "hasattr", "(", "self", ",", "'cdpp6'", ")", ":", "self", ".", "cdpp", "=", "self", ".", "cdpp6", "del", "self", ".", "cdpp6", "if", "hasattr", "(", "self", ",", "'cdpp6_arr'", ")", ":", "self", ".", "cdpp_arr", "=", "np", ".", "array", "(", "self", ".", "cdpp6_arr", ")", "del", "self", ".", "cdpp6_arr", "if", "hasattr", "(", "self", ",", "'gppp'", ")", ":", "self", ".", "cdppg", "=", "self", ".", "gppp", "del", "self", ".", "gppp", "# HACK: At one point we were saving the figure instances,", "# so loading the .npz", "# opened a plotting window. I don't think this is the case", "# any more, so this", "# next line should be removed in the future...", "pl", ".", "close", "(", ")", "return", "True", "except", ":", "log", ".", "warn", "(", "\"Error loading '%s.npz'.\"", "%", "name", ")", "exctype", ",", "value", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "for", "line", "in", "traceback", ".", "format_exception_only", "(", "exctype", ",", "value", ")", ":", "ln", "=", "line", ".", "replace", "(", "'\\n'", ",", "''", ")", "log", ".", "warn", "(", "ln", ")", "os", ".", "rename", "(", "file", ",", "file", "+", "'.bad'", ")", "if", "self", ".", "is_parent", ":", "raise", "Exception", "(", "'Unable to load `%s` model for target %d.'", "%", "(", "self", ".", "name", ",", "self", ".", "ID", ")", ")", "return", "False" ]
34.655172
16.827586
def check_type(self, value, attr, data): """Customize check_type for handling containers.""" # Check the type in the standard way first, in order to fail quickly # in case of invalid values. root_value = super(InstructionParameter, self).check_type( value, attr, data) if is_collection(value): _ = [super(InstructionParameter, self).check_type(item, attr, data) for item in value] return root_value
[ "def", "check_type", "(", "self", ",", "value", ",", "attr", ",", "data", ")", ":", "# Check the type in the standard way first, in order to fail quickly", "# in case of invalid values.", "root_value", "=", "super", "(", "InstructionParameter", ",", "self", ")", ".", "check_type", "(", "value", ",", "attr", ",", "data", ")", "if", "is_collection", "(", "value", ")", ":", "_", "=", "[", "super", "(", "InstructionParameter", ",", "self", ")", ".", "check_type", "(", "item", ",", "attr", ",", "data", ")", "for", "item", "in", "value", "]", "return", "root_value" ]
39.833333
18.583333
def disablingBuidCache(self): ''' Disable and invalidate the layer buid cache for migration ''' self.buidcache = s_cache.LruDict(0) yield self.buidcache = s_cache.LruDict(BUID_CACHE_SIZE)
[ "def", "disablingBuidCache", "(", "self", ")", ":", "self", ".", "buidcache", "=", "s_cache", ".", "LruDict", "(", "0", ")", "yield", "self", ".", "buidcache", "=", "s_cache", ".", "LruDict", "(", "BUID_CACHE_SIZE", ")" ]
32.714286
20.142857
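The flattened text above reads like a syntax error (`yield self.buidcache = ...`), but the token list shows a bare `yield` between two assignments: this is a generator intended for use as a context manager, whose `@contextlib.contextmanager` decorator was presumably stripped by the extractor (an assumption). A standalone sketch of the same shape:

import contextlib

class _Layer:
    buidcache = {}

@contextlib.contextmanager
def disabling_buid_cache(layer):
    layer.buidcache = {}   # stand-in for s_cache.LruDict(0): caching disabled
    yield                  # migration work happens while the cache is suspended
    layer.buidcache = {}   # stand-in for s_cache.LruDict(BUID_CACHE_SIZE)
    # Note: like the original, there is no try/finally, so the cache is not
    # restored if the body of the with-block raises.

with disabling_buid_cache(_Layer()):
    pass  # migration work runs here with the buid cache disabled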
def process_firehose_archive(bucket, key): """Download firehose archive, aggregate records in memory and write back.""" data = {} with tempfile.NamedTemporaryFile(mode='w+b') as fh: s3.download_file(bucket, key, fh.name) log.warning("Downloaded Key Size:%s Key:%s", sizeof_fmt(os.path.getsize(fh.name)), key) fh.seek(0, 0) record_count = 0 iteration_count = 0 for r in records_iter(gzip.GzipFile(fh.name, mode='r')): record_count += len(r['logEvents']) iteration_count += 1 key = '%s/%s/%s' % (r['owner'], r['logGroup'], r['logStream']) data.setdefault(key, []).extend(r['logEvents']) if record_count > EVENTS_SIZE_BUFFER: log.warning( "Incremental Data Load records:%d enis:%d", record_count, len(data)) for k in data: process_record_set(k, data[k]) data.clear() gc.collect() record_count = 0 for k in data: process_record_set(k, data[k]) data.clear() gc.collect()
[ "def", "process_firehose_archive", "(", "bucket", ",", "key", ")", ":", "data", "=", "{", "}", "with", "tempfile", ".", "NamedTemporaryFile", "(", "mode", "=", "'w+b'", ")", "as", "fh", ":", "s3", ".", "download_file", "(", "bucket", ",", "key", ",", "fh", ".", "name", ")", "log", ".", "warning", "(", "\"Downloaded Key Size:%s Key:%s\"", ",", "sizeof_fmt", "(", "os", ".", "path", ".", "getsize", "(", "fh", ".", "name", ")", ")", ",", "key", ")", "fh", ".", "seek", "(", "0", ",", "0", ")", "record_count", "=", "0", "iteration_count", "=", "0", "for", "r", "in", "records_iter", "(", "gzip", ".", "GzipFile", "(", "fh", ".", "name", ",", "mode", "=", "'r'", ")", ")", ":", "record_count", "+=", "len", "(", "r", "[", "'logEvents'", "]", ")", "iteration_count", "+=", "1", "key", "=", "'%s/%s/%s'", "%", "(", "r", "[", "'owner'", "]", ",", "r", "[", "'logGroup'", "]", ",", "r", "[", "'logStream'", "]", ")", "data", ".", "setdefault", "(", "key", ",", "[", "]", ")", ".", "extend", "(", "r", "[", "'logEvents'", "]", ")", "if", "record_count", ">", "EVENTS_SIZE_BUFFER", ":", "log", ".", "warning", "(", "\"Incremental Data Load records:%d enis:%d\"", ",", "record_count", ",", "len", "(", "data", ")", ")", "for", "k", "in", "data", ":", "process_record_set", "(", "k", ",", "data", "[", "k", "]", ")", "data", ".", "clear", "(", ")", "gc", ".", "collect", "(", ")", "record_count", "=", "0", "for", "k", "in", "data", ":", "process_record_set", "(", "k", ",", "data", "[", "k", "]", ")", "data", ".", "clear", "(", ")", "gc", ".", "collect", "(", ")" ]
39.1
14.566667
def types(self): """ List of the known event types """ r = requests.get(self.evaluator_url + 'types') r.raise_for_status() return r.json()
[ "def", "types", "(", "self", ")", ":", "r", "=", "requests", ".", "get", "(", "self", ".", "evaluator_url", "+", "'types'", ")", "r", ".", "raise_for_status", "(", ")", "return", "r", ".", "json", "(", ")" ]
25.714286
10
def is_rate_matrix(K, tol): """ True if K is a rate matrix Parameters ---------- K : scipy.sparse matrix Matrix to check tol : float tolerance to check with Returns ------- Truth value : bool True if the negated diagonal of K is positive and each row sums to zero; False otherwise. """ K = K.tocsr() # check rows sum up to zero. row_sum = K.sum(axis=1) sum_eq_zero = np.allclose(row_sum, np.zeros(shape=row_sum.shape), atol=tol) # store copy of original diagonal org_diag = K.diagonal() # substract diagonal K = K - diags(org_diag, 0) # check off diagonals are > 0 values = K.data values_gt_zero = np.allclose(values, np.abs(values), atol=tol) # add diagonal K = K + diags(org_diag, 0) return values_gt_zero and sum_eq_zero
[ "def", "is_rate_matrix", "(", "K", ",", "tol", ")", ":", "K", "=", "K", ".", "tocsr", "(", ")", "# check rows sum up to zero.", "row_sum", "=", "K", ".", "sum", "(", "axis", "=", "1", ")", "sum_eq_zero", "=", "np", ".", "allclose", "(", "row_sum", ",", "np", ".", "zeros", "(", "shape", "=", "row_sum", ".", "shape", ")", ",", "atol", "=", "tol", ")", "# store copy of original diagonal", "org_diag", "=", "K", ".", "diagonal", "(", ")", "# substract diagonal", "K", "=", "K", "-", "diags", "(", "org_diag", ",", "0", ")", "# check off diagonals are > 0", "values", "=", "K", ".", "data", "values_gt_zero", "=", "np", ".", "allclose", "(", "values", ",", "np", ".", "abs", "(", "values", ")", ",", "atol", "=", "tol", ")", "# add diagonal", "K", "=", "K", "+", "diags", "(", "org_diag", ",", "0", ")", "return", "values_gt_zero", "and", "sum_eq_zero" ]
22.638889
20.972222
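A quick check against hand-built matrices (assumes `is_rate_matrix` above is in scope together with its `np` and `diags` imports):

import numpy as np
from scipy.sparse import csr_matrix

# A valid 3-state rate matrix: off-diagonals >= 0, every row sums to zero.
K = csr_matrix(np.array([[-0.3,  0.2,  0.1],
                         [ 0.5, -0.7,  0.2],
                         [ 0.0,  0.4, -0.4]]))
print(is_rate_matrix(K, tol=1e-12))   # True
print(is_rate_matrix(-K, tol=1e-12))  # False: rows still sum to zero,
                                      # but the off-diagonals are negative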
def load_plugin_modules(self, modnames): """take a list of module names which are pylint plugins and load and register them """ for modname in modnames: if modname in self._dynamic_plugins: continue self._dynamic_plugins.add(modname) module = modutils.load_module_from_name(modname) module.register(self)
[ "def", "load_plugin_modules", "(", "self", ",", "modnames", ")", ":", "for", "modname", "in", "modnames", ":", "if", "modname", "in", "self", ".", "_dynamic_plugins", ":", "continue", "self", ".", "_dynamic_plugins", ".", "add", "(", "modname", ")", "module", "=", "modutils", ".", "load_module_from_name", "(", "modname", ")", "module", ".", "register", "(", "self", ")" ]
39.1
8
def get_column_names(self, virtual=True, strings=True, hidden=False, regex=None): """Return a list of column names Example: >>> import vaex >>> df = vaex.from_scalars(x=1, x2=2, y=3, s='string') >>> df['r'] = (df.x**2 + df.y**2)**2 >>> df.get_column_names() ['x', 'x2', 'y', 's', 'r'] >>> df.get_column_names(virtual=False) ['x', 'x2', 'y', 's'] >>> df.get_column_names(regex='x.*') ['x', 'x2'] :param virtual: If False, skip virtual columns :param hidden: If False, skip hidden columns :param strings: If False, skip string columns :param regex: Only return column names matching the (optional) regular expression :rtype: list of str """ def column_filter(name): '''Return True if column with specified name should be returned''' if regex and not re.match(regex, name): return False if not virtual and name in self.virtual_columns: return False if not strings and (self.dtype(name) == str_type or self.dtype(name).type == np.string_): return False if not hidden and name.startswith('__'): return False return True return [name for name in self.column_names if column_filter(name)]
[ "def", "get_column_names", "(", "self", ",", "virtual", "=", "True", ",", "strings", "=", "True", ",", "hidden", "=", "False", ",", "regex", "=", "None", ")", ":", "def", "column_filter", "(", "name", ")", ":", "'''Return True if column with specified name should be returned'''", "if", "regex", "and", "not", "re", ".", "match", "(", "regex", ",", "name", ")", ":", "return", "False", "if", "not", "virtual", "and", "name", "in", "self", ".", "virtual_columns", ":", "return", "False", "if", "not", "strings", "and", "(", "self", ".", "dtype", "(", "name", ")", "==", "str_type", "or", "self", ".", "dtype", "(", "name", ")", ".", "type", "==", "np", ".", "string_", ")", ":", "return", "False", "if", "not", "hidden", "and", "name", ".", "startswith", "(", "'__'", ")", ":", "return", "False", "return", "True", "return", "[", "name", "for", "name", "in", "self", ".", "column_names", "if", "column_filter", "(", "name", ")", "]" ]
38.409091
18.068182
def process(self, items_block): """Process items to add file related information. Eventize items creating one new item for each file found in the commit (excluding files with no actions performed on them). For each event, file path, file name, path parts, file type and file extension are added as fields. :param items_block: items to be processed. Expects to find ElasticSearch hits _source part only. """ logger.info(self.__log_prefix + " New commits: " + str(len(items_block))) # Create events from commits git_events = Git(items_block, self._git_enrich) events_df = git_events.eventize(2) logger.info(self.__log_prefix + " New events: " + str(len(events_df))) if len(events_df) > 0: # Filter information data_filtered = FilterRows(events_df) events_df = data_filtered.filter_(["filepath"], "-") logger.info(self.__log_prefix + " New events filtered: " + str(len(events_df))) events_df['message'] = events_df['message'].str.slice(stop=AreasOfCode.MESSAGE_MAX_SIZE) logger.info(self.__log_prefix + " Remove message content") # Add filetype info enriched_filetype = FileType(events_df) events_df = enriched_filetype.enrich('filepath') logger.info(self.__log_prefix + " New Filetype events: " + str(len(events_df))) # Split filepath info enriched_filepath = FilePath(events_df) events_df = enriched_filepath.enrich('filepath') logger.info(self.__log_prefix + " New Filepath events: " + str(len(events_df))) # Deal with surrogates convert = ToUTF8(events_df) events_df = convert.enrich(["owner"]) logger.info(self.__log_prefix + " Final new events: " + str(len(events_df))) return self.ProcessResults(processed=len(events_df), out_items=events_df)
[ "def", "process", "(", "self", ",", "items_block", ")", ":", "logger", ".", "info", "(", "self", ".", "__log_prefix", "+", "\" New commits: \"", "+", "str", "(", "len", "(", "items_block", ")", ")", ")", "# Create events from commits", "git_events", "=", "Git", "(", "items_block", ",", "self", ".", "_git_enrich", ")", "events_df", "=", "git_events", ".", "eventize", "(", "2", ")", "logger", ".", "info", "(", "self", ".", "__log_prefix", "+", "\" New events: \"", "+", "str", "(", "len", "(", "events_df", ")", ")", ")", "if", "len", "(", "events_df", ")", ">", "0", ":", "# Filter information", "data_filtered", "=", "FilterRows", "(", "events_df", ")", "events_df", "=", "data_filtered", ".", "filter_", "(", "[", "\"filepath\"", "]", ",", "\"-\"", ")", "logger", ".", "info", "(", "self", ".", "__log_prefix", "+", "\" New events filtered: \"", "+", "str", "(", "len", "(", "events_df", ")", ")", ")", "events_df", "[", "'message'", "]", "=", "events_df", "[", "'message'", "]", ".", "str", ".", "slice", "(", "stop", "=", "AreasOfCode", ".", "MESSAGE_MAX_SIZE", ")", "logger", ".", "info", "(", "self", ".", "__log_prefix", "+", "\" Remove message content\"", ")", "# Add filetype info", "enriched_filetype", "=", "FileType", "(", "events_df", ")", "events_df", "=", "enriched_filetype", ".", "enrich", "(", "'filepath'", ")", "logger", ".", "info", "(", "self", ".", "__log_prefix", "+", "\" New Filetype events: \"", "+", "str", "(", "len", "(", "events_df", ")", ")", ")", "# Split filepath info", "enriched_filepath", "=", "FilePath", "(", "events_df", ")", "events_df", "=", "enriched_filepath", ".", "enrich", "(", "'filepath'", ")", "logger", ".", "info", "(", "self", ".", "__log_prefix", "+", "\" New Filepath events: \"", "+", "str", "(", "len", "(", "events_df", ")", ")", ")", "# Deal with surrogates", "convert", "=", "ToUTF8", "(", "events_df", ")", "events_df", "=", "convert", ".", "enrich", "(", "[", "\"owner\"", "]", ")", "logger", ".", "info", "(", "self", ".", "__log_prefix", "+", "\" Final new events: \"", "+", "str", "(", "len", "(", "events_df", ")", ")", ")", "return", "self", ".", "ProcessResults", "(", "processed", "=", "len", "(", "events_df", ")", ",", "out_items", "=", "events_df", ")" ]
41.085106
29.170213
def resp_graph(dataframe, image_name, dir='./'): """Response time graph for bucketed data :param pandas.DataFrame dataframe: dataframe containing all data :param str image_name: the output file name :param str dir: the output directory :return: None """ fig = pygal.TimeLine(x_title='Elapsed Time In Test (secs)', y_title='Response Time (secs)', x_label_rotation=25, js=('scripts/pygal-tooltip.min.js',)) fig.add('AVG', [(get_local_time(index), row['mean'] if pd.notnull(row['mean']) else None) for index, row in dataframe.iterrows()]) fig.add('90%', [(get_local_time(index), row['90%'] if pd.notnull(row['90%']) else None) for index, row in dataframe.iterrows()]) fig.add('80%', [(get_local_time(index), row['80%'] if pd.notnull(row['80%']) else None) for index, row in dataframe.iterrows()]) fig.render_to_file(filename=os.path.join(dir, image_name))
[ "def", "resp_graph", "(", "dataframe", ",", "image_name", ",", "dir", "=", "'./'", ")", ":", "fig", "=", "pygal", ".", "TimeLine", "(", "x_title", "=", "'Elapsed Time In Test (secs)'", ",", "y_title", "=", "'Response Time (secs)'", ",", "x_label_rotation", "=", "25", ",", "js", "=", "(", "'scripts/pygal-tooltip.min.js'", ",", ")", ")", "fig", ".", "add", "(", "'AVG'", ",", "[", "(", "get_local_time", "(", "index", ")", ",", "row", "[", "'mean'", "]", "if", "pd", ".", "notnull", "(", "row", "[", "'mean'", "]", ")", "else", "None", ")", "for", "index", ",", "row", "in", "dataframe", ".", "iterrows", "(", ")", "]", ")", "fig", ".", "add", "(", "'90%'", ",", "[", "(", "get_local_time", "(", "index", ")", ",", "row", "[", "'90%'", "]", "if", "pd", ".", "notnull", "(", "row", "[", "'90%'", "]", ")", "else", "None", ")", "for", "index", ",", "row", "in", "dataframe", ".", "iterrows", "(", ")", "]", ")", "fig", ".", "add", "(", "'80%'", ",", "[", "(", "get_local_time", "(", "index", ")", ",", "row", "[", "'80%'", "]", "if", "pd", ".", "notnull", "(", "row", "[", "'80%'", "]", ")", "else", "None", ")", "for", "index", ",", "row", "in", "dataframe", ".", "iterrows", "(", ")", "]", ")", "fig", ".", "render_to_file", "(", "filename", "=", "os", ".", "path", ".", "join", "(", "dir", ",", "image_name", ")", ")" ]
53.368421
21.526316
def devectorize_utterance(self, utterance): """ Take in a sequence of indices and transform it back into a tokenized utterance """ utterance = self.swap_pad_and_zero(utterance) return self.ie.inverse_transform(utterance).tolist()
[ "def", "devectorize_utterance", "(", "self", ",", "utterance", ")", ":", "utterance", "=", "self", ".", "swap_pad_and_zero", "(", "utterance", ")", "return", "self", ".", "ie", ".", "inverse_transform", "(", "utterance", ")", ".", "tolist", "(", ")" ]
44
13.666667
def makefile(identifier, dependencies, makepath, compileid, precompile=False, inclfortpy=True, parser=None, executable=True, extralinks=None, inclfpyaux=False, makefpyaux=False, verbose=False): """Generates a makefile to create the unit testing executable for the specified test identifier. :arg identifier: the id of the test/library that this makefile should be made for. :arg dependencies: a list of the module names that need to be included in the compilation. :arg makepath: the path to the file to save the Makefile in. :arg compileid: the 'module.executable' that this Makefile is being produced for. :arg precompile: when True, the precompiler flags will be added to the makefile. :arg inclfortpy: when True, the fortpy module will be added first to the list of modules to compile for the executable/library. :arg parser: if the module file names are different from the module names, specify a code parser to use for converting one to the other. :arg executable: when True, an executable is compiled for rule 'all'; otherwise the library is the default and the executable is set as a different rule, '<identifier>.x'. :arg extralinks: a list of additional libraries to link in with the explicitly compiled f90 files. These aren't checked at all, just added to the linklist. :arg verbose: when True, the full compilation header will be printed with flags and module information; otherwise it won't. """ lines = [] #Append the general variables lines.append("EXENAME\t\t= {}.x".format(identifier)) lines.append("SHELL\t\t= /bin/bash") lines.append("UNAME\t\t= $(shell uname)") lines.append("HOSTNAME\t= $(shell hostname)") lines.append("LOG\t\t= compile.{}.log".format(identifier if identifier is not None else "default")) lines.append("") #Now the standard entries for ifort. We will just have the ifort include #file so that the MPI and other options can be tested to. lines.append(_make_compiler_include(precompile, extralinks)) lines.append(".SILENT:") lines.append("") #Append all the dependent modules to the makefile lines.append("LIBMODULESF90\t= \\") for modk in dependencies: if modk not in ["fortpy", "fpy_auxiliary", identifier]: if parser is not None: lines.append("\t\t{} \\".format(_get_mapping(parser, modk))) else: lines.append("\t\t{} \\".format(modk)) if makefpyaux: lines.append("\t\tfpy_auxiliary.f90 \\") lines.append("") lines.append("MAINF90\t\t= {}.f90".format(identifier)) lines.append("SRCF90\t\t= $(LIBMODULESF90) $(MAINF90)") lines.append("OBJSF90\t\t= $(SRCF90:.f90=.o)") lines.append("SLIBF90\t\t= $(LIBMODULESF90:.f90=.o)") lines.append("") #Add explicitly defined libraries that should be included when linking #the unit testing executable. linklibs = True _add_explicit_includes(lines, dependencies, extralinks) if inclfortpy or inclfpyaux: import sys if len(sys.modules["config"].includes) == 0: lines.append("LIBS\t\t= \\") if inclfortpy: lines.append("\t\tfortpy.o \\") if inclfpyaux: lines.append("\t\tfpy_aux.so \\") lines.append("") #We need to add the error handling commands to make debugging compiling easier. lines.append(_make_error()) lines.append("") main = "$(EXENAME)" if executable == True else "{}.{}".format(identifier, executable) lines.append("all: info {}".format(main)) lines.append(_make_info(compileid, verbose)) lines.append(_make_exe(linklibs, identifier, verbose)) from os import path makedir, makef = path.split(makepath) lines[-1] += " make -f '{}'".format(makef) with open(makepath, 'w') as f: f.writelines("\n".join(lines))
[ "def", "makefile", "(", "identifier", ",", "dependencies", ",", "makepath", ",", "compileid", ",", "precompile", "=", "False", ",", "inclfortpy", "=", "True", ",", "parser", "=", "None", ",", "executable", "=", "True", ",", "extralinks", "=", "None", ",", "inclfpyaux", "=", "False", ",", "makefpyaux", "=", "False", ",", "verbose", "=", "False", ")", ":", "lines", "=", "[", "]", "#Append the general variables", "lines", ".", "append", "(", "\"EXENAME\\t\\t= {}.x\"", ".", "format", "(", "identifier", ")", ")", "lines", ".", "append", "(", "\"SHELL\\t\\t= /bin/bash\"", ")", "lines", ".", "append", "(", "\"UNAME\\t\\t= $(shell uname)\"", ")", "lines", ".", "append", "(", "\"HOSTNAME\\t= $(shell hostname)\"", ")", "lines", ".", "append", "(", "\"LOG\\t\\t= compile.{}.log\"", ".", "format", "(", "identifier", "if", "identifier", "is", "not", "None", "else", "\"default\"", ")", ")", "lines", ".", "append", "(", "\"\"", ")", "#Now the standard entries for ifort. We will just have the ifort include", "#file so that the MPI and other options can be tested to.", "lines", ".", "append", "(", "_make_compiler_include", "(", "precompile", ",", "extralinks", ")", ")", "lines", ".", "append", "(", "\".SILENT:\"", ")", "lines", ".", "append", "(", "\"\"", ")", "#Append all the dependent modules to the makefile", "lines", ".", "append", "(", "\"LIBMODULESF90\\t= \\\\\"", ")", "for", "modk", "in", "dependencies", ":", "if", "modk", "not", "in", "[", "\"fortpy\"", ",", "\"fpy_auxiliary\"", ",", "identifier", "]", ":", "if", "parser", "is", "not", "None", ":", "lines", ".", "append", "(", "\"\\t\\t{} \\\\\"", ".", "format", "(", "_get_mapping", "(", "parser", ",", "modk", ")", ")", ")", "else", ":", "lines", ".", "append", "(", "\"\\t\\t{} \\\\\"", ".", "format", "(", "modk", ")", ")", "if", "makefpyaux", ":", "lines", ".", "append", "(", "\"\\t\\tfpy_auxiliary.f90 \\\\\"", ")", "lines", ".", "append", "(", "\"\"", ")", "lines", ".", "append", "(", "\"MAINF90\\t\\t= {}.f90\"", ".", "format", "(", "identifier", ")", ")", "lines", ".", "append", "(", "\"SRCF90\\t\\t= $(LIBMODULESF90) $(MAINF90)\"", ")", "lines", ".", "append", "(", "\"OBJSF90\\t\\t= $(SRCF90:.f90=.o)\"", ")", "lines", ".", "append", "(", "\"SLIBF90\\t\\t= $(LIBMODULESF90:.f90=.o)\"", ")", "lines", ".", "append", "(", "\"\"", ")", "#Add explicitly defined libraries that should be included when linking", "#the unit testing executable.", "linklibs", "=", "True", "_add_explicit_includes", "(", "lines", ",", "dependencies", ",", "extralinks", ")", "if", "inclfortpy", "or", "inclfpyaux", ":", "import", "sys", "if", "len", "(", "sys", ".", "modules", "[", "\"config\"", "]", ".", "includes", ")", "==", "0", ":", "lines", ".", "append", "(", "\"LIBS\\t\\t= \\\\\"", ")", "if", "inclfortpy", ":", "lines", ".", "append", "(", "\"\\t\\tfortpy.o \\\\\"", ")", "if", "inclfpyaux", ":", "lines", ".", "append", "(", "\"\\t\\tfpy_aux.so \\\\\"", ")", "lines", ".", "append", "(", "\"\"", ")", "#We need to add the error handling commands to make debugging compiling easier.", "lines", ".", "append", "(", "_make_error", "(", ")", ")", "lines", ".", "append", "(", "\"\"", ")", "main", "=", "\"$(EXENAME)\"", "if", "executable", "==", "True", "else", "\"{}.{}\"", ".", "format", "(", "identifier", ",", "executable", ")", "lines", ".", "append", "(", "\"all:\tinfo {}\"", ".", "format", "(", "main", ")", ")", "lines", ".", "append", "(", "_make_info", "(", "compileid", ",", "verbose", ")", ")", "lines", ".", "append", "(", "_make_exe", "(", "linklibs", ",", "identifier", 
",", "verbose", ")", ")", "from", "os", "import", "path", "makedir", ",", "makef", "=", "path", ".", "split", "(", "makepath", ")", "lines", "[", "-", "1", "]", "+=", "\"\tmake -f '{}'\"", ".", "format", "(", "makef", ")", "with", "open", "(", "makepath", ",", "'w'", ")", "as", "f", ":", "f", ".", "writelines", "(", "\"\\n\"", ".", "join", "(", "lines", ")", ")" ]
43.292135
22.977528
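A hypothetical invocation; every identifier and path below is made up, and only the documented signature is exercised:

makefile('classes', dependencies=['fortpy', 'utilities', 'classes'],
         makepath='tests/classes/Makefile.classes',
         compileid='classes.test_methods',
         inclfortpy=True, executable=True, verbose=True)
# Writes tests/classes/Makefile.classes with an 'all' rule that builds classes.x.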
def check_var(var, var_types:Union[type, List[type]] =None, var_name=None, enforce_not_none:bool = True, allowed_values:Set = None, min_value = None, min_strict:bool = False, max_value = None, max_strict:bool = False, min_len:int = None, min_len_strict:bool = False, max_len:int = None, max_len_strict:bool = False): """ Helper method to check that an object has certain properties: * not none * a certain type * in some accepted values * in some accepted range :param var: the object to check :param var_types: the type(s) to enforce. If None, type will not be enforced :param var_name: the name of the variable to be used in error messages :param enforce_not_none: boolean, default True. Whether to enforce that var is not None. :param allowed_values: an optional set of allowed values :param min_value: an optional minimum value :param min_strict: if True, only values strictly greater than the minimum value will be accepted :param max_value: an optional maximum value :param max_strict: if True, only values strictly lesser than the maximum value will be accepted :return: """ var_name = var_name or 'object' if enforce_not_none and (var is None): # enforce not none raise MissingMandatoryParameterException('Error, ' + var_name + '" is mandatory, it should be non-None') if not (var is None) and not (var_types is None): # enforce type if not isinstance(var_types, list): var_types = [var_types] match = False for var_type in var_types: # just in case, even though users should use FunctionType or MethodType which is the true type if var_type is Callable: if callable(var): match = True break else: if isinstance(var, var_type): match = True break if not match: raise TypeError('Error, ' + var_name + '" should be one of type(s) ' + str(var_types) + ', found: ' + str(type(var))) if var is not None: if allowed_values is not None: # enforce allowed values if var not in allowed_values: raise TypeError('Error, ' + var_name + '" should be one of "' + str(allowed_values) + '", found: ' + str(var)) if min_value is not None: # enforce min value if min_strict: if not (var > min_value): raise TypeError( 'Error, ' + var_name + '" should be strictly greater than "' + str(min_value) + '", found: ' + str(var)) else: if not (var >= min_value): raise TypeError( 'Error, ' + var_name + '" should be greater than "' + str(min_value) + '", found: ' + str(var)) if max_value is not None: # enforce max value if max_strict: if not (var < max_value): raise TypeError( 'Error, ' + var_name + '" should be strictly lesser than "' + str(max_value) + '", found: ' + str(var)) else: if not (var <= max_value): raise TypeError( 'Error, ' + var_name + '" should be lesser than "' + str(max_value) + '", found: ' + str(var)) if min_len is not None: # enforce min length if min_len_strict: if not (len(var) > min_len): raise TypeError( 'Error, ' + var_name + '" length should be strictly greater than "' + str(min_len) + '", found: ' + str(len(var))) else: if not (len(var) >= min_len): raise TypeError( 'Error, ' + var_name + '" length should be greater than "' + str(min_len) + '", found: ' + str(len(var))) if max_len is not None: # enforce max length if max_len_strict: if not (len(var) < max_len): raise TypeError( 'Error, ' + var_name + '" length should be strictly lesser than "' + str(max_len) + '", found: ' + str(len(var))) else: if not (len(var) <= max_len): raise TypeError( 'Error, ' + var_name + '" length should be lesser than "' + str(max_len) + '", found: ' + str(len(var)))
[ "def", "check_var", "(", "var", ",", "var_types", ":", "Union", "[", "type", ",", "List", "[", "type", "]", "]", "=", "None", ",", "var_name", "=", "None", ",", "enforce_not_none", ":", "bool", "=", "True", ",", "allowed_values", ":", "Set", "=", "None", ",", "min_value", "=", "None", ",", "min_strict", ":", "bool", "=", "False", ",", "max_value", "=", "None", ",", "max_strict", ":", "bool", "=", "False", ",", "min_len", ":", "int", "=", "None", ",", "min_len_strict", ":", "bool", "=", "False", ",", "max_len", ":", "int", "=", "None", ",", "max_len_strict", ":", "bool", "=", "False", ")", ":", "var_name", "=", "var_name", "or", "'object'", "if", "enforce_not_none", "and", "(", "var", "is", "None", ")", ":", "# enforce not none", "raise", "MissingMandatoryParameterException", "(", "'Error, '", "+", "var_name", "+", "'\" is mandatory, it should be non-None'", ")", "if", "not", "(", "var", "is", "None", ")", "and", "not", "(", "var_types", "is", "None", ")", ":", "# enforce type", "if", "not", "isinstance", "(", "var_types", ",", "list", ")", ":", "var_types", "=", "[", "var_types", "]", "match", "=", "False", "for", "var_type", "in", "var_types", ":", "# just in case, even though users should use FunctionType or MethodType which is the true type", "if", "var_type", "is", "Callable", ":", "if", "callable", "(", "var", ")", ":", "match", "=", "True", "break", "else", ":", "if", "isinstance", "(", "var", ",", "var_type", ")", ":", "match", "=", "True", "break", "if", "not", "match", ":", "raise", "TypeError", "(", "'Error, '", "+", "var_name", "+", "'\" should be one of type(s) '", "+", "str", "(", "var_types", ")", "+", "', found: '", "+", "str", "(", "type", "(", "var", ")", ")", ")", "if", "var", "is", "not", "None", ":", "if", "allowed_values", "is", "not", "None", ":", "# enforce allowed values", "if", "var", "not", "in", "allowed_values", ":", "raise", "TypeError", "(", "'Error, '", "+", "var_name", "+", "'\" should be one of \"'", "+", "str", "(", "allowed_values", ")", "+", "'\", found: '", "+", "str", "(", "var", ")", ")", "if", "min_value", "is", "not", "None", ":", "# enforce min value", "if", "min_strict", ":", "if", "not", "(", "var", ">", "min_value", ")", ":", "raise", "TypeError", "(", "'Error, '", "+", "var_name", "+", "'\" should be strictly greater than \"'", "+", "str", "(", "min_value", ")", "+", "'\", found: '", "+", "str", "(", "var", ")", ")", "else", ":", "if", "not", "(", "var", ">=", "min_value", ")", ":", "raise", "TypeError", "(", "'Error, '", "+", "var_name", "+", "'\" should be greater than \"'", "+", "str", "(", "min_value", ")", "+", "'\", found: '", "+", "str", "(", "var", ")", ")", "if", "max_value", "is", "not", "None", ":", "# enforce max value", "if", "max_strict", ":", "if", "not", "(", "var", "<", "max_value", ")", ":", "raise", "TypeError", "(", "'Error, '", "+", "var_name", "+", "'\" should be strictly lesser than \"'", "+", "str", "(", "max_value", ")", "+", "'\", found: '", "+", "str", "(", "var", ")", ")", "else", ":", "if", "not", "(", "var", "<=", "max_value", ")", ":", "raise", "TypeError", "(", "'Error, '", "+", "var_name", "+", "'\" should be lesser than \"'", "+", "str", "(", "max_value", ")", "+", "'\", found: '", "+", "str", "(", "var", ")", ")", "if", "min_len", "is", "not", "None", ":", "# enforce min length", "if", "min_len_strict", ":", "if", "not", "(", "len", "(", "var", ")", ">", "min_len", ")", ":", "raise", "TypeError", "(", "'Error, '", "+", "var_name", "+", "'\" length should be strictly greater than \"'", "+", "str", "(", 
"min_len", ")", "+", "'\", found: '", "+", "str", "(", "len", "(", "var", ")", ")", ")", "else", ":", "if", "not", "(", "len", "(", "var", ")", ">=", "min_len", ")", ":", "raise", "TypeError", "(", "'Error, '", "+", "var_name", "+", "'\" length should be greater than \"'", "+", "str", "(", "min_len", ")", "+", "'\", found: '", "+", "str", "(", "len", "(", "var", ")", ")", ")", "if", "max_len", "is", "not", "None", ":", "# enforce max length", "if", "max_len_strict", ":", "if", "not", "(", "len", "(", "var", ")", "<", "max_len", ")", ":", "raise", "TypeError", "(", "'Error, '", "+", "var_name", "+", "'\" length should be strictly lesser than \"'", "+", "str", "(", "max_len", ")", "+", "'\", found: '", "+", "str", "(", "len", "(", "var", ")", ")", ")", "else", ":", "if", "not", "(", "len", "(", "var", ")", "<=", "max_len", ")", ":", "raise", "TypeError", "(", "'Error, '", "+", "var_name", "+", "'\" length should be lesser than \"'", "+", "str", "(", "max_len", ")", "+", "'\", found: '", "+", "str", "(", "len", "(", "var", ")", ")", ")" ]
45.408163
25.857143
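Usage sketch (assumes `check_var` and `MissingMandatoryParameterException` above are importable; the argument values are illustrative):

check_var(5, var_types=int, var_name='retries', min_value=0, max_value=10)
check_var('abc', var_types=str, var_name='token', min_len=2, max_len=8)
check_var('GET', var_name='method', allowed_values={'GET', 'POST'})

try:
    check_var(None, var_name='required_arg')
except MissingMandatoryParameterException as exc:
    print(exc)  # Error, required_arg" is mandatory, it should be non-None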
def normalize(self, text, normalizations=None): """Normalize a given text applying all normalizations. Normalizations to apply can be specified through a list of parameters and will be executed in that order. Args: text: The text to be processed. normalizations: List of normalizations to apply. Returns: The text normalized. """ for normalization, kwargs in self._parse_normalizations( normalizations or self._config.normalizations): try: text = getattr(self, normalization)(text, **kwargs) except AttributeError as e: self._logger.debug('Invalid normalization: %s', e) return text
[ "def", "normalize", "(", "self", ",", "text", ",", "normalizations", "=", "None", ")", ":", "for", "normalization", ",", "kwargs", "in", "self", ".", "_parse_normalizations", "(", "normalizations", "or", "self", ".", "_config", ".", "normalizations", ")", ":", "try", ":", "text", "=", "getattr", "(", "self", ",", "normalization", ")", "(", "text", ",", "*", "*", "kwargs", ")", "except", "AttributeError", "as", "e", ":", "self", ".", "_logger", ".", "debug", "(", "'Invalid normalization: %s'", ",", "e", ")", "return", "text" ]
33.545455
21.590909
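`_parse_normalizations` is not shown in the record, but the `getattr` dispatch implies each entry resolves to a method name plus a kwargs dict; a hypothetical call shape follows (the normalization names are invented for illustration):

normalizer.normalize('Some  RAW   text!!', normalizations=[
    'remove_extra_white_spaces',                   # bare name -> no kwargs (assumed)
    ('replace_punctuation', {'replacement': ''}),  # (name, kwargs) pair (assumed)
])
# Unknown names do not raise: the AttributeError handler above logs
# 'Invalid normalization' at debug level and skips the entry.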
def handleNotification(self, handle, raw_data): # pylint: disable=unused-argument,invalid-name """ gets called by the bluepy backend when using wait_for_notification """ if raw_data is None: return data = raw_data.decode("utf-8").strip(' \n\t') self._cache = data self._check_data() if self.cache_available(): self._last_read = datetime.now() else: # If a sensor doesn't work, wait 5 minutes before retrying self._last_read = datetime.now() - self._cache_timeout + \ timedelta(seconds=300)
[ "def", "handleNotification", "(", "self", ",", "handle", ",", "raw_data", ")", ":", "# pylint: disable=unused-argument,invalid-name", "if", "raw_data", "is", "None", ":", "return", "data", "=", "raw_data", ".", "decode", "(", "\"utf-8\"", ")", ".", "strip", "(", "' \\n\\t'", ")", "self", ".", "_cache", "=", "data", "self", ".", "_check_data", "(", ")", "if", "self", ".", "cache_available", "(", ")", ":", "self", ".", "_last_read", "=", "datetime", ".", "now", "(", ")", "else", ":", "# If a sensor doesn't work, wait 5 minutes before retrying", "self", ".", "_last_read", "=", "datetime", ".", "now", "(", ")", "-", "self", ".", "_cache_timeout", "+", "timedelta", "(", "seconds", "=", "300", ")" ]
43.214286
16.428571
def datapath(self): """ Get an item's data path. """ path = self._fields['path'] if not path: # stopped item with no base_dir? path = self.fetch('directory') if path and not self._fields['is_multi_file']: path = os.path.join(path, self._fields['name']) return os.path.expanduser(fmt.to_unicode(path))
[ "def", "datapath", "(", "self", ")", ":", "path", "=", "self", ".", "_fields", "[", "'path'", "]", "if", "not", "path", ":", "# stopped item with no base_dir?", "path", "=", "self", ".", "fetch", "(", "'directory'", ")", "if", "path", "and", "not", "self", ".", "_fields", "[", "'is_multi_file'", "]", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "self", ".", "_fields", "[", "'name'", "]", ")", "return", "os", ".", "path", ".", "expanduser", "(", "fmt", ".", "to_unicode", "(", "path", ")", ")" ]
41.444444
10.888889
def snmp_server_enable_trap_trap_flag(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") snmp_server = ET.SubElement(config, "snmp-server", xmlns="urn:brocade.com:mgmt:brocade-snmp") enable = ET.SubElement(snmp_server, "enable") trap = ET.SubElement(enable, "trap") trap_flag = ET.SubElement(trap, "trap-flag") callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "snmp_server_enable_trap_trap_flag", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "snmp_server", "=", "ET", ".", "SubElement", "(", "config", ",", "\"snmp-server\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-snmp\"", ")", "enable", "=", "ET", ".", "SubElement", "(", "snmp_server", ",", "\"enable\"", ")", "trap", "=", "ET", ".", "SubElement", "(", "enable", ",", "\"trap\"", ")", "trap_flag", "=", "ET", ".", "SubElement", "(", "trap", ",", "\"trap-flag\"", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
42.727273
15.727273
def get_negative(self, cls=None, **kwargs): """Returns a generator that generates negative cases by the "each negative value in a separate case" algorithm. """ for attr, set_of_values in kwargs.iteritems(): defaults = {key: kwargs[key][-1]["default"] for key in kwargs} defaults.pop(attr) for value in set_of_values[:-1]: case = cls() if cls else self._CasesClass() setattr(case, attr, value) for key in defaults: setattr(case, key, defaults[key]) yield case
[ "def", "get_negative", "(", "self", ",", "cls", "=", "None", ",", "*", "*", "kwargs", ")", ":", "for", "attr", ",", "set_of_values", "in", "kwargs", ".", "iteritems", "(", ")", ":", "defaults", "=", "{", "key", ":", "kwargs", "[", "key", "]", "[", "-", "1", "]", "[", "\"default\"", "]", "for", "key", "in", "kwargs", "}", "defaults", ".", "pop", "(", "attr", ")", "for", "value", "in", "set_of_values", "[", ":", "-", "1", "]", ":", "case", "=", "cls", "(", ")", "if", "cls", "else", "self", ".", "_CasesClass", "(", ")", "setattr", "(", "case", ",", "attr", ",", "value", ")", "for", "key", "in", "defaults", ":", "setattr", "(", "case", ",", "key", ",", "defaults", "[", "key", "]", ")", "yield", "case" ]
45.538462
10.307692
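A worked sketch of the algorithm: each kwarg maps to a list of negative values whose last element is a {'default': ...} dict (inferred from the [-1]["default"] indexing above); every yielded case sets exactly one attribute to a negative value and all the others to their defaults. The `factory` instance and the values are hypothetical:

cases = factory.get_negative(
    name=['', None, {'default': 'valid-name'}],
    port=[-1, 70000, {'default': 8080}],
)
# Yields four cases, one per negative value:
#   name=''     port=8080          |  name=None   port=8080
#   port=-1     name='valid-name'  |  port=70000  name='valid-name'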
def pipelines(self): """ Property for accessing :class:`PipelineManager` instance, which is used to manage pipelines. :rtype: yagocd.resources.pipeline.PipelineManager """ if self._pipeline_manager is None: self._pipeline_manager = PipelineManager(session=self._session) return self._pipeline_manager
[ "def", "pipelines", "(", "self", ")", ":", "if", "self", ".", "_pipeline_manager", "is", "None", ":", "self", ".", "_pipeline_manager", "=", "PipelineManager", "(", "session", "=", "self", ".", "_session", ")", "return", "self", ".", "_pipeline_manager" ]
39.222222
19.666667
def make_span(parented_tree): """create a 'span' or 'leaf' subtree for dis/lisp/RST-DT-formatted trees. Examples: span (a subtree that covers the leaves 1 to 7) ___|____ 1 7 leaf (a subtree that only covers leaf 7) | 7 """ all_leaves = all_leaf_positions(parented_tree) if is_root(parented_tree): return t('span', ['1', str(len(all_leaves))]) subtree_leaves = subtree_leaf_positions(parented_tree) if len(subtree_leaves) == 1: edu_id = all_leaves.index(subtree_leaves[0]) + 1 return t('leaf', [str(edu_id)]) elif len(subtree_leaves) > 1: first_edu_id = all_leaves.index(subtree_leaves[0]) + 1 last_edu_id = all_leaves.index(subtree_leaves[-1]) + 1 return t('span', [str(first_edu_id), str(last_edu_id)]) else: raise NotImplementedError('Subtree has no leaves')
[ "def", "make_span", "(", "parented_tree", ")", ":", "all_leaves", "=", "all_leaf_positions", "(", "parented_tree", ")", "if", "is_root", "(", "parented_tree", ")", ":", "return", "t", "(", "'span'", ",", "[", "'1'", ",", "str", "(", "len", "(", "all_leaves", ")", ")", "]", ")", "subtree_leaves", "=", "subtree_leaf_positions", "(", "parented_tree", ")", "if", "len", "(", "subtree_leaves", ")", "==", "1", ":", "edu_id", "=", "all_leaves", ".", "index", "(", "subtree_leaves", "[", "0", "]", ")", "+", "1", "return", "t", "(", "'leaf'", ",", "[", "str", "(", "edu_id", ")", "]", ")", "elif", "len", "(", "subtree_leaves", ")", ">", "1", ":", "first_edu_id", "=", "all_leaves", ".", "index", "(", "subtree_leaves", "[", "0", "]", ")", "+", "1", "last_edu_id", "=", "all_leaves", ".", "index", "(", "subtree_leaves", "[", "-", "1", "]", ")", "+", "1", "return", "t", "(", "'span'", ",", "[", "str", "(", "first_edu_id", ")", ",", "str", "(", "last_edu_id", ")", "]", ")", "else", ":", "raise", "NotImplementedError", "(", "'Subtree has no leaves'", ")" ]
35.269231
18.576923
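Expected outputs, sketched as comments because the helpers (all_leaf_positions, subtree_leaf_positions, is_root, t) live in the surrounding module; the tree shapes are hypothetical. For a ParentedTree whose leaves occupy positions 1..7:

# make_span(root)                       -> t('span', ['1', '7'])  # covers all leaves
# make_span(subtree_over_leaf_7)        -> t('leaf', ['7'])
# make_span(subtree_over_leaves_2_to_5) -> t('span', ['2', '5'])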
def write_text(_command, txt_file): """Dump SQL command to a text file.""" command = _command.strip() with open(txt_file, 'w') as txt: txt.writelines(command)
[ "def", "write_text", "(", "_command", ",", "txt_file", ")", ":", "command", "=", "_command", ".", "strip", "(", ")", "with", "open", "(", "txt_file", ",", "'w'", ")", "as", "txt", ":", "txt", ".", "writelines", "(", "command", ")" ]
34.8
5.6
def toroidal(target, mode='max', r_toroid=5e-6, target_Pc=None, num_points=1e2, surface_tension='pore.surface_tension', contact_angle='pore.contact_angle', throat_diameter='throat.diameter', touch_length='throat.touch_length'): r""" Calculate the filling angle (alpha) for a given capillary pressure Parameters ---------- target : OpenPNM Object The object for which these values are being calculated. This controls the length of the calculated array, and also provides access to other necessary thermofluid properties. mode : string (Default is 'max') Determines what information to send back. Options are: 'max' : the maximum capillary pressure along the throat axis 'touch' : the maximum capillary pressure a meniscus can sustain before touching a solid feature 'men' : return the meniscus info for a target pressure r_toroid : float or array_like The radius of the toroid surrounding the pore target_Pc : float The target capillary pressure num_points : float (Default 100) The number of divisions to make along the profile length to assess the meniscus properties in order to find target pressures, touch lengths, minima and maxima. surface_tension : dict key (string) The dictionary key containing the surface tension values to be used. If a pore property is given, it is interpolated to a throat list. contact_angle : dict key (string) The dictionary key containing the contact angle values to be used. If a pore property is given, it is interpolated to a throat list. throat_diameter : dict key (string) The dictionary key containing the throat diameter values to be used. touch_length : dict key (string) The dictionary key containing the maximum length that a meniscus can protrude into the connecting pore before touching a solid feature and therefore invading Notes ----- This approach accounts for the converging-diverging nature of many throat types. Advancing the meniscus beyond the apex of the toroid requires an increase in capillary pressure beyond that for a cylindrical tube of the same radius. The details of this equation are described by Mason and Morrow [1]_, and explored by Gostick [2]_ in the context of a pore network model. References ---------- .. [1] G. Mason, N. R. Morrow, Effect of contact angle on capillary displacement curvatures in pore throats formed by spheres. J. Colloid Interface Sci. 168, 130 (1994). .. [2] J. Gostick, Random pore network modeling of fibrous PEMFC gas diffusion media using Voronoi and Delaunay tessellations. J. Electrochem. Soc. 160, F731 (2013). """ network = target.project.network phase = target.project.find_phase(target) element, sigma, theta = _get_key_props(phase=phase, diameter=throat_diameter, surface_tension=surface_tension, contact_angle=contact_angle) x, R, rt, s, t = syp.symbols('x, R, rt, s, t') # Equation of circle re-arranged for y y = R*syp.sqrt(1 - (x/R)**2) # Throat radius profile r = rt + (R-y) # Derivative of profile rprime = r.diff(x) # Filling angle alpha = syp.atan(rprime) # Radius of curvature of meniscus rm = r/syp.cos(alpha+t) # distance from center of curvature to meniscus contact point (Pythagoras) a = syp.sqrt(rm**2 - r**2) # angle between throat axis, meniscus center and meniscus contact point gamma = syp.atan(r/a) # Capillary Pressure f = -2*s*syp.cos(alpha+t)/r # Callable Functions rx = syp.lambdify((x, R, rt), r, 'numpy') fill_angle = syp.lambdify((x, R, rt), alpha, 'numpy') Pc = syp.lambdify((x, R, rt, s, t), f, 'numpy') rad_curve = syp.lambdify((x, R, rt, s, t), rm, 'numpy') c2x = syp.lambdify((x, R, rt, s, t), a, 'numpy') cap_angle = syp.lambdify((x, R, rt, s, t), gamma, 'numpy') # Contact Angle theta = np.deg2rad(theta) # Network properties throatRad = network[throat_diameter]/2 pos = np.arange(-r_toroid*0.999, r_toroid*0.999, r_toroid/num_points) fiberRad = np.ones(len(throatRad))*r_toroid # Now find the positions of the menisci along each throat axis Y, X = np.meshgrid(throatRad, pos) t_Pc = Pc(X, fiberRad, Y, sigma, theta) # Values of minima and maxima Pc_min = np.min(t_Pc, axis=0) Pc_max = np.max(t_Pc, axis=0) # Arguments of minima and maxima a_min = np.argmin(t_Pc, axis=0) a_max = np.argmax(t_Pc, axis=0) if mode == 'max': return Pc_max elif mode == 'touch': all_rad = rad_curve(X, fiberRad, Y, sigma, theta) all_c2x = c2x(X, fiberRad, Y, sigma, theta) all_cen = X + np.sign(all_rad)*all_c2x dist = all_cen + np.abs(all_rad) # Only count lengths where meniscus bulges into pore dist[all_rad > 0] = 0.0 touch_len = network[touch_length] mask = dist > touch_len arg_touch = np.argmax(mask, axis=0) # Make sure we only count ones that happen before max pressure # And above min pressure (which will be erroneous) arg_in_range = (arg_touch < a_max) * (arg_touch > a_min) arg_touch[~arg_in_range] = a_max[~arg_in_range] x_touch = pos[arg_touch] # Return the pressure at which a touch happens Pc_touch = Pc(x_touch, fiberRad, throatRad, sigma, theta) return Pc_touch elif target_Pc is None: logger.exception(msg='Please supply a target capillary pressure' + ' when mode is "men"') if np.abs(target_Pc) < 1.0: target_Pc = 1.0 inds = np.indices(np.shape(t_Pc)) # Change values outside the range between minima and maxima to be those # Values mask = inds[0] < np.ones(len(pos))[:, np.newaxis]*a_min t_Pc[mask] = (np.ones(len(pos))[:, np.newaxis]*Pc_min)[mask] mask = inds[0] > np.ones(len(pos))[:, np.newaxis]*a_max t_Pc[mask] = (np.ones(len(pos))[:, np.newaxis]*Pc_max)[mask] # Find the argument at or above the target Pressure mask = t_Pc >= target_Pc arg_x = np.argmax(mask, axis=0) # If outside range change to minima or maxima accordingly arg_x[target_Pc < Pc_min] = a_min[target_Pc < Pc_min] arg_x[target_Pc > Pc_max] = a_max[target_Pc > Pc_max] xpos = pos[arg_x] # Output men_data = {} men_data['pos'] = xpos men_data['rx'] = rx(xpos, fiberRad, throatRad) men_data['alpha'] = fill_angle(xpos, fiberRad, throatRad) men_data['alpha_min'] = fill_angle(pos[a_min], fiberRad, throatRad) men_data['alpha_max'] = fill_angle(pos[a_max], fiberRad, throatRad) men_data['c2x'] = c2x(xpos, fiberRad, throatRad, sigma, theta) men_data['gamma'] = cap_angle(xpos, fiberRad, throatRad, sigma, theta) men_data['radius'] = rad_curve(xpos, fiberRad, throatRad, sigma, theta) # xpos is relative to the throat center men_data['center'] = (xpos + np.sign(men_data['radius'])*men_data['c2x']) men_data['men_max'] = men_data['center'] - men_data['radius'] logger.info(mode+' calculated for Pc: '+str(target_Pc)) return men_data
[ "def", "toroidal", "(", "target", ",", "mode", "=", "'max'", ",", "r_toroid", "=", "5e-6", ",", "target_Pc", "=", "None", ",", "num_points", "=", "1e2", ",", "surface_tension", "=", "'pore.surface_tension'", ",", "contact_angle", "=", "'pore.contact_angle'", ",", "throat_diameter", "=", "'throat.diameter'", ",", "touch_length", "=", "'throat.touch_length'", ")", ":", "network", "=", "target", ".", "project", ".", "network", "phase", "=", "target", ".", "project", ".", "find_phase", "(", "target", ")", "element", ",", "sigma", ",", "theta", "=", "_get_key_props", "(", "phase", "=", "phase", ",", "diameter", "=", "throat_diameter", ",", "surface_tension", "=", "surface_tension", ",", "contact_angle", "=", "contact_angle", ")", "x", ",", "R", ",", "rt", ",", "s", ",", "t", "=", "syp", ".", "symbols", "(", "'x, R, rt, s, t'", ")", "# Equation of circle re-arranged for y", "y", "=", "R", "*", "syp", ".", "sqrt", "(", "1", "-", "(", "x", "/", "R", ")", "**", "2", ")", "# Throat radius profile", "r", "=", "rt", "+", "(", "R", "-", "y", ")", "# Derivative of profile", "rprime", "=", "r", ".", "diff", "(", "x", ")", "# Filling angle", "alpha", "=", "syp", ".", "atan", "(", "rprime", ")", "# Radius of curvature of meniscus", "rm", "=", "r", "/", "syp", ".", "cos", "(", "alpha", "+", "t", ")", "# distance from center of curvature to meniscus contact point (Pythagoras)", "a", "=", "syp", ".", "sqrt", "(", "rm", "**", "2", "-", "r", "**", "2", ")", "# angle between throat axis, meniscus center and meniscus contact point", "gamma", "=", "syp", ".", "atan", "(", "r", "/", "a", ")", "# Capillary Pressure", "f", "=", "-", "2", "*", "s", "*", "syp", ".", "cos", "(", "alpha", "+", "t", ")", "/", "r", "# Callable Functions", "rx", "=", "syp", ".", "lambdify", "(", "(", "x", ",", "R", ",", "rt", ")", ",", "r", ",", "'numpy'", ")", "fill_angle", "=", "syp", ".", "lambdify", "(", "(", "x", ",", "R", ",", "rt", ")", ",", "alpha", ",", "'numpy'", ")", "Pc", "=", "syp", ".", "lambdify", "(", "(", "x", ",", "R", ",", "rt", ",", "s", ",", "t", ")", ",", "f", ",", "'numpy'", ")", "rad_curve", "=", "syp", ".", "lambdify", "(", "(", "x", ",", "R", ",", "rt", ",", "s", ",", "t", ")", ",", "rm", ",", "'numpy'", ")", "c2x", "=", "syp", ".", "lambdify", "(", "(", "x", ",", "R", ",", "rt", ",", "s", ",", "t", ")", ",", "a", ",", "'numpy'", ")", "cap_angle", "=", "syp", ".", "lambdify", "(", "(", "x", ",", "R", ",", "rt", ",", "s", ",", "t", ")", ",", "gamma", ",", "'numpy'", ")", "# Contact Angle", "theta", "=", "np", ".", "deg2rad", "(", "theta", ")", "# Network properties", "throatRad", "=", "network", "[", "throat_diameter", "]", "/", "2", "pos", "=", "np", ".", "arange", "(", "-", "r_toroid", "*", "0.999", ",", "r_toroid", "*", "0.999", ",", "r_toroid", "/", "num_points", ")", "fiberRad", "=", "np", ".", "ones", "(", "len", "(", "throatRad", ")", ")", "*", "r_toroid", "# Now find the positions of the menisci along each throat axis", "Y", ",", "X", "=", "np", ".", "meshgrid", "(", "throatRad", ",", "pos", ")", "t_Pc", "=", "Pc", "(", "X", ",", "fiberRad", ",", "Y", ",", "sigma", ",", "theta", ")", "# Values of minima and maxima", "Pc_min", "=", "np", ".", "min", "(", "t_Pc", ",", "axis", "=", "0", ")", "Pc_max", "=", "np", ".", "max", "(", "t_Pc", ",", "axis", "=", "0", ")", "# Arguments of minima and maxima", "a_min", "=", "np", ".", "argmin", "(", "t_Pc", ",", "axis", "=", "0", ")", "a_max", "=", "np", ".", "argmax", "(", "t_Pc", ",", "axis", "=", "0", ")", "if", "mode", "==", "'max'", ":", "return", "Pc_max", "elif", 
"mode", "==", "'touch'", ":", "all_rad", "=", "rad_curve", "(", "X", ",", "fiberRad", ",", "Y", ",", "sigma", ",", "theta", ")", "all_c2x", "=", "c2x", "(", "X", ",", "fiberRad", ",", "Y", ",", "sigma", ",", "theta", ")", "all_cen", "=", "X", "+", "np", ".", "sign", "(", "all_rad", ")", "*", "all_c2x", "dist", "=", "all_cen", "+", "np", ".", "abs", "(", "all_rad", ")", "# Only count lengths where meniscus bulges into pore", "dist", "[", "all_rad", ">", "0", "]", "=", "0.0", "touch_len", "=", "network", "[", "touch_length", "]", "mask", "=", "dist", ">", "touch_len", "arg_touch", "=", "np", ".", "argmax", "(", "mask", ",", "axis", "=", "0", ")", "# Make sure we only count ones that happen before max pressure", "# And above min pressure (which will be erroneous)", "arg_in_range", "=", "(", "arg_touch", "<", "a_max", ")", "*", "(", "arg_touch", ">", "a_min", ")", "arg_touch", "[", "~", "arg_in_range", "]", "=", "a_max", "[", "~", "arg_in_range", "]", "x_touch", "=", "pos", "[", "arg_touch", "]", "# Return the pressure at which a touch happens", "Pc_touch", "=", "Pc", "(", "x_touch", ",", "fiberRad", ",", "throatRad", ",", "sigma", ",", "theta", ")", "return", "Pc_touch", "elif", "target_Pc", "is", "None", ":", "logger", ".", "exception", "(", "msg", "=", "'Please supply a target capillary pressure'", "+", "' when mode is \"men\"'", ")", "if", "np", ".", "abs", "(", "target_Pc", ")", "<", "1.0", ":", "target_Pc", "=", "1.0", "inds", "=", "np", ".", "indices", "(", "np", ".", "shape", "(", "t_Pc", ")", ")", "# Change values outside the range between minima and maxima to be those", "# Values", "mask", "=", "inds", "[", "0", "]", "<", "np", ".", "ones", "(", "len", "(", "pos", ")", ")", "[", ":", ",", "np", ".", "newaxis", "]", "*", "a_min", "t_Pc", "[", "mask", "]", "=", "(", "np", ".", "ones", "(", "len", "(", "pos", ")", ")", "[", ":", ",", "np", ".", "newaxis", "]", "*", "Pc_min", ")", "[", "mask", "]", "mask", "=", "inds", "[", "0", "]", ">", "np", ".", "ones", "(", "len", "(", "pos", ")", ")", "[", ":", ",", "np", ".", "newaxis", "]", "*", "a_max", "t_Pc", "[", "mask", "]", "=", "(", "np", ".", "ones", "(", "len", "(", "pos", ")", ")", "[", ":", ",", "np", ".", "newaxis", "]", "*", "Pc_max", ")", "[", "mask", "]", "# Find the argument at or above the target Pressure", "mask", "=", "t_Pc", ">=", "target_Pc", "arg_x", "=", "np", ".", "argmax", "(", "mask", ",", "axis", "=", "0", ")", "# If outside range change to minima or maxima accordingly", "arg_x", "[", "target_Pc", "<", "Pc_min", "]", "=", "a_min", "[", "target_Pc", "<", "Pc_min", "]", "arg_x", "[", "target_Pc", ">", "Pc_max", "]", "=", "a_max", "[", "target_Pc", ">", "Pc_max", "]", "xpos", "=", "pos", "[", "arg_x", "]", "# Output", "men_data", "=", "{", "}", "men_data", "[", "'pos'", "]", "=", "xpos", "men_data", "[", "'rx'", "]", "=", "rx", "(", "xpos", ",", "fiberRad", ",", "throatRad", ")", "men_data", "[", "'alpha'", "]", "=", "fill_angle", "(", "xpos", ",", "fiberRad", ",", "throatRad", ")", "men_data", "[", "'alpha_min'", "]", "=", "fill_angle", "(", "pos", "[", "a_min", "]", ",", "fiberRad", ",", "throatRad", ")", "men_data", "[", "'alpha_max'", "]", "=", "fill_angle", "(", "pos", "[", "a_max", "]", ",", "fiberRad", ",", "throatRad", ")", "men_data", "[", "'c2x'", "]", "=", "c2x", "(", "xpos", ",", "fiberRad", ",", "throatRad", ",", "sigma", ",", "theta", ")", "men_data", "[", "'gamma'", "]", "=", "cap_angle", "(", "xpos", ",", "fiberRad", ",", "throatRad", ",", "sigma", ",", "theta", ")", "men_data", "[", "'radius'", "]", "=", 
"rad_curve", "(", "xpos", ",", "fiberRad", ",", "throatRad", ",", "sigma", ",", "theta", ")", "# xpos is relative to the throat center", "men_data", "[", "'center'", "]", "=", "(", "xpos", "+", "np", ".", "sign", "(", "men_data", "[", "'radius'", "]", ")", "*", "men_data", "[", "'c2x'", "]", ")", "men_data", "[", "'men_max'", "]", "=", "men_data", "[", "'center'", "]", "-", "men_data", "[", "'radius'", "]", "logger", ".", "info", "(", "mode", "+", "' calculated for Pc: '", "+", "str", "(", "target_Pc", ")", ")", "return", "men_data" ]
42.091954
19.95977
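A hedged usage sketch: toroidal is written as an OpenPNM pore-scale model, so a typical call attaches it to a Physics object. The class and property names below follow the OpenPNM 2.x pattern and are assumptions, not taken from this document.

import openpnm as op
pn = op.network.Cubic(shape=[5, 5, 5], spacing=2.5e-5)
geo = op.geometry.StickAndBall(network=pn, pores=pn.Ps, throats=pn.Ts)
water = op.phases.Water(network=pn)
phys = op.physics.GenericPhysics(network=pn, phase=water, geometry=geo)
phys.add_model(propname='throat.entry_pressure',
               model=toroidal,   # the function above
               mode='max', r_toroid=5e-6)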
def get(dic, path, seps=PATH_SEPS, idx_reg=_JSNP_GET_ARRAY_IDX_REG): """getter for nested dicts. :param dic: a dict[-like] object :param path: Path expression to point object wanted :param seps: Separator char candidates :return: A tuple of (result_object, error_message) >>> d = {'a': {'b': {'c': 0, 'd': [1, 2]}}, '': 3} >>> assert get(d, '/') == (3, '') # key becomes '' (empty string). >>> assert get(d, "/a/b/c") == (0, '') >>> sorted(get(d, "a.b")[0].items()) [('c', 0), ('d', [1, 2])] >>> (get(d, "a.b.d"), get(d, "/a/b/d/1")) (([1, 2], ''), (2, '')) >>> get(d, "a.b.key_not_exist") # doctest: +ELLIPSIS (None, "'...'") >>> get(d, "/a/b/d/2") (None, 'list index out of range') >>> get(d, "/a/b/d/-") # doctest: +ELLIPSIS (None, 'list indices must be integers...') """ items = [_jsnp_unescape(p) for p in _split_path(path, seps)] if not items: return (dic, '') try: if len(items) == 1: return (dic[items[0]], '') prnt = functools.reduce(operator.getitem, items[:-1], dic) arr = anyconfig.utils.is_list_like(prnt) and idx_reg.match(items[-1]) return (prnt[int(items[-1])], '') if arr else (prnt[items[-1]], '') except (TypeError, KeyError, IndexError) as exc: return (None, str(exc))
[ "def", "get", "(", "dic", ",", "path", ",", "seps", "=", "PATH_SEPS", ",", "idx_reg", "=", "_JSNP_GET_ARRAY_IDX_REG", ")", ":", "items", "=", "[", "_jsnp_unescape", "(", "p", ")", "for", "p", "in", "_split_path", "(", "path", ",", "seps", ")", "]", "if", "not", "items", ":", "return", "(", "dic", ",", "''", ")", "try", ":", "if", "len", "(", "items", ")", "==", "1", ":", "return", "(", "dic", "[", "items", "[", "0", "]", "]", ",", "''", ")", "prnt", "=", "functools", ".", "reduce", "(", "operator", ".", "getitem", ",", "items", "[", ":", "-", "1", "]", ",", "dic", ")", "arr", "=", "anyconfig", ".", "utils", ".", "is_list_like", "(", "prnt", ")", "and", "idx_reg", ".", "match", "(", "items", "[", "-", "1", "]", ")", "return", "(", "prnt", "[", "int", "(", "items", "[", "-", "1", "]", ")", "]", ",", "''", ")", "if", "arr", "else", "(", "prnt", "[", "items", "[", "-", "1", "]", "]", ",", "''", ")", "except", "(", "TypeError", ",", "KeyError", ",", "IndexError", ")", "as", "exc", ":", "return", "(", "None", ",", "str", "(", "exc", ")", ")" ]
37.514286
17.057143
def db_from_hass_config(path=None, **kwargs): """Initialize a database from HASS config.""" if path is None: path = config.find_hass_config() url = config.db_url_from_hass_config(path) return HassDatabase(url, **kwargs)
[ "def", "db_from_hass_config", "(", "path", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "path", "is", "None", ":", "path", "=", "config", ".", "find_hass_config", "(", ")", "url", "=", "config", ".", "db_url_from_hass_config", "(", "path", ")", "return", "HassDatabase", "(", "url", ",", "*", "*", "kwargs", ")" ]
34
10.428571
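Hypothetical usage; the path is a placeholder, and any extra keyword arguments are passed straight through to HassDatabase.

db = db_from_hass_config()           # auto-detect the HASS config directory
db = db_from_hass_config("/config")  # explicit config path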
def process(): """Get process overview.""" pmi = ProcessMemoryInfo() threads = get_current_threads() return dict(info=pmi, threads=threads)
[ "def", "process", "(", ")", ":", "pmi", "=", "ProcessMemoryInfo", "(", ")", "threads", "=", "get_current_threads", "(", ")", "return", "dict", "(", "info", "=", "pmi", ",", "threads", "=", "threads", ")" ]
30.2
8.8
def _set_system_monitor_mail(self, v, load=False): """ Setter method for system_monitor_mail, mapped from YANG variable /system_monitor_mail (container) If this variable is read-only (config: false) in the source YANG file, then _set_system_monitor_mail is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_system_monitor_mail() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=system_monitor_mail.system_monitor_mail, is_container='container', presence=False, yang_name="system-monitor-mail", rest_name="system-monitor-mail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure FRU mail setting', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-system-monitor', defining_module='brocade-system-monitor', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """system_monitor_mail must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=system_monitor_mail.system_monitor_mail, is_container='container', presence=False, yang_name="system-monitor-mail", rest_name="system-monitor-mail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure FRU mail setting', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-system-monitor', defining_module='brocade-system-monitor', yang_type='container', is_config=True)""", }) self.__system_monitor_mail = t if hasattr(self, '_set'): self._set()
[ "def", "_set_system_monitor_mail", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "system_monitor_mail", ".", "system_monitor_mail", ",", "is_container", "=", "'container'", ",", "presence", "=", "False", ",", "yang_name", "=", "\"system-monitor-mail\"", ",", "rest_name", "=", "\"system-monitor-mail\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Configure FRU mail setting'", ",", "u'cli-incomplete-no'", ":", "None", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-system-monitor'", ",", "defining_module", "=", "'brocade-system-monitor'", ",", "yang_type", "=", "'container'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"system_monitor_mail must be of a type compatible with container\"\"\"", ",", "'defined-type'", ":", "\"container\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=system_monitor_mail.system_monitor_mail, is_container='container', presence=False, yang_name=\"system-monitor-mail\", rest_name=\"system-monitor-mail\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure FRU mail setting', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-system-monitor', defining_module='brocade-system-monitor', yang_type='container', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__system_monitor_mail", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
82.227273
38.090909
def validate(self): """ Perform validation check on properties. """ if not self.api_token or not self.api_token_secret: raise ImproperlyConfigured("'api_token' and 'api_token_secret' are required for authentication.") if self.response_type not in ["json", "pson", "xml", "debug", None]: raise ImproperlyConfigured("'%s' is an invalid response_type" % self.response_type)
[ "def", "validate", "(", "self", ")", ":", "if", "not", "self", ".", "api_token", "or", "not", "self", ".", "api_token_secret", ":", "raise", "ImproperlyConfigured", "(", "\"'api_token' and 'api_token_secret' are required for authentication.\"", ")", "if", "self", ".", "response_type", "not", "in", "[", "\"json\"", ",", "\"pson\"", ",", "\"xml\"", ",", "\"debug\"", ",", "None", "]", ":", "raise", "ImproperlyConfigured", "(", "\"'%s' is an invalid response_type\"", "%", "self", ".", "response_type", ")" ]
47.444444
27.444444
def search(self, CorpNum, MgtKeyType, DType, SDate, EDate, State, Type, TaxType, LateOnly, TaxRegIDYN, TaxRegIDType, TaxRegID, Page, PerPage, Order, UserID=None, QString=None, InterOPYN=None, IssueType=None): """ Search tax invoice list args CorpNum : Popbill member business registration number MgtKeyType : tax invoice type, SELL-sales, BUY-purchase, TRUSTEE-consignment DType : date type, one of R-registration date, W-write date, I-issue date SDate : start date, format yyyyMMdd EDate : end date, format yyyyMMdd State : state code; wildcard (*) allowed in the 2nd and 3rd positions Type : document form array, N-normal tax invoice, M-modified tax invoice TaxType : taxation type array, T-taxable, N-tax free, Z-zero rated LateOnly : late issuance, blank-all, 0-issued on time, 1-issued late TaxRegIDYN : sub-office registration number presence, blank-all, 0-absent, 1-present TaxRegIDType : sub-office registration number owner type, S-supplier, B-buyer, T-trustee TaxRegID : sub-office registration numbers, comma separated, e.g. '0001,1234' Page : page number PerPage : entries per page Order : sort direction, D-descending, A-ascending UserID : Popbill member user ID QString : partner info; partner company name or business registration number, all if omitted InterOPYN : interop document flag, blank-all, 0-regular documents, 1-interop documents IssueType : issue form array, N-normal issue, R-reverse issue, T-consigned issue return search results Object raise PopbillException """ if MgtKeyType not in self.__MgtKeyTypes: raise PopbillException(-99999999, "관리번호 형태가 올바르지 않습니다.") if DType == None or DType == '': raise PopbillException(-99999999, "일자유형이 입력되지 않았습니다.") if SDate == None or SDate == '': raise PopbillException(-99999999, "시작일자가 입력되지 않았습니다.") if EDate == None or EDate == '': raise PopbillException(-99999999, "종료일자가 입력되지 않았습니다.") uri = '/Taxinvoice/' + MgtKeyType uri += '?DType=' + DType uri += '&SDate=' + SDate uri += '&EDate=' + EDate uri += '&State=' + ','.join(State) uri += '&Type=' + ','.join(Type) uri += '&TaxType=' + ','.join(TaxType) uri += '&TaxRegIDType=' + TaxRegIDType uri += '&TaxRegID=' + TaxRegID uri += '&Page=' + str(Page) uri += '&PerPage=' + str(PerPage) uri += '&Order=' + Order uri += '&InterOPYN=' + InterOPYN if LateOnly != '': uri += '&LateOnly=' + LateOnly if TaxRegIDYN != '': uri += '&TaxRegIDType=' + TaxRegIDType if QString is not None: uri += '&QString=' + QString if IssueType is not None: uri += '&IssueType=' + ','.join(IssueType) return self._httpget(uri, CorpNum, UserID)
[ "def", "search", "(", "self", ",", "CorpNum", ",", "MgtKeyType", ",", "DType", ",", "SDate", ",", "EDate", ",", "State", ",", "Type", ",", "TaxType", ",", "LateOnly", ",", "TaxRegIDYN", ",", "TaxRegIDType", ",", "TaxRegID", ",", "Page", ",", "PerPage", ",", "Order", ",", "UserID", "=", "None", ",", "QString", "=", "None", ",", "InterOPYN", "=", "None", ",", "IssueType", "=", "None", ")", ":", "if", "MgtKeyType", "not", "in", "self", ".", "__MgtKeyTypes", ":", "raise", "PopbillException", "(", "-", "99999999", ",", "\"관리번호 형태가 올바르지 않습니다.\")", "", "if", "DType", "==", "None", "or", "DType", "==", "''", ":", "raise", "PopbillException", "(", "-", "99999999", ",", "\"일자유형이 입력되지 않았습니다.\")", "", "if", "SDate", "==", "None", "or", "SDate", "==", "''", ":", "raise", "PopbillException", "(", "-", "99999999", ",", "\"시작일자가 입력되지 않았습니다.\")", "", "if", "EDate", "==", "None", "or", "EDate", "==", "''", ":", "raise", "PopbillException", "(", "-", "99999999", ",", "\"종료일자가 입력되지 않았습니다.\")", "", "uri", "=", "'/Taxinvoice/'", "+", "MgtKeyType", "uri", "+=", "'?DType='", "+", "DType", "uri", "+=", "'&SDate='", "+", "SDate", "uri", "+=", "'&EDate='", "+", "EDate", "uri", "+=", "'&State='", "+", "','", ".", "join", "(", "State", ")", "uri", "+=", "'&Type='", "+", "','", ".", "join", "(", "Type", ")", "uri", "+=", "'&TaxType='", "+", "','", ".", "join", "(", "TaxType", ")", "uri", "+=", "'&TaxRegIDType='", "+", "TaxRegIDType", "uri", "+=", "'&TaxRegID='", "+", "TaxRegID", "uri", "+=", "'&Page='", "+", "str", "(", "Page", ")", "uri", "+=", "'&PerPage='", "+", "str", "(", "PerPage", ")", "uri", "+=", "'&Order='", "+", "Order", "uri", "+=", "'&InterOPYN='", "+", "InterOPYN", "if", "LateOnly", "!=", "''", ":", "uri", "+=", "'&LateOnly='", "+", "LateOnly", "if", "TaxRegIDYN", "!=", "''", ":", "uri", "+=", "'&TaxRegIDType='", "+", "TaxRegIDType", "if", "QString", "is", "not", "None", ":", "uri", "+=", "'&QString='", "+", "QString", "if", "IssueType", "is", "not", "None", ":", "uri", "+=", "'&IssueType='", "+", "','", ".", "join", "(", "IssueType", ")", "return", "self", ".", "_httpget", "(", "uri", ",", "CorpNum", ",", "UserID", ")" ]
39.38806
16.850746
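An illustrative call with placeholder values (taxinvoice stands for a service instance and is an assumption). Note from the code above that InterOPYN must be a string such as '' despite its None default, because the URI is built by plain concatenation.

result = taxinvoice.search(
    CorpNum="1234567890", MgtKeyType="SELL",
    DType="W", SDate="20190101", EDate="20190131",
    State=["3**", "6**"], Type=["N", "M"], TaxType=["T", "N", "Z"],
    LateOnly="", TaxRegIDYN="", TaxRegIDType="S", TaxRegID="",
    Page=1, PerPage=20, Order="D", InterOPYN="")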
def divConn (self, preCellsTags, postCellsTags, connParam): ''' Generates connections between all pre and post-syn cells based on divergence values''' from .. import sim if sim.cfg.verbose: print('Generating set of divergent connections (rule: %s) ...' % (connParam['label'])) # get list of params that have a lambda function paramsStrFunc = [param for param in [p+'Func' for p in self.connStringFuncParams] if param in connParam] # copy the vars into args immediately and work out which keys are associated with lambda functions only once per method funcKeys = {} for paramStrFunc in paramsStrFunc: connParam[paramStrFunc + 'Args'] = connParam[paramStrFunc + 'Vars'].copy() funcKeys[paramStrFunc] = [key for key in connParam[paramStrFunc + 'Vars'] if callable(connParam[paramStrFunc + 'Vars'][key])] # converted to list only once postCellsTagsKeys = sorted(postCellsTags) # calculate hash for post cell gids hashPostCells = sim.hashList(postCellsTagsKeys) for preCellGid, preCellTags in preCellsTags.items(): # for each presyn cell divergence = connParam['divergenceFunc'][preCellGid] if 'divergenceFunc' in connParam else connParam['divergence'] # num of presyn conns / postsyn cell divergence = max(min(int(round(divergence)), len(postCellsTags)-1), 0) self.rand.Random123(hashPostCells, preCellGid, sim.cfg.seeds['conn']) # init randomizer randSample = self.randUniqueInt(self.rand, divergence+1, 0, len(postCellsTags)-1) # note: randSample[divergence] is an extra value used only if one of the random postGids coincided with the preGid postCellsSample = {postCellsTagsKeys[randSample[divergence]] if postCellsTagsKeys[i]==preCellGid else postCellsTagsKeys[i]: 0 for i in randSample[0:divergence]} # dict of selected gids of postsyn cells with removed pre gid for postCellGid in [c for c in postCellsSample if c in self.gid2lid]: postCellTags = postCellsTags[postCellGid] for paramStrFunc in paramsStrFunc: # call lambda functions to get weight func args # update the relevant FuncArgs dict where lambda functions are known to exist in the corresponding FuncVars dict for funcKey in funcKeys[paramStrFunc]: connParam[paramStrFunc + 'Args'][funcKey] = connParam[paramStrFunc+'Vars'][funcKey](preCellTags,postCellTags) if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid)
[ "def", "divConn", "(", "self", ",", "preCellsTags", ",", "postCellsTags", ",", "connParam", ")", ":", "from", ".", ".", "import", "sim", "if", "sim", ".", "cfg", ".", "verbose", ":", "print", "(", "'Generating set of divergent connections (rule: %s) ...'", "%", "(", "connParam", "[", "'label'", "]", ")", ")", "# get list of params that have a lambda function", "paramsStrFunc", "=", "[", "param", "for", "param", "in", "[", "p", "+", "'Func'", "for", "p", "in", "self", ".", "connStringFuncParams", "]", "if", "param", "in", "connParam", "]", "# copy the vars into args immediately and work out which keys are associated with lambda functions only once per method", "funcKeys", "=", "{", "}", "for", "paramStrFunc", "in", "paramsStrFunc", ":", "connParam", "[", "paramStrFunc", "+", "'Args'", "]", "=", "connParam", "[", "paramStrFunc", "+", "'Vars'", "]", ".", "copy", "(", ")", "funcKeys", "[", "paramStrFunc", "]", "=", "[", "key", "for", "key", "in", "connParam", "[", "paramStrFunc", "+", "'Vars'", "]", "if", "callable", "(", "connParam", "[", "paramStrFunc", "+", "'Vars'", "]", "[", "key", "]", ")", "]", "# converted to list only once ", "postCellsTagsKeys", "=", "sorted", "(", "postCellsTags", ")", "# calculate hash for post cell gids", "hashPostCells", "=", "sim", ".", "hashList", "(", "postCellsTagsKeys", ")", "for", "preCellGid", ",", "preCellTags", "in", "preCellsTags", ".", "items", "(", ")", ":", "# for each presyn cell", "divergence", "=", "connParam", "[", "'divergenceFunc'", "]", "[", "preCellGid", "]", "if", "'divergenceFunc'", "in", "connParam", "else", "connParam", "[", "'divergence'", "]", "# num of presyn conns / postsyn cell", "divergence", "=", "max", "(", "min", "(", "int", "(", "round", "(", "divergence", ")", ")", ",", "len", "(", "postCellsTags", ")", "-", "1", ")", ",", "0", ")", "self", ".", "rand", ".", "Random123", "(", "hashPostCells", ",", "preCellGid", ",", "sim", ".", "cfg", ".", "seeds", "[", "'conn'", "]", ")", "# init randomizer", "randSample", "=", "self", ".", "randUniqueInt", "(", "self", ".", "rand", ",", "divergence", "+", "1", ",", "0", ",", "len", "(", "postCellsTags", ")", "-", "1", ")", "# note: randSample[divergence] is an extra value used only if one of the random postGids coincided with the preGid ", "postCellsSample", "=", "{", "postCellsTagsKeys", "[", "randSample", "[", "divergence", "]", "]", "if", "postCellsTagsKeys", "[", "i", "]", "==", "preCellGid", "else", "postCellsTagsKeys", "[", "i", "]", ":", "0", "for", "i", "in", "randSample", "[", "0", ":", "divergence", "]", "}", "# dict of selected gids of postsyn cells with removed pre gid", "for", "postCellGid", "in", "[", "c", "for", "c", "in", "postCellsSample", "if", "c", "in", "self", ".", "gid2lid", "]", ":", "postCellTags", "=", "postCellsTags", "[", "postCellGid", "]", "for", "paramStrFunc", "in", "paramsStrFunc", ":", "# call lambda functions to get weight func args", "# update the relevant FuncArgs dict where lambda functions are known to exist in the corresponding FuncVars dict", "for", "funcKey", "in", "funcKeys", "[", "paramStrFunc", "]", ":", "connParam", "[", "paramStrFunc", "+", "'Args'", "]", "[", "funcKey", "]", "=", "connParam", "[", "paramStrFunc", "+", "'Vars'", "]", "[", "funcKey", "]", "(", "preCellTags", ",", "postCellTags", ")", "if", "preCellGid", "!=", "postCellGid", ":", "# if not self-connection", "self", ".", "_addCellConn", "(", "connParam", ",", "preCellGid", ",", "postCellGid", ")" ]
64.875
44.725
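A self-contained sketch (illustrative names, stdlib only) of the collision-handling trick above: draw divergence+1 unique indices and substitute the spare one wherever a pick lands on the presynaptic gid.

import random

def pick_targets(pre_gid, post_gids, divergence, rng=random):
    # sample is index-unique, so at most one pick can collide with pre_gid,
    # and the spare index is guaranteed to differ from the colliding one
    sample = rng.sample(range(len(post_gids)), divergence + 1)
    spare = post_gids[sample[divergence]]
    return [spare if post_gids[i] == pre_gid else post_gids[i]
            for i in sample[:divergence]]

pick_targets(pre_gid=2, post_gids=[0, 1, 2, 3, 4], divergence=3)  # never contains 2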
def enqueue(self, pipeline): """ Start a pipeline. :param pipeline: Start this pipeline. """ copied = Pipeline().append(pipeline) copied.group = self self._queue.put(copied)
[ "def", "enqueue", "(", "self", ",", "pipeline", ")", ":", "copied", "=", "Pipeline", "(", ")", ".", "append", "(", "pipeline", ")", "copied", ".", "group", "=", "self", "self", ".", "_queue", ".", "put", "(", "copied", ")" ]
26.875
10.375
def _to_dict(self): """Return a json dictionary representing this model.""" _dict = {} if hasattr(self, 'input') and self.input is not None: _dict['input'] = self.input._to_dict() if hasattr(self, 'intents') and self.intents is not None: _dict['intents'] = [x._to_dict() for x in self.intents] if hasattr(self, 'entities') and self.entities is not None: _dict['entities'] = [x._to_dict() for x in self.entities] return _dict
[ "def", "_to_dict", "(", "self", ")", ":", "_dict", "=", "{", "}", "if", "hasattr", "(", "self", ",", "'input'", ")", "and", "self", ".", "input", "is", "not", "None", ":", "_dict", "[", "'input'", "]", "=", "self", ".", "input", ".", "_to_dict", "(", ")", "if", "hasattr", "(", "self", ",", "'intents'", ")", "and", "self", ".", "intents", "is", "not", "None", ":", "_dict", "[", "'intents'", "]", "=", "[", "x", ".", "_to_dict", "(", ")", "for", "x", "in", "self", ".", "intents", "]", "if", "hasattr", "(", "self", ",", "'entities'", ")", "and", "self", ".", "entities", "is", "not", "None", ":", "_dict", "[", "'entities'", "]", "=", "[", "x", ".", "_to_dict", "(", ")", "for", "x", "in", "self", ".", "entities", "]", "return", "_dict" ]
49.9
20.2
def process_subprotocol( self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]] ) -> Optional[Subprotocol]: """ Handle the Sec-WebSocket-Protocol HTTP request header. Return Sec-WebSocket-Protocol HTTP response header, which is the same as the selected subprotocol. """ subprotocol: Optional[Subprotocol] = None header_values = headers.get_all("Sec-WebSocket-Protocol") if header_values and available_subprotocols: parsed_header_values: List[Subprotocol] = sum( [parse_subprotocol(header_value) for header_value in header_values], [] ) subprotocol = self.select_subprotocol( parsed_header_values, available_subprotocols ) return subprotocol
[ "def", "process_subprotocol", "(", "self", ",", "headers", ":", "Headers", ",", "available_subprotocols", ":", "Optional", "[", "Sequence", "[", "Subprotocol", "]", "]", ")", "->", "Optional", "[", "Subprotocol", "]", ":", "subprotocol", ":", "Optional", "[", "Subprotocol", "]", "=", "None", "header_values", "=", "headers", ".", "get_all", "(", "\"Sec-WebSocket-Protocol\"", ")", "if", "header_values", "and", "available_subprotocols", ":", "parsed_header_values", ":", "List", "[", "Subprotocol", "]", "=", "sum", "(", "[", "parse_subprotocol", "(", "header_value", ")", "for", "header_value", "in", "header_values", "]", ",", "[", "]", ")", "subprotocol", "=", "self", ".", "select_subprotocol", "(", "parsed_header_values", ",", "available_subprotocols", ")", "return", "subprotocol" ]
32.48
24.96
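A hedged sketch of the outcome, assuming the default select_subprotocol and a server-side protocol instance named protocol; Headers is the same class used in the signature above.

headers = Headers()
headers["Sec-WebSocket-Protocol"] = "graphql-ws, chat"
protocol.process_subprotocol(headers, ["chat", "superchat"])
# -> 'chat', which is then echoed back as the Sec-WebSocket-Protocol response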
def skew(xi): """Return the skew-symmetric matrix that can be used to calculate cross-products with vector xi. Multiplying this matrix by a vector `v` gives the same result as `xi x v`. Parameters ---------- xi : :obj:`numpy.ndarray` of float A 3-entry vector. Returns ------- :obj:`numpy.ndarray` of float The 3x3 skew-symmetric cross product matrix for the vector. """ S = np.array([[0, -xi[2], xi[1]], [xi[2], 0, -xi[0]], [-xi[1], xi[0], 0]]) return S
[ "def", "skew", "(", "xi", ")", ":", "S", "=", "np", ".", "array", "(", "[", "[", "0", ",", "-", "xi", "[", "2", "]", ",", "xi", "[", "1", "]", "]", ",", "[", "xi", "[", "2", "]", ",", "0", ",", "-", "xi", "[", "0", "]", "]", ",", "[", "-", "xi", "[", "1", "]", ",", "xi", "[", "0", "]", ",", "0", "]", "]", ")", "return", "S" ]
25.761905
19
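A quick numerical check of the property the docstring states, skew(xi) @ v == xi x v:

import numpy as np
xi = np.array([1.0, 2.0, 3.0])
v = np.array([4.0, 5.0, 6.0])
assert np.allclose(skew(xi).dot(v), np.cross(xi, v))  # both give [-3., 6., -3.]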
def schemaNewParserCtxt(URL): """Create an XML Schemas parse context for that file/resource expected to contain an XML Schemas file. """ ret = libxml2mod.xmlSchemaNewParserCtxt(URL) if ret is None:raise parserError('xmlSchemaNewParserCtxt() failed') return SchemaParserCtxt(_obj=ret)
[ "def", "schemaNewParserCtxt", "(", "URL", ")", ":", "ret", "=", "libxml2mod", ".", "xmlSchemaNewParserCtxt", "(", "URL", ")", "if", "ret", "is", "None", ":", "raise", "parserError", "(", "'xmlSchemaNewParserCtxt() failed'", ")", "return", "SchemaParserCtxt", "(", "_obj", "=", "ret", ")" ]
50.166667
8.833333
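A typical validation flow built on this context; the method names follow the classic libxml2 Python bindings as I recall them and should be treated as assumptions.

import libxml2
ctxt = libxml2.schemaNewParserCtxt("schema.xsd")
schema = ctxt.schemaParse()
valid = schema.schemaNewValidCtxt()
doc = libxml2.parseFile("instance.xml")
ret = valid.schemaValidateDoc(doc)  # 0 on success
doc.freeDoc()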
def screen(args): """ %prog screen scaffolds.fasta library.fasta Screen sequences against FASTA library. Sequences that have 95% id and 50% cov will be removed by default. """ from jcvi.apps.align import blast from jcvi.formats.blast import covfilter p = OptionParser(screen.__doc__) p.set_align(pctid=95, pctcov=50) p.add_option("--best", default=1, type="int", help="Get the best N hit [default: %default]") opts, args = p.parse_args(args) if len(args) != 2: sys.exit(not p.print_help()) scaffolds, library = args pctidflag = "--pctid={0}".format(opts.pctid) blastfile = blast([library, scaffolds, pctidflag, "--best={0}".format(opts.best)]) idsfile = blastfile.rsplit(".", 1)[0] + ".ids" covfilter([blastfile, scaffolds, "--ids=" + idsfile, pctidflag, "--pctcov={0}".format(opts.pctcov)]) pf = scaffolds.rsplit(".", 1)[0] nf = pf + ".screen.fasta" cmd = "faSomeRecords {0} -exclude {1} {2}".format(scaffolds, idsfile, nf) sh(cmd) logging.debug("Screened FASTA written to `{0}`.".format(nf)) return nf
[ "def", "screen", "(", "args", ")", ":", "from", "jcvi", ".", "apps", ".", "align", "import", "blast", "from", "jcvi", ".", "formats", ".", "blast", "import", "covfilter", "p", "=", "OptionParser", "(", "screen", ".", "__doc__", ")", "p", ".", "set_align", "(", "pctid", "=", "95", ",", "pctcov", "=", "50", ")", "p", ".", "add_option", "(", "\"--best\"", ",", "default", "=", "1", ",", "type", "=", "\"int\"", ",", "help", "=", "\"Get the best N hit [default: %default]\"", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "(", "args", ")", "!=", "2", ":", "sys", ".", "exit", "(", "not", "p", ".", "print_help", "(", ")", ")", "scaffolds", ",", "library", "=", "args", "pctidflag", "=", "\"--pctid={0}\"", ".", "format", "(", "opts", ".", "pctid", ")", "blastfile", "=", "blast", "(", "[", "library", ",", "scaffolds", ",", "pctidflag", ",", "\"--best={0}\"", ".", "format", "(", "opts", ".", "best", ")", "]", ")", "idsfile", "=", "blastfile", ".", "rsplit", "(", "\".\"", ",", "1", ")", "[", "0", "]", "+", "\".ids\"", "covfilter", "(", "[", "blastfile", ",", "scaffolds", ",", "\"--ids=\"", "+", "idsfile", ",", "pctidflag", ",", "\"--pctcov={0}\"", ".", "format", "(", "opts", ".", "pctcov", ")", "]", ")", "pf", "=", "scaffolds", ".", "rsplit", "(", "\".\"", ",", "1", ")", "[", "0", "]", "nf", "=", "pf", "+", "\".screen.fasta\"", "cmd", "=", "\"faSomeRecords {0} -exclude {1} {2}\"", ".", "format", "(", "scaffolds", ",", "idsfile", ",", "nf", ")", "sh", "(", "cmd", ")", "logging", ".", "debug", "(", "\"Screened FASTA written to `{0}`.\"", ".", "format", "(", "nf", ")", ")", "return", "nf" ]
31.055556
18.888889
def onMessage(self, client, userdata, msg): """! The callback for when a PUBLISH message is received from the server. @param client @param userdata @param msg """ dataIdentifier = DataIdentifier(self.broker, msg.topic) self.dataHandler.onNewData(dataIdentifier, msg.payload)
[ "def", "onMessage", "(", "self", ",", "client", ",", "userdata", ",", "msg", ")", ":", "dataIdentifier", "=", "DataIdentifier", "(", "self", ".", "broker", ",", "msg", ".", "topic", ")", "self", ".", "dataHandler", ".", "onNewData", "(", "dataIdentifier", ",", "msg", ".", "payload", ")" ]
33
18.3
def safe_filename(filename, replacement="_"): """ Replace unsafe filename characters with underscores. Note that this does not test for "legal" names accepted, but a more restricted set of: Letters, numbers, spaces, hyphens, underscores and periods. :param filename: name of a file as a string :param replacement: character to use as a replacement of bad characters :return: safe filename string """ if not isinstance(filename, str): raise TypeError("filename must be a string") if regex.path.linux.filename.search(filename): return filename safe_name = "" for char in filename: safe_name += char if regex.path.linux.filename.search(char) \ else replacement return safe_name
[ "def", "safe_filename", "(", "filename", ",", "replacement", "=", "\"_\"", ")", ":", "if", "not", "isinstance", "(", "filename", ",", "str", ")", ":", "raise", "TypeError", "(", "\"filename must be a string\"", ")", "if", "regex", ".", "path", ".", "linux", ".", "filename", ".", "search", "(", "filename", ")", ":", "return", "filename", "safe_name", "=", "\"\"", "for", "char", "in", "filename", ":", "safe_name", "+=", "char", "if", "regex", ".", "path", ".", "linux", ".", "filename", ".", "search", "(", "char", ")", "else", "replacement", "return", "safe_name" ]
39.210526
17
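Illustrative behaviour, assuming regex.path.linux.filename is an anchored pattern matching only the documented safe set (letters, digits, spaces, hyphens, underscores, periods):

safe_filename("report(final).txt")     # -> 'report_final_.txt'
safe_filename("already-safe_1.txt")    # returned unchanged by the early exit
safe_filename("a:b", replacement="-")  # -> 'a-b'
safe_filename(42)                      # raises TypeError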
def find_related_imports(self, fullname): """ Return a list of non-stdlib modules that are directly imported by `fullname`, plus their parents. The list is determined by retrieving the source code of `fullname`, compiling it, and examining all IMPORT_NAME ops. :param fullname: Fully qualified name of an _already imported_ module for which source code can be retrieved :type fullname: str """ related = self._related_cache.get(fullname) if related is not None: return related modpath, src, _ = self.get_module_source(fullname) if src is None: return [] maybe_names = list(self.generate_parent_names(fullname)) co = compile(src, modpath, 'exec') for level, modname, namelist in scan_code_imports(co): if level == -1: modnames = [modname, '%s.%s' % (fullname, modname)] else: modnames = [ '%s%s' % (self.resolve_relpath(fullname, level), modname) ] maybe_names.extend(modnames) maybe_names.extend( '%s.%s' % (mname, name) for mname in modnames for name in namelist ) return self._related_cache.setdefault(fullname, sorted( set( mitogen.core.to_text(name) for name in maybe_names if sys.modules.get(name) is not None and not is_stdlib_name(name) and u'six.moves' not in name # TODO: crap ) ))
[ "def", "find_related_imports", "(", "self", ",", "fullname", ")", ":", "related", "=", "self", ".", "_related_cache", ".", "get", "(", "fullname", ")", "if", "related", "is", "not", "None", ":", "return", "related", "modpath", ",", "src", ",", "_", "=", "self", ".", "get_module_source", "(", "fullname", ")", "if", "src", "is", "None", ":", "return", "[", "]", "maybe_names", "=", "list", "(", "self", ".", "generate_parent_names", "(", "fullname", ")", ")", "co", "=", "compile", "(", "src", ",", "modpath", ",", "'exec'", ")", "for", "level", ",", "modname", ",", "namelist", "in", "scan_code_imports", "(", "co", ")", ":", "if", "level", "==", "-", "1", ":", "modnames", "=", "[", "modname", ",", "'%s.%s'", "%", "(", "fullname", ",", "modname", ")", "]", "else", ":", "modnames", "=", "[", "'%s%s'", "%", "(", "self", ".", "resolve_relpath", "(", "fullname", ",", "level", ")", ",", "modname", ")", "]", "maybe_names", ".", "extend", "(", "modnames", ")", "maybe_names", ".", "extend", "(", "'%s.%s'", "%", "(", "mname", ",", "name", ")", "for", "mname", "in", "modnames", "for", "name", "in", "namelist", ")", "return", "self", ".", "_related_cache", ".", "setdefault", "(", "fullname", ",", "sorted", "(", "set", "(", "mitogen", ".", "core", ".", "to_text", "(", "name", ")", "for", "name", "in", "maybe_names", "if", "sys", ".", "modules", ".", "get", "(", "name", ")", "is", "not", "None", "and", "not", "is_stdlib_name", "(", "name", ")", "and", "u'six.moves'", "not", "in", "name", "# TODO: crap", ")", ")", ")" ]
34.12766
18.765957
def parse(self, tokens): '''Parses a list of tokens into a JSON-serializable object. The parsing proceeds from left to right and is greedy. Precedence order: 1. Parameters with active context. For example, an Option with nargs=-1 will gobble all the remaining tokens. 2. Subcommands. 3. Parameters. The keys of the returned object are the names of parameters or subcommands. Subcommands are encoded as nested objects. Multiple parameters are encoded as lists. All other values are encoded as parameter-specified data types, or strings if not specified. ''' self._ping_main() # Pre-parsing: # 1. Expand globbed options: -abc --> -a -b -c def is_globbed(s): return len(s) > 2 and s.startswith('-') and not s.startswith('--') expanded = [["-" + c for c in list(token[1:])] if is_globbed(token) else [token] for token in tokens] # Parsing: pass off to main command after flattening expanded tokens list return self._main.parse(list(itertools.chain.from_iterable(expanded)))
[ "def", "parse", "(", "self", ",", "tokens", ")", ":", "self", ".", "_ping_main", "(", ")", "# Pre-parsing:", "# 1. Expand globbed options: -abc --> -a -b -c", "def", "is_globbed", "(", "s", ")", ":", "return", "len", "(", "s", ")", ">", "2", "and", "s", ".", "startswith", "(", "'-'", ")", "and", "not", "s", ".", "startswith", "(", "'--'", ")", "expanded", "=", "[", "[", "\"-\"", "+", "c", "for", "c", "in", "list", "(", "token", "[", "1", ":", "]", ")", "]", "if", "is_globbed", "(", "token", ")", "else", "[", "token", "]", "for", "token", "in", "tokens", "]", "# Parsing: pass off to main command after flattening expanded tokens list", "return", "self", ".", "_main", ".", "parse", "(", "list", "(", "itertools", ".", "chain", ".", "from_iterable", "(", "expanded", ")", ")", ")" ]
38.576923
27.807692
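The pre-parsing step in isolation, showing the globbed-option expansion exactly as coded above:

tokens = ["-abc", "--verbose", "-x", "file.txt"]
def is_globbed(s):
    return len(s) > 2 and s.startswith('-') and not s.startswith('--')
expanded = [["-" + c for c in list(t[1:])] if is_globbed(t) else [t]
            for t in tokens]
# expanded == [['-a', '-b', '-c'], ['--verbose'], ['-x'], ['file.txt']]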
def finalize(self, outcome=None): """Finalize state This method is called when the run method finishes :param rafcon.core.logical_port.Outcome outcome: final outcome of the state :return: Nothing for the moment """ # Set the final outcome of the state if outcome is not None: self.final_outcome = outcome # If we are within a concurrency state, we have to notify it about our finalization if self.concurrency_queue: self.concurrency_queue.put(self.state_id) logger.debug("Finished execution of {0}: {1}".format(self, self.final_outcome)) return None
[ "def", "finalize", "(", "self", ",", "outcome", "=", "None", ")", ":", "# Set the final outcome of the state", "if", "outcome", "is", "not", "None", ":", "self", ".", "final_outcome", "=", "outcome", "# If we are within a concurrency state, we have to notify it about our finalization", "if", "self", ".", "concurrency_queue", ":", "self", ".", "concurrency_queue", ".", "put", "(", "self", ".", "state_id", ")", "logger", ".", "debug", "(", "\"Finished execution of {0}: {1}\"", ".", "format", "(", "self", ",", "self", ".", "final_outcome", ")", ")", "return", "None" ]
32.4
23
def encipher(self,string): """Encipher string using Enigma M3 cipher according to initialised key. Punctuation and whitespace are removed from the input. Example:: ciphertext = Enigma(settings=('A','A','A'),rotors=(1,2,3),reflector='B', ringstellung=('F','V','N'),steckers=[('P','O'),('M','L'), ('I','U'),('K','J'),('N','H'),('Y','T'),('G','B'),('V','F'), ('R','E'),('D','C')])).encipher(plaintext) :param string: The string to encipher. :returns: The enciphered string. """ string = self.remove_punctuation(string) ret = '' for c in string.upper(): if c.isalpha(): ret += self.encipher_char(c) else: ret += c return ret
[ "def", "encipher", "(", "self", ",", "string", ")", ":", "string", "=", "self", ".", "remove_punctuation", "(", "string", ")", "ret", "=", "''", "for", "c", "in", "string", ".", "upper", "(", ")", ":", "if", "c", ".", "isalpha", "(", ")", ":", "ret", "+=", "self", ".", "encipher_char", "(", "c", ")", "else", ":", "ret", "+=", "c", "return", "ret" ]
39.5
19.8
def get_resource_class_terminal_attribute_iterator(rc): """ Returns an iterator over all terminal attributes in the given registered resource. """ for attr in itervalues_(rc.__everest_attributes__): if attr.kind == RESOURCE_ATTRIBUTE_KINDS.TERMINAL: yield attr
[ "def", "get_resource_class_terminal_attribute_iterator", "(", "rc", ")", ":", "for", "attr", "in", "itervalues_", "(", "rc", ".", "__everest_attributes__", ")", ":", "if", "attr", ".", "kind", "==", "RESOURCE_ATTRIBUTE_KINDS", ".", "TERMINAL", ":", "yield", "attr" ]
36.625
16.125
def _parse_spectra_annotation(self, line): """Parse and store the spectral annotation details """ if re.match('^PK\$NUM_PEAK(.*)', line, re.IGNORECASE): self.start_spectra_annotation = False return saplist = line.split() sarow = ( self.current_id_spectra_annotation, float(saplist[self.spectra_annotation_indexes['m/z']]) if 'm/z' in self.spectra_annotation_indexes else None, saplist[self.spectra_annotation_indexes[ 'tentative_formula']] if 'tentative_formula' in self.spectra_annotation_indexes else None, float(saplist[self.spectra_annotation_indexes[ 'mass_error(ppm)']]) if 'mass_error(ppm)' in self.spectra_annotation_indexes else None, self.current_id_meta) self.spectra_annotation_all.append(sarow) self.current_id_spectra_annotation += 1
[ "def", "_parse_spectra_annotation", "(", "self", ",", "line", ")", ":", "if", "re", ".", "match", "(", "'^PK\\$NUM_PEAK(.*)'", ",", "line", ",", "re", ".", "IGNORECASE", ")", ":", "self", ".", "start_spectra_annotation", "=", "False", "return", "saplist", "=", "line", ".", "split", "(", ")", "sarow", "=", "(", "self", ".", "current_id_spectra_annotation", ",", "float", "(", "saplist", "[", "self", ".", "spectra_annotation_indexes", "[", "'m/z'", "]", "]", ")", "if", "'m/z'", "in", "self", ".", "spectra_annotation_indexes", "else", "None", ",", "saplist", "[", "self", ".", "spectra_annotation_indexes", "[", "'tentative_formula'", "]", "]", "if", "'tentative_formula'", "in", "self", ".", "spectra_annotation_indexes", "else", "None", ",", "float", "(", "saplist", "[", "self", ".", "spectra_annotation_indexes", "[", "'mass_error(ppm)'", "]", "]", ")", "if", "'mass_error(ppm)'", "in", "self", ".", "spectra_annotation_indexes", "else", "None", ",", "self", ".", "current_id_meta", ")", "self", ".", "spectra_annotation_all", ".", "append", "(", "sarow", ")", "self", ".", "current_id_spectra_annotation", "+=", "1" ]
43
24.666667
def reference(self, reference): """ Sets the reference of this CreateCertificateIssuerConfig. The certificate name, as created in the factory, to which the certificate issuer configuration applies. The following names are reserved and cannot be configured: LwM2M, BOOTSTRAP. :param reference: The reference of this CreateCertificateIssuerConfig. :type: str """ if reference is None: raise ValueError("Invalid value for `reference`, must not be `None`") if reference is not None and len(reference) > 50: raise ValueError("Invalid value for `reference`, length must be less than or equal to `50`") if reference is not None and not re.search('(?!mbed\\.)[\\w-_.]{1,50}', reference): raise ValueError("Invalid value for `reference`, must be a follow pattern or equal to `/(?!mbed\\.)[\\w-_.]{1,50}/`") self._reference = reference
[ "def", "reference", "(", "self", ",", "reference", ")", ":", "if", "reference", "is", "None", ":", "raise", "ValueError", "(", "\"Invalid value for `reference`, must not be `None`\"", ")", "if", "reference", "is", "not", "None", "and", "len", "(", "reference", ")", ">", "50", ":", "raise", "ValueError", "(", "\"Invalid value for `reference`, length must be less than or equal to `50`\"", ")", "if", "reference", "is", "not", "None", "and", "not", "re", ".", "search", "(", "'(?!mbed\\\\.)[\\\\w-_.]{1,50}'", ",", "reference", ")", ":", "raise", "ValueError", "(", "\"Invalid value for `reference`, must be a follow pattern or equal to `/(?!mbed\\\\.)[\\\\w-_.]{1,50}/`\"", ")", "self", ".", "_reference", "=", "reference" ]
58.0625
37.5625
def get_dir(self, obj): """Return the dirattr of obj formatted with the dirfomat specified in the constructor. If the attr is None then ``None`` is returned not the string ``\'None\'``. :param obj: the fileinfo with information. :type obj: :class:`FileInfo` :returns: the directory or None :rtype: str|None :raises: None """ if self._dirattr is None: return a = attrgetter(self._dirattr)(obj) if a is None: return s = self._dirformat % a return s
[ "def", "get_dir", "(", "self", ",", "obj", ")", ":", "if", "self", ".", "_dirattr", "is", "None", ":", "return", "a", "=", "attrgetter", "(", "self", ".", "_dirattr", ")", "(", "obj", ")", "if", "a", "is", "None", ":", "return", "s", "=", "self", ".", "_dirformat", "%", "a", "return", "s" ]
32.882353
14.941176
def resource_copy(package_or_requirement, resource_name, destination): ''' Copy file/dir resource to destination. Parameters ---------- package_or_requirement : str resource_name : str destination : ~pathlib.Path Path to copy to, it must not exist. ''' args = package_or_requirement, resource_name if resource_isdir(*args): destination.mkdir() for name in resource_listdir(*args): resource_copy( package_or_requirement, str(Path(resource_name) / name), destination / name ) else: with destination.open('wb') as f: with resource_stream(*args) as source: shutil.copyfileobj(source, f)
[ "def", "resource_copy", "(", "package_or_requirement", ",", "resource_name", ",", "destination", ")", ":", "args", "=", "package_or_requirement", ",", "resource_name", "if", "resource_isdir", "(", "*", "args", ")", ":", "destination", ".", "mkdir", "(", ")", "for", "name", "in", "resource_listdir", "(", "*", "args", ")", ":", "resource_copy", "(", "package_or_requirement", ",", "str", "(", "Path", "(", "resource_name", ")", "/", "name", ")", ",", "destination", "/", "name", ")", "else", ":", "with", "destination", ".", "open", "(", "'wb'", ")", "as", "f", ":", "with", "resource_stream", "(", "*", "args", ")", "as", "source", ":", "shutil", ".", "copyfileobj", "(", "source", ",", "f", ")" ]
30.666667
15.25
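A hypothetical call (package and resource names are placeholders). The destination must not already exist, and for the directory branch its parent must, since only mkdir() is called:

from pathlib import Path
resource_copy("mypackage", "templates", Path("build/templates"))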
def set_functions(self, functions): """Check functions passed as argument and set them to be used.""" for func in functions: try: self.append_function(func) except (ValueError, AttributeError) as ex: log.error("'%s' is not a callable function: %s", func, ex) raise
[ "def", "set_functions", "(", "self", ",", "functions", ")", ":", "for", "func", "in", "functions", ":", "try", ":", "self", ".", "append_function", "(", "func", ")", "except", "(", "ValueError", ",", "AttributeError", ")", "as", "ex", ":", "log", ".", "error", "(", "\"'%s' is not a callable function: %s\"", ",", "func", ",", "ex", ")", "raise" ]
43.125
13.5
def get_tunnel_info_output_tunnel_src_ip(self, **kwargs): """Auto Generated Code. Builds the get_tunnel_info RPC body with output/tunnel/src-ip taken from kwargs and dispatches it via the callback. """ config = ET.Element("config") get_tunnel_info = ET.Element("get_tunnel_info") config = get_tunnel_info output = ET.SubElement(get_tunnel_info, "output") tunnel = ET.SubElement(output, "tunnel") src_ip = ET.SubElement(tunnel, "src-ip") src_ip.text = kwargs.pop('src_ip') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "get_tunnel_info_output_tunnel_src_ip", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "get_tunnel_info", "=", "ET", ".", "Element", "(", "\"get_tunnel_info\"", ")", "config", "=", "get_tunnel_info", "output", "=", "ET", ".", "SubElement", "(", "get_tunnel_info", ",", "\"output\"", ")", "tunnel", "=", "ET", ".", "SubElement", "(", "output", ",", "\"tunnel\"", ")", "src_ip", "=", "ET", ".", "SubElement", "(", "tunnel", ",", "\"src-ip\"", ")", "src_ip", ".", "text", "=", "kwargs", ".", "pop", "(", "'src_ip'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
38.846154
11.076923
def generate_string_to_sign(date, region, canonical_request): """ Generate string to sign. :param date: Date is input from :meth:`datetime.datetime` :param region: Region should be set to bucket region. :param canonical_request: Canonical request generated previously. """ formatted_date_time = date.strftime("%Y%m%dT%H%M%SZ") canonical_request_hasher = hashlib.sha256() canonical_request_hasher.update(canonical_request.encode('utf-8')) canonical_request_sha256 = canonical_request_hasher.hexdigest() scope = generate_scope_string(date, region) return '\n'.join([_SIGN_V4_ALGORITHM, formatted_date_time, scope, canonical_request_sha256])
[ "def", "generate_string_to_sign", "(", "date", ",", "region", ",", "canonical_request", ")", ":", "formatted_date_time", "=", "date", ".", "strftime", "(", "\"%Y%m%dT%H%M%SZ\"", ")", "canonical_request_hasher", "=", "hashlib", ".", "sha256", "(", ")", "canonical_request_hasher", ".", "update", "(", "canonical_request", ".", "encode", "(", "'utf-8'", ")", ")", "canonical_request_sha256", "=", "canonical_request_hasher", ".", "hexdigest", "(", ")", "scope", "=", "generate_scope_string", "(", "date", ",", "region", ")", "return", "'\\n'", ".", "join", "(", "[", "_SIGN_V4_ALGORITHM", ",", "formatted_date_time", ",", "scope", ",", "canonical_request_sha256", "]", ")" ]
38.789474
17.421053
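A rough standard-library equivalent showing the string-to-sign layout; it assumes _SIGN_V4_ALGORITHM is the usual SigV4 constant 'AWS4-HMAC-SHA256' and that generate_scope_string yields the conventional '<YYYYMMDD>/<region>/s3/aws4_request' scope, neither of which is shown above:

import datetime
import hashlib

date = datetime.datetime(2024, 1, 1, 12, 0, 0)
canonical_request = "GET\n/\n\nhost:example.com\n\nhost\nUNSIGNED-PAYLOAD"  # illustrative
digest = hashlib.sha256(canonical_request.encode('utf-8')).hexdigest()
scope = '/'.join([date.strftime('%Y%m%d'), 'us-east-1', 's3', 'aws4_request'])
string_to_sign = '\n'.join(['AWS4-HMAC-SHA256',
                            date.strftime('%Y%m%dT%H%M%SZ'),
                            scope,
                            digest])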
def moment(self, axis, channel=0, moment=1, *, resultant=None): """Take the nth moment of the dataset along one axis, adding lower rank channels. New channels have names ``<channel name>_<axis name>_moment_<moment num>``. Moment 0 is the integral of the slice. Moment 1 is the weighted average or "Center of Mass", normalized by the integral. Moment 2 is the variance, the central moment about the center of mass, normalized by the integral. Moments 3+ are central moments about the center of mass, normalized by the integral and by the standard deviation to the power of the moment. Moments, especially higher order moments, are susceptible to noise and baseline. When used with real data, it is recommended to use :meth:`WrightTools.data.Channel.clip` in conjunction with moments to reduce effects of noise. Parameters ---------- axis : int or str The axis to take the moment along. If given as an integer, the axis with that index is used. If given as a string, the axis with that name is used. The axis must exist, and be a 1D array-aligned axis. (i.e. have a shape with a single value which is not ``1``) The collapsed axis must be monotonic to produce correct results. The axis to collapse along is inferred from the shape of the axis. channel : int or str The channel to take the moment of. If given as an integer, the channel with that index is used. If given as a string, the channel with that name is used. The channel must have values along the axis (i.e. its shape must not be ``1`` in the dimension for which the axis is not ``1``) Default is 0, the first channel. moment : int or tuple of int The moments to take. One channel will be created for each number given. Default is 1, the center of mass. resultant : tuple of int The resultant shape after the moment operation. By default, it is intuited by the axis along which the moment is being taken. This default only works if that axis is 1D, so resultant is required if a multidimensional axis is passed as the first argument. The requirement of monotonicity applies on a per-pixel basis. See Also -------- collapse Reduce dimensionality by some mathematical operation clip Set values above/below a threshold to a particular value WrightTools.kit.joint_shape Useful for setting `resultant` kwarg based on axes not collapsed. """ # get axis index -------------------------------------------------------------------------- axis_index = None if resultant is not None: for i, (s, r) in enumerate(zip(self.shape, resultant)): if s != r and r == 1 and axis_index is None: axis_index = i elif s == r: continue else: raise wt_exceptions.ValueError( f"Invalid resultant shape '{resultant}' for shape {self.shape}. " + "Consider using `wt.kit.joint_shape` to join non-collapsed axes."
) index = wt_kit.get_index(self.axis_names, axis) if axis_index is None: axes = [i for i in range(self.ndim) if self.axes[index].shape[i] > 1] if len(axes) > 1: raise wt_exceptions.MultidimensionalAxisError(axis, "moment") elif len(axes) == 0: raise wt_exceptions.ValueError( "Axis {} is a single point, cannot compute moment".format(axis) ) axis_index = axes[0] warnings.warn("moment", category=wt_exceptions.EntireDatasetInMemoryWarning) channel_index = wt_kit.get_index(self.channel_names, channel) channel = self.channel_names[channel_index] if self[channel].shape[axis_index] == 1: raise wt_exceptions.ValueError( "Channel '{}' has a single point along Axis '{}', cannot compute moment".format( channel, axis ) ) new_shape = list(self[channel].shape) new_shape[axis_index] = 1 channel = self[channel] axis_inp = axis axis = self.axes[index] x = axis[:] if np.any(np.isnan(x)): raise wt_exceptions.ValueError("Axis '{}' includes NaN".format(axis_inp)) y = np.nan_to_num(channel[:]) try: moments = tuple(moment) except TypeError: moments = (moment,) multiplier = 1 if 0 in moments: # May be possible to optimize, probably doesn't need the sum # only matters for integral, all others normalize by integral multiplier = np.sign( np.sum(np.diff(x, axis=axis_index), axis=axis_index, keepdims=True) ) for moment in moments: about = 0 norm = 1 if moment > 0: norm = np.trapz(y, x, axis=axis_index) norm = np.array(norm) norm.shape = new_shape if moment > 1: about = np.trapz(x * y, x, axis=axis_index) about = np.array(about) about.shape = new_shape about /= norm if moment > 2: sigma = np.trapz((x - about) ** 2 * y, x, axis=axis_index) sigma = np.array(sigma) sigma.shape = new_shape sigma /= norm sigma **= 0.5 norm *= sigma ** moment values = np.trapz((x - about) ** moment * y, x, axis=axis_index) values = np.array(values) values.shape = new_shape values /= norm if moment == 0: values *= multiplier self.create_channel( "{}_{}_{}_{}".format(channel.natural_name, axis_inp, "moment", moment), values=values, )
[ "def", "moment", "(", "self", ",", "axis", ",", "channel", "=", "0", ",", "moment", "=", "1", ",", "*", ",", "resultant", "=", "None", ")", ":", "# get axis index --------------------------------------------------------------------------", "axis_index", "=", "None", "if", "resultant", "is", "not", "None", ":", "for", "i", ",", "(", "s", ",", "r", ")", "in", "enumerate", "(", "zip", "(", "self", ".", "shape", ",", "resultant", ")", ")", ":", "if", "s", "!=", "r", "and", "r", "==", "1", "and", "axis_index", "is", "None", ":", "axis_index", "=", "i", "elif", "s", "==", "r", ":", "continue", "else", ":", "raise", "wt_exceptions", ".", "ValueError", "(", "f\"Invalid resultant shape '{resultant}' for shape {self.shape}. \"", "+", "\"Consider using `wt.kit.joint_shape` to join non-collapsed axes.\"", ")", "index", "=", "wt_kit", ".", "get_index", "(", "self", ".", "axis_names", ",", "axis", ")", "if", "axis_index", "is", "None", ":", "axes", "=", "[", "i", "for", "i", "in", "range", "(", "self", ".", "ndim", ")", "if", "self", ".", "axes", "[", "index", "]", ".", "shape", "[", "i", "]", ">", "1", "]", "if", "len", "(", "axes", ")", ">", "1", ":", "raise", "wt_exceptions", ".", "MultidimensionalAxisError", "(", "axis", ",", "\"moment\"", ")", "elif", "len", "(", "axes", ")", "==", "0", ":", "raise", "wt_exceptions", ".", "ValueError", "(", "\"Axis {} is a single point, cannot compute moment\"", ".", "format", "(", "axis", ")", ")", "axis_index", "=", "axes", "[", "0", "]", "warnings", ".", "warn", "(", "\"moment\"", ",", "category", "=", "wt_exceptions", ".", "EntireDatasetInMemoryWarning", ")", "channel_index", "=", "wt_kit", ".", "get_index", "(", "self", ".", "channel_names", ",", "channel", ")", "channel", "=", "self", ".", "channel_names", "[", "channel_index", "]", "if", "self", "[", "channel", "]", ".", "shape", "[", "axis_index", "]", "==", "1", ":", "raise", "wt_exceptions", ".", "ValueError", "(", "\"Channel '{}' has a single point along Axis '{}', cannot compute moment\"", ".", "format", "(", "channel", ",", "axis", ")", ")", "new_shape", "=", "list", "(", "self", "[", "channel", "]", ".", "shape", ")", "new_shape", "[", "axis_index", "]", "=", "1", "channel", "=", "self", "[", "channel", "]", "axis_inp", "=", "axis", "axis", "=", "self", ".", "axes", "[", "index", "]", "x", "=", "axis", "[", ":", "]", "if", "np", ".", "any", "(", "np", ".", "isnan", "(", "x", ")", ")", ":", "raise", "wt_exceptions", ".", "ValueError", "(", "\"Axis '{}' includes NaN\"", ".", "format", "(", "axis_inp", ")", ")", "y", "=", "np", ".", "nan_to_num", "(", "channel", "[", ":", "]", ")", "try", ":", "moments", "=", "tuple", "(", "moment", ")", "except", "TypeError", ":", "moments", "=", "(", "moment", ",", ")", "multiplier", "=", "1", "if", "0", "in", "moments", ":", "# May be possible to optimize, probably doesn't need the sum", "# only matters for integral, all others normalize by integral", "multiplier", "=", "np", ".", "sign", "(", "np", ".", "sum", "(", "np", ".", "diff", "(", "x", ",", "axis", "=", "axis_index", ")", ",", "axis", "=", "axis_index", ",", "keepdims", "=", "True", ")", ")", "for", "moment", "in", "moments", ":", "about", "=", "0", "norm", "=", "1", "if", "moment", ">", "0", ":", "norm", "=", "np", ".", "trapz", "(", "y", ",", "x", ",", "axis", "=", "axis_index", ")", "norm", "=", "np", ".", "array", "(", "norm", ")", "norm", ".", "shape", "=", "new_shape", "if", "moment", ">", "1", ":", "about", "=", "np", ".", "trapz", "(", "x", "*", "y", ",", "x", ",", "axis", "=", "axis_index", ")", 
"about", "=", "np", ".", "array", "(", "about", ")", "about", ".", "shape", "=", "new_shape", "about", "/=", "norm", "if", "moment", ">", "2", ":", "sigma", "=", "np", ".", "trapz", "(", "(", "x", "-", "about", ")", "**", "2", "*", "y", ",", "x", ",", "axis", "=", "axis_index", ")", "sigma", "=", "np", ".", "array", "(", "sigma", ")", "sigma", ".", "shape", "=", "new_shape", "sigma", "/=", "norm", "sigma", "**=", "0.5", "norm", "*=", "sigma", "**", "moment", "values", "=", "np", ".", "trapz", "(", "(", "x", "-", "about", ")", "**", "moment", "*", "y", ",", "x", ",", "axis", "=", "axis_index", ")", "values", "=", "np", ".", "array", "(", "values", ")", "values", ".", "shape", "=", "new_shape", "values", "/=", "norm", "if", "moment", "==", "0", ":", "values", "*=", "multiplier", "self", ".", "create_channel", "(", "\"{}_{}_{}_{}\"", ".", "format", "(", "channel", ".", "natural_name", ",", "axis_inp", ",", "\"moment\"", ",", "moment", ")", ",", "values", "=", "values", ",", ")" ]
42.708333
22.395833
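A NumPy-only sketch of the moment-0 and moment-1 math the method performs per pixel, on a synthetic 1D profile (the real method operates on WrightTools channels and writes the result into a new channel):

import numpy as np

x = np.linspace(-5, 5, 201)
y = np.exp(-0.5 * (x - 1.2) ** 2)                # synthetic peak centered at 1.2
integral = np.trapz(y, x)                        # moment 0
center_of_mass = np.trapz(x * y, x) / integral   # moment 1
print(round(float(center_of_mass), 2))           # ~1.2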
def generate_flare_lightcurve( times, mags=None, errs=None, paramdists={ # flare peak amplitude from 0.01 mag to 1.0 mag above median. this # is tuned for redder bands, flares are much stronger in bluer # bands, so tune appropriately for your situation. 'amplitude':sps.uniform(loc=0.01,scale=0.99), # up to 5 flares per LC and at least 1 'nflares':[1,5], # 10 minutes to 1 hour for rise stdev 'risestdev':sps.uniform(loc=0.007, scale=0.04), # 1 hour to 4 hours for decay time constant 'decayconst':sps.uniform(loc=0.04, scale=0.163) }, magsarefluxes=False, ): '''This generates fake flare light curves. Parameters ---------- times : np.array This is an array of time values that will be used as the time base. mags,errs : np.array These arrays will have the model added to them. If either is None, `np.full_like(times, 0.0)` will be used as a substitute and the model light curve will be centered around 0.0. paramdists : dict This is a dict of parameter distributions to use for the model params, containing the following keys :: {'amplitude', 'nflares', 'risestdev', 'decayconst'} The values of these keys should all be 'frozen' scipy.stats distribution objects, e.g.: https://docs.scipy.org/doc/scipy/reference/stats.html#continuous-distributions The `flare_peak_time` for each flare will be generated automatically between `times.min()` and `times.max()` using a uniform distribution. The `amplitude` will be flipped automatically as appropriate if `magsarefluxes=True`. magsarefluxes : bool If the generated time series is meant to be a flux time-series, set this to True to get the correct sign of variability amplitude. Returns ------- dict A dict of the form below is returned:: {'vartype': 'flare', 'params': {'amplitude': generated value of flare amplitudes, 'nflares': generated value of number of flares, 'risestdev': generated value of stdev of rise time, 'decayconst': generated value of decay constant, 'peaktime': generated value of flare peak time}, 'times': the model times, 'mags': the model mags, 'errs': the model errs, 'varamplitude': the generated amplitude of variability == 'amplitude'} ''' if mags is None: mags = np.full_like(times, 0.0) if errs is None: errs = np.full_like(times, 0.0) nflares = npr.randint(paramdists['nflares'][0], high=paramdists['nflares'][1]) # generate random flare peak times based on the number of flares flarepeaktimes = ( npr.random( size=nflares )*(times.max() - times.min()) + times.min() ) # now add the flares to the time-series params = {'nflares':nflares} for flareind, peaktime in zip(range(nflares), flarepeaktimes): # choose the amplitude, rise stdev and decay time constant amp = paramdists['amplitude'].rvs(size=1) risestdev = paramdists['risestdev'].rvs(size=1) decayconst = paramdists['decayconst'].rvs(size=1) # fix the sign of the amplitude if it needs to be flipped if magsarefluxes and amp < 0.0: amp = -amp elif not magsarefluxes and amp > 0.0: amp = -amp # add this flare to the light curve modelmags, ptimes, pmags, perrs = ( flares.flare_model( [amp, peaktime, risestdev, decayconst], times, mags, errs ) ) # update the mags mags = modelmags # add the flare params to the modeldict params[flareind] = {'peaktime':peaktime, 'amplitude':amp, 'risestdev':risestdev, 'decayconst':decayconst} # # done with all flares # # return a dict with everything modeldict = { 'vartype':'flare', 'params':params, 'times':times, 'mags':mags, 'errs':errs, 'varperiod':None, # FIXME: this is complicated because we can have multiple flares # figure out a good way to handle this upstream
'varamplitude':[params[x]['amplitude'] for x in range(params['nflares'])], } return modeldict
[ "def", "generate_flare_lightcurve", "(", "times", ",", "mags", "=", "None", ",", "errs", "=", "None", ",", "paramdists", "=", "{", "# flare peak amplitude from 0.01 mag to 1.0 mag above median. this", "# is tuned for redder bands, flares are much stronger in bluer", "# bands, so tune appropriately for your situation.", "'amplitude'", ":", "sps", ".", "uniform", "(", "loc", "=", "0.01", ",", "scale", "=", "0.99", ")", ",", "# up to 5 flares per LC and at least 1", "'nflares'", ":", "[", "1", ",", "5", "]", ",", "# 10 minutes to 1 hour for rise stdev", "'risestdev'", ":", "sps", ".", "uniform", "(", "loc", "=", "0.007", ",", "scale", "=", "0.04", ")", ",", "# 1 hour to 4 hours for decay time constant", "'decayconst'", ":", "sps", ".", "uniform", "(", "loc", "=", "0.04", ",", "scale", "=", "0.163", ")", "}", ",", "magsarefluxes", "=", "False", ",", ")", ":", "if", "mags", "is", "None", ":", "mags", "=", "np", ".", "full_like", "(", "times", ",", "0.0", ")", "if", "errs", "is", "None", ":", "errs", "=", "np", ".", "full_like", "(", "times", ",", "0.0", ")", "nflares", "=", "npr", ".", "randint", "(", "paramdists", "[", "'nflares'", "]", "[", "0", "]", ",", "high", "=", "paramdists", "[", "'nflares'", "]", "[", "1", "]", ")", "# generate random flare peak times based on the number of flares", "flarepeaktimes", "=", "(", "npr", ".", "random", "(", "size", "=", "nflares", ")", "*", "(", "times", ".", "max", "(", ")", "-", "times", ".", "min", "(", ")", ")", "+", "times", ".", "min", "(", ")", ")", "# now add the flares to the time-series", "params", "=", "{", "'nflares'", ":", "nflares", "}", "for", "flareind", ",", "peaktime", "in", "zip", "(", "range", "(", "nflares", ")", ",", "flarepeaktimes", ")", ":", "# choose the amplitude, rise stdev and decay time constant", "amp", "=", "paramdists", "[", "'amplitude'", "]", ".", "rvs", "(", "size", "=", "1", ")", "risestdev", "=", "paramdists", "[", "'risestdev'", "]", ".", "rvs", "(", "size", "=", "1", ")", "decayconst", "=", "paramdists", "[", "'decayconst'", "]", ".", "rvs", "(", "size", "=", "1", ")", "# fix the transit depth if it needs to be flipped", "if", "magsarefluxes", "and", "amp", "<", "0.0", ":", "amp", "=", "-", "amp", "elif", "not", "magsarefluxes", "and", "amp", ">", "0.0", ":", "amp", "=", "-", "amp", "# add this flare to the light curve", "modelmags", ",", "ptimes", ",", "pmags", ",", "perrs", "=", "(", "flares", ".", "flare_model", "(", "[", "amp", ",", "peaktime", ",", "risestdev", ",", "decayconst", "]", ",", "times", ",", "mags", ",", "errs", ")", ")", "# update the mags", "mags", "=", "modelmags", "# add the flare params to the modeldict", "params", "[", "flareind", "]", "=", "{", "'peaktime'", ":", "peaktime", ",", "'amplitude'", ":", "amp", ",", "'risestdev'", ":", "risestdev", ",", "'decayconst'", ":", "decayconst", "}", "#", "# done with all flares", "#", "# return a dict with everything", "modeldict", "=", "{", "'vartype'", ":", "'flare'", ",", "'params'", ":", "params", ",", "'times'", ":", "times", ",", "'mags'", ":", "mags", ",", "'errs'", ":", "errs", ",", "'varperiod'", ":", "None", ",", "# FIXME: this is complicated because we can have multiple flares", "# figure out a good way to handle this upstream", "'varamplitude'", ":", "[", "params", "[", "x", "]", "[", "'amplitude'", "]", "for", "x", "in", "range", "(", "params", "[", "'nflares'", "]", ")", "]", ",", "}", "return", "modeldict" ]
32.507042
23.591549
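A hedged usage sketch; it assumes the module's own imports (np, npr, sps, and the flares model) are in place as in the function above:

import numpy as np

times = np.arange(0.0, 25.0, 0.01)   # illustrative 25-day time base
fake = generate_flare_lightcurve(times, magsarefluxes=False)
print(fake['params']['nflares'], fake['varamplitude'])
# note: np.random.randint's `high` bound is exclusive, so the default
# nflares=[1,5] draws 1 to 4 flares per light curve, not 5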