text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def validate_endpoint(ctx, param, value): """Validate endpoint.""" try: config = ctx.obj['config'] except Exception: return endpoint = default_endpoint(ctx, param, value) if endpoint not in config.get('endpoints', {}): raise click.UsageError('Unknown endpoint: {0}'.format(endpoint)) return endpoint
[ "def", "validate_endpoint", "(", "ctx", ",", "param", ",", "value", ")", ":", "try", ":", "config", "=", "ctx", ".", "obj", "[", "'config'", "]", "except", "Exception", ":", "return", "endpoint", "=", "default_endpoint", "(", "ctx", ",", "param", ",", "value", ")", "if", "endpoint", "not", "in", "config", ".", "get", "(", "'endpoints'", ",", "{", "}", ")", ":", "raise", "click", ".", "UsageError", "(", "'Unknown endpoint: {0}'", ".", "format", "(", "endpoint", ")", ")", "return", "endpoint" ]
26
21.384615
def to_hdf5(input): """ Convert .xml and .npz files to .hdf5 files. """ with performance.Monitor('to_hdf5') as mon: for input_file in input: if input_file.endswith('.npz'): output = convert_npz_hdf5(input_file, input_file[:-3] + 'hdf5') elif input_file.endswith('.xml'): # for source model files output = convert_xml_hdf5(input_file, input_file[:-3] + 'hdf5') else: continue print('Generated %s' % output) print(mon)
[ "def", "to_hdf5", "(", "input", ")", ":", "with", "performance", ".", "Monitor", "(", "'to_hdf5'", ")", "as", "mon", ":", "for", "input_file", "in", "input", ":", "if", "input_file", ".", "endswith", "(", "'.npz'", ")", ":", "output", "=", "convert_npz_hdf5", "(", "input_file", ",", "input_file", "[", ":", "-", "3", "]", "+", "'hdf5'", ")", "elif", "input_file", ".", "endswith", "(", "'.xml'", ")", ":", "# for source model files", "output", "=", "convert_xml_hdf5", "(", "input_file", ",", "input_file", "[", ":", "-", "3", "]", "+", "'hdf5'", ")", "else", ":", "continue", "print", "(", "'Generated %s'", "%", "output", ")", "print", "(", "mon", ")" ]
38
15.857143
def use(self, algorithm): """Change the hash algorithm you gonna use. """ algorithm = algorithm.lower() if algorithm == "md5": self.default_hash_method = hashlib.md5 elif algorithm == "sha1": self.default_hash_method = hashlib.sha1 elif algorithm == "sha224": self.default_hash_method = hashlib.sha224 elif algorithm == "sha256": self.default_hash_method = hashlib.sha256 elif algorithm == "sha384": self.default_hash_method = hashlib.sha384 elif algorithm == "sha512": self.default_hash_method = hashlib.sha512 else: raise WrongHashAlgorithmError("There's no algorithm names '%s'! " "use one of 'md5', 'sha1', 'sha224', " "'sha256', 'sha384', 'sha512'." % algorithm)
[ "def", "use", "(", "self", ",", "algorithm", ")", ":", "algorithm", "=", "algorithm", ".", "lower", "(", ")", "if", "algorithm", "==", "\"md5\"", ":", "self", ".", "default_hash_method", "=", "hashlib", ".", "md5", "elif", "algorithm", "==", "\"sha1\"", ":", "self", ".", "default_hash_method", "=", "hashlib", ".", "sha1", "elif", "algorithm", "==", "\"sha224\"", ":", "self", ".", "default_hash_method", "=", "hashlib", ".", "sha224", "elif", "algorithm", "==", "\"sha256\"", ":", "self", ".", "default_hash_method", "=", "hashlib", ".", "sha256", "elif", "algorithm", "==", "\"sha384\"", ":", "self", ".", "default_hash_method", "=", "hashlib", ".", "sha384", "elif", "algorithm", "==", "\"sha512\"", ":", "self", ".", "default_hash_method", "=", "hashlib", ".", "sha512", "else", ":", "raise", "WrongHashAlgorithmError", "(", "\"There's no algorithm names '%s'! \"", "\"use one of 'md5', 'sha1', 'sha224', \"", "\"'sha256', 'sha384', 'sha512'.\"", "%", "algorithm", ")" ]
44.8
13.9
def contains_key(self, key): """ Determines whether this multimap contains an entry with the key. **Warning: This method uses __hash__ and __eq__ methods of binary form of the key, not the actual implementations of __hash__ and __eq__ defined in key's class.** :param key: (object), the specified key. :return: (bool), ``true`` if this multimap contains an entry for the specified key. """ check_not_none(key, "key can't be None") key_data = self._to_data(key) return self._encode_invoke_on_key(multi_map_contains_key_codec, key_data, key=key_data, thread_id=thread_id())
[ "def", "contains_key", "(", "self", ",", "key", ")", ":", "check_not_none", "(", "key", ",", "\"key can't be None\"", ")", "key_data", "=", "self", ".", "_to_data", "(", "key", ")", "return", "self", ".", "_encode_invoke_on_key", "(", "multi_map_contains_key_codec", ",", "key_data", ",", "key", "=", "key_data", ",", "thread_id", "=", "thread_id", "(", ")", ")" ]
48.714286
26.428571
def compute_k(self, memory_antecedent): """Compute key Tensor k. Args: memory_antecedent: a Tensor with dimensions {memory_input_dim} + other_dims Returns: a Tensor with dimensions memory_heads_dims + {key_dim} + other_dims """ if self.shared_kv: raise ValueError("compute_k cannot be called with shared_kv") ret = mtf.einsum( [memory_antecedent, self.wk], reduced_dims=[self.memory_input_dim]) if self.combine_dims: ret = mtf.replace_dimensions(ret, ret.shape.dims[-1], self.k_dims) return ret
[ "def", "compute_k", "(", "self", ",", "memory_antecedent", ")", ":", "if", "self", ".", "shared_kv", ":", "raise", "ValueError", "(", "\"compute_k cannot be called with shared_kv\"", ")", "ret", "=", "mtf", ".", "einsum", "(", "[", "memory_antecedent", ",", "self", ".", "wk", "]", ",", "reduced_dims", "=", "[", "self", ".", "memory_input_dim", "]", ")", "if", "self", ".", "combine_dims", ":", "ret", "=", "mtf", ".", "replace_dimensions", "(", "ret", ",", "ret", ".", "shape", ".", "dims", "[", "-", "1", "]", ",", "self", ".", "k_dims", ")", "return", "ret" ]
32.882353
17.764706
def serve_get(self, path, **params): """ Find a GET callback for the given HTTP path, call it and return the results. The callback is called with two arguments, the path used to match it, and params which include the BaseHTTPRequestHandler instance. The callback must return a tuple: (code, content, content_type) If multiple registrations match the path, the one with the longest matching text will be used. Matches are always anchored at the start of the path. None is returned if no registered callback is willing to handle a path. """ if path is None: return None matched = self._match_path(path, self.get_registrations) if matched is None: return None else: return matched(path, **params)
[ "def", "serve_get", "(", "self", ",", "path", ",", "*", "*", "params", ")", ":", "if", "path", "is", "None", ":", "return", "None", "matched", "=", "self", ".", "_match_path", "(", "path", ",", "self", ".", "get_registrations", ")", "if", "matched", "is", "None", ":", "return", "None", "else", ":", "return", "matched", "(", "path", ",", "*", "*", "params", ")" ]
35.73913
23.217391
def fetch(self): """ Returns a tuple of the major version together with the appropriate SHA and dirty bit (for development version only). """ if self._release is not None: return self self._release = self.expected_release if not self.fpath: self._commit = self._expected_commit return self # Only git right now but easily extended to SVN, Mercurial, etc. for cmd in ['git', 'git.cmd', 'git.exe']: try: self.git_fetch(cmd) break except EnvironmentError: pass return self
[ "def", "fetch", "(", "self", ")", ":", "if", "self", ".", "_release", "is", "not", "None", ":", "return", "self", "self", ".", "_release", "=", "self", ".", "expected_release", "if", "not", "self", ".", "fpath", ":", "self", ".", "_commit", "=", "self", ".", "_expected_commit", "return", "self", "# Only git right now but easily extended to SVN, Mercurial, etc.", "for", "cmd", "in", "[", "'git'", ",", "'git.cmd'", ",", "'git.exe'", "]", ":", "try", ":", "self", ".", "git_fetch", "(", "cmd", ")", "break", "except", "EnvironmentError", ":", "pass", "return", "self" ]
30.47619
16.857143
def get_ip(host): ''' Return the ip associated with the named host CLI Example: .. code-block:: bash salt '*' hosts.get_ip <hostname> ''' hosts = _list_hosts() if not hosts: return '' # Look for the op for addr in hosts: if host in hosts[addr]: return addr # ip not found return ''
[ "def", "get_ip", "(", "host", ")", ":", "hosts", "=", "_list_hosts", "(", ")", "if", "not", "hosts", ":", "return", "''", "# Look for the op", "for", "addr", "in", "hosts", ":", "if", "host", "in", "hosts", "[", "addr", "]", ":", "return", "addr", "# ip not found", "return", "''" ]
18.210526
22.631579
def get(self, label, default=None): """ Returns value occupying requested label, default to specified missing value if not present. Analogous to dict.get Parameters ---------- label : object Label value looking for default : object, optional Value to return if label not in index Returns ------- y : scalar """ if label in self.index: loc = self.index.get_loc(label) return self._get_val_at(loc) else: return default
[ "def", "get", "(", "self", ",", "label", ",", "default", "=", "None", ")", ":", "if", "label", "in", "self", ".", "index", ":", "loc", "=", "self", ".", "index", ".", "get_loc", "(", "label", ")", "return", "self", ".", "_get_val_at", "(", "loc", ")", "else", ":", "return", "default" ]
26.761905
16.190476
def kl_reverse(logu, self_normalized=False, name=None): """The reverse Kullback-Leibler Csiszar-function in log-space. A Csiszar-function is a member of, ```none F = { f:R_+ to R : f convex }. ``` When `self_normalized = True`, the KL-reverse Csiszar-function is: ```none f(u) = -log(u) + (u - 1) ``` When `self_normalized = False` the `(u - 1)` term is omitted. Observe that as an f-Divergence, this Csiszar-function implies: ```none D_f[p, q] = KL[q, p] ``` The KL is "reverse" because in maximum likelihood we think of minimizing `q` as in `KL[p, q]`. Warning: when self_normalized = True` this function makes non-log-space calculations and may therefore be numerically unstable for `|logu| >> 0`. Args: logu: `float`-like `Tensor` representing `log(u)` from above. self_normalized: Python `bool` indicating whether `f'(u=1)=0`. When `f'(u=1)=0` the implied Csiszar f-Divergence remains non-negative even when `p, q` are unnormalized measures. name: Python `str` name prefixed to Ops created by this function. Returns: kl_reverse_of_u: `float`-like `Tensor` of the Csiszar-function evaluated at `u = exp(logu)`. Raises: TypeError: if `self_normalized` is `None` or a `Tensor`. """ with tf.compat.v1.name_scope(name, "kl_reverse", [logu]): return amari_alpha(logu, alpha=0., self_normalized=self_normalized)
[ "def", "kl_reverse", "(", "logu", ",", "self_normalized", "=", "False", ",", "name", "=", "None", ")", ":", "with", "tf", ".", "compat", ".", "v1", ".", "name_scope", "(", "name", ",", "\"kl_reverse\"", ",", "[", "logu", "]", ")", ":", "return", "amari_alpha", "(", "logu", ",", "alpha", "=", "0.", ",", "self_normalized", "=", "self_normalized", ")" ]
29.673913
28.826087
def moderate_model(ParentModel, publication_date_field=None, enable_comments_field=None): """ Register a parent model (e.g. ``Blog`` or ``Article``) that should receive comment moderation. :param ParentModel: The parent model, e.g. a ``Blog`` or ``Article`` model. :param publication_date_field: The field name of a :class:`~django.db.models.DateTimeField` in the parent model which stores the publication date. :type publication_date_field: str :param enable_comments_field: The field name of a :class:`~django.db.models.BooleanField` in the parent model which stores the whether comments are enabled. :type enable_comments_field: str """ attrs = { 'auto_close_field': publication_date_field, 'auto_moderate_field': publication_date_field, 'enable_field': enable_comments_field, } ModerationClass = type(ParentModel.__name__ + 'Moderator', (FluentCommentsModerator,), attrs) moderator.register(ParentModel, ModerationClass)
[ "def", "moderate_model", "(", "ParentModel", ",", "publication_date_field", "=", "None", ",", "enable_comments_field", "=", "None", ")", ":", "attrs", "=", "{", "'auto_close_field'", ":", "publication_date_field", ",", "'auto_moderate_field'", ":", "publication_date_field", ",", "'enable_field'", ":", "enable_comments_field", ",", "}", "ModerationClass", "=", "type", "(", "ParentModel", ".", "__name__", "+", "'Moderator'", ",", "(", "FluentCommentsModerator", ",", ")", ",", "attrs", ")", "moderator", ".", "register", "(", "ParentModel", ",", "ModerationClass", ")" ]
57.705882
34.411765
def eventFilter(self, object, event): """ Filters events for the popup tree widget. :param object | <QObject> event | <QEvent> :retuen <bool> | consumed """ edit = self.lineEdit() if not (object and object == self._treePopupWidget): return super(XOrbRecordBox, self).eventFilter(object, event) elif event.type() == event.Show: object.resizeToContents() object.horizontalScrollBar().setValue(0) elif edit and event.type() == event.KeyPress: # accept lookup if event.key() in (Qt.Key_Enter, Qt.Key_Return, Qt.Key_Tab, Qt.Key_Backtab): item = object.currentItem() text = edit.text() if not text: record = None item = None elif isinstance(item, XOrbRecordItem): record = item.record() if record and item.isSelected() and not item.isHidden(): self.hidePopup() self.setCurrentRecord(record) event.accept() return True else: self.setCurrentRecord(None) self.hidePopup() edit.setText(text) edit.keyPressEvent(event) event.accept() return True # cancel lookup elif event.key() == Qt.Key_Escape: text = edit.text() self.setCurrentRecord(None) edit.setText(text) self.hidePopup() event.accept() return True # update the search info else: edit.keyPressEvent(event) elif edit and event.type() == event.KeyRelease: edit.keyReleaseEvent(event) elif edit and event.type() == event.MouseButtonPress: local_pos = object.mapFromGlobal(event.globalPos()) in_widget = object.rect().contains(local_pos) if not in_widget: text = edit.text() self.setCurrentRecord(None) edit.setText(text) self.hidePopup() event.accept() return True return super(XOrbRecordBox, self).eventFilter(object, event)
[ "def", "eventFilter", "(", "self", ",", "object", ",", "event", ")", ":", "edit", "=", "self", ".", "lineEdit", "(", ")", "if", "not", "(", "object", "and", "object", "==", "self", ".", "_treePopupWidget", ")", ":", "return", "super", "(", "XOrbRecordBox", ",", "self", ")", ".", "eventFilter", "(", "object", ",", "event", ")", "elif", "event", ".", "type", "(", ")", "==", "event", ".", "Show", ":", "object", ".", "resizeToContents", "(", ")", "object", ".", "horizontalScrollBar", "(", ")", ".", "setValue", "(", "0", ")", "elif", "edit", "and", "event", ".", "type", "(", ")", "==", "event", ".", "KeyPress", ":", "# accept lookup\r", "if", "event", ".", "key", "(", ")", "in", "(", "Qt", ".", "Key_Enter", ",", "Qt", ".", "Key_Return", ",", "Qt", ".", "Key_Tab", ",", "Qt", ".", "Key_Backtab", ")", ":", "item", "=", "object", ".", "currentItem", "(", ")", "text", "=", "edit", ".", "text", "(", ")", "if", "not", "text", ":", "record", "=", "None", "item", "=", "None", "elif", "isinstance", "(", "item", ",", "XOrbRecordItem", ")", ":", "record", "=", "item", ".", "record", "(", ")", "if", "record", "and", "item", ".", "isSelected", "(", ")", "and", "not", "item", ".", "isHidden", "(", ")", ":", "self", ".", "hidePopup", "(", ")", "self", ".", "setCurrentRecord", "(", "record", ")", "event", ".", "accept", "(", ")", "return", "True", "else", ":", "self", ".", "setCurrentRecord", "(", "None", ")", "self", ".", "hidePopup", "(", ")", "edit", ".", "setText", "(", "text", ")", "edit", ".", "keyPressEvent", "(", "event", ")", "event", ".", "accept", "(", ")", "return", "True", "# cancel lookup\r", "elif", "event", ".", "key", "(", ")", "==", "Qt", ".", "Key_Escape", ":", "text", "=", "edit", ".", "text", "(", ")", "self", ".", "setCurrentRecord", "(", "None", ")", "edit", ".", "setText", "(", "text", ")", "self", ".", "hidePopup", "(", ")", "event", ".", "accept", "(", ")", "return", "True", "# update the search info\r", "else", ":", "edit", ".", "keyPressEvent", "(", "event", ")", "elif", "edit", "and", "event", ".", "type", "(", ")", "==", "event", ".", "KeyRelease", ":", "edit", ".", "keyReleaseEvent", "(", "event", ")", "elif", "edit", "and", "event", ".", "type", "(", ")", "==", "event", ".", "MouseButtonPress", ":", "local_pos", "=", "object", ".", "mapFromGlobal", "(", "event", ".", "globalPos", "(", ")", ")", "in_widget", "=", "object", ".", "rect", "(", ")", ".", "contains", "(", "local_pos", ")", "if", "not", "in_widget", ":", "text", "=", "edit", ".", "text", "(", ")", "self", ".", "setCurrentRecord", "(", "None", ")", "edit", ".", "setText", "(", "text", ")", "self", ".", "hidePopup", "(", ")", "event", ".", "accept", "(", ")", "return", "True", "return", "super", "(", "XOrbRecordBox", ",", "self", ")", ".", "eventFilter", "(", "object", ",", "event", ")" ]
34.769231
12.666667
def process_raw_data(cls, raw_data): """Create a new model using raw API response.""" properties = raw_data.get("properties", {}) raw_metadata = raw_data.get("resourceMetadata", None) if raw_metadata is not None: metadata = ResourceMetadata.from_raw_data(raw_metadata) raw_data["resourceMetadata"] = metadata raw_state = properties.get("configurationState", None) if raw_state is not None: configuration = ConfigurationState.from_raw_data(raw_state) properties["configurationState"] = configuration return super(_BaseHNVModel, cls).process_raw_data(raw_data)
[ "def", "process_raw_data", "(", "cls", ",", "raw_data", ")", ":", "properties", "=", "raw_data", ".", "get", "(", "\"properties\"", ",", "{", "}", ")", "raw_metadata", "=", "raw_data", ".", "get", "(", "\"resourceMetadata\"", ",", "None", ")", "if", "raw_metadata", "is", "not", "None", ":", "metadata", "=", "ResourceMetadata", ".", "from_raw_data", "(", "raw_metadata", ")", "raw_data", "[", "\"resourceMetadata\"", "]", "=", "metadata", "raw_state", "=", "properties", ".", "get", "(", "\"configurationState\"", ",", "None", ")", "if", "raw_state", "is", "not", "None", ":", "configuration", "=", "ConfigurationState", ".", "from_raw_data", "(", "raw_state", ")", "properties", "[", "\"configurationState\"", "]", "=", "configuration", "return", "super", "(", "_BaseHNVModel", ",", "cls", ")", ".", "process_raw_data", "(", "raw_data", ")" ]
43.4
20.333333
def create_missing_perms(self): """Creates missing perms for datasources, schemas and metrics""" from superset import db from superset.models import core as models logging.info( 'Fetching a set of all perms to lookup which ones are missing') all_pvs = set() for pv in self.get_session.query(self.permissionview_model).all(): if pv.permission and pv.view_menu: all_pvs.add((pv.permission.name, pv.view_menu.name)) def merge_pv(view_menu, perm): """Create permission view menu only if it doesn't exist""" if view_menu and perm and (view_menu, perm) not in all_pvs: self.merge_perm(view_menu, perm) logging.info('Creating missing datasource permissions.') datasources = ConnectorRegistry.get_all_datasources(db.session) for datasource in datasources: merge_pv('datasource_access', datasource.get_perm()) merge_pv('schema_access', datasource.schema_perm) logging.info('Creating missing database permissions.') databases = db.session.query(models.Database).all() for database in databases: merge_pv('database_access', database.perm) logging.info('Creating missing metrics permissions') metrics = [] for datasource_class in ConnectorRegistry.sources.values(): metrics += list(db.session.query(datasource_class.metric_class).all()) for metric in metrics: if metric.is_restricted: merge_pv('metric_access', metric.perm)
[ "def", "create_missing_perms", "(", "self", ")", ":", "from", "superset", "import", "db", "from", "superset", ".", "models", "import", "core", "as", "models", "logging", ".", "info", "(", "'Fetching a set of all perms to lookup which ones are missing'", ")", "all_pvs", "=", "set", "(", ")", "for", "pv", "in", "self", ".", "get_session", ".", "query", "(", "self", ".", "permissionview_model", ")", ".", "all", "(", ")", ":", "if", "pv", ".", "permission", "and", "pv", ".", "view_menu", ":", "all_pvs", ".", "add", "(", "(", "pv", ".", "permission", ".", "name", ",", "pv", ".", "view_menu", ".", "name", ")", ")", "def", "merge_pv", "(", "view_menu", ",", "perm", ")", ":", "\"\"\"Create permission view menu only if it doesn't exist\"\"\"", "if", "view_menu", "and", "perm", "and", "(", "view_menu", ",", "perm", ")", "not", "in", "all_pvs", ":", "self", ".", "merge_perm", "(", "view_menu", ",", "perm", ")", "logging", ".", "info", "(", "'Creating missing datasource permissions.'", ")", "datasources", "=", "ConnectorRegistry", ".", "get_all_datasources", "(", "db", ".", "session", ")", "for", "datasource", "in", "datasources", ":", "merge_pv", "(", "'datasource_access'", ",", "datasource", ".", "get_perm", "(", ")", ")", "merge_pv", "(", "'schema_access'", ",", "datasource", ".", "schema_perm", ")", "logging", ".", "info", "(", "'Creating missing database permissions.'", ")", "databases", "=", "db", ".", "session", ".", "query", "(", "models", ".", "Database", ")", ".", "all", "(", ")", "for", "database", "in", "databases", ":", "merge_pv", "(", "'database_access'", ",", "database", ".", "perm", ")", "logging", ".", "info", "(", "'Creating missing metrics permissions'", ")", "metrics", "=", "[", "]", "for", "datasource_class", "in", "ConnectorRegistry", ".", "sources", ".", "values", "(", ")", ":", "metrics", "+=", "list", "(", "db", ".", "session", ".", "query", "(", "datasource_class", ".", "metric_class", ")", ".", "all", "(", ")", ")", "for", "metric", "in", "metrics", ":", "if", "metric", ".", "is_restricted", ":", "merge_pv", "(", "'metric_access'", ",", "metric", ".", "perm", ")" ]
43.722222
20.777778
def to_b58check(self, testnet=False): """ Generates a Base58Check encoding of this key. Args: testnet (bool): True if the key is to be used with testnet, False otherwise. Returns: str: A Base58Check encoded string representing the key. """ b = self.testnet_bytes if testnet else bytes(self) return base58.b58encode_check(b)
[ "def", "to_b58check", "(", "self", ",", "testnet", "=", "False", ")", ":", "b", "=", "self", ".", "testnet_bytes", "if", "testnet", "else", "bytes", "(", "self", ")", "return", "base58", ".", "b58encode_check", "(", "b", ")" ]
36.545455
14.727273
def make_short_chunks_from_unused( self,min_length,overlap=0,play=0,sl=0,excl_play=0): """ Create a chunk that uses up the unused data in the science segment @param min_length: the unused data must be greater than min_length to make a chunk. @param overlap: overlap between chunks in seconds. @param play: if true, only generate chunks that overlap with S2 playground data. @param sl: slide by sl seconds before determining playground data. @param excl_play: exclude the first excl_play second from the start and end of the chunk when computing if the chunk overlaps with playground. """ for seg in self.__sci_segs: if seg.unused() > min_length: start = seg.end() - seg.unused() - overlap end = seg.end() length = start - end if (not play) or (play and (((end-sl-excl_play-729273613)%6370) < (600+length-2*excl_play))): seg.add_chunk(start, end, start) seg.set_unused(0)
[ "def", "make_short_chunks_from_unused", "(", "self", ",", "min_length", ",", "overlap", "=", "0", ",", "play", "=", "0", ",", "sl", "=", "0", ",", "excl_play", "=", "0", ")", ":", "for", "seg", "in", "self", ".", "__sci_segs", ":", "if", "seg", ".", "unused", "(", ")", ">", "min_length", ":", "start", "=", "seg", ".", "end", "(", ")", "-", "seg", ".", "unused", "(", ")", "-", "overlap", "end", "=", "seg", ".", "end", "(", ")", "length", "=", "start", "-", "end", "if", "(", "not", "play", ")", "or", "(", "play", "and", "(", "(", "(", "end", "-", "sl", "-", "excl_play", "-", "729273613", ")", "%", "6370", ")", "<", "(", "600", "+", "length", "-", "2", "*", "excl_play", ")", ")", ")", ":", "seg", ".", "add_chunk", "(", "start", ",", "end", ",", "start", ")", "seg", ".", "set_unused", "(", "0", ")" ]
45.809524
18.380952
def fixed_point_density_preserving(points, cells, *args, **kwargs): """Idea: Move interior mesh points into the weighted averages of the circumcenters of their adjacent cells. If a triangle cell switches orientation in the process, don't move quite so far. """ def get_new_points(mesh): # Get circumcenters everywhere except at cells adjacent to the boundary; # barycenters there. cc = mesh.cell_circumcenters bc = mesh.cell_barycenters # Find all cells with a boundary edge boundary_cell_ids = mesh.edges_cells[1][:, 0] cc[boundary_cell_ids] = bc[boundary_cell_ids] return get_new_points_count_averaged(mesh, cc) mesh = MeshTri(points, cells) runner(get_new_points, mesh, *args, **kwargs) return mesh.node_coords, mesh.cells["nodes"]
[ "def", "fixed_point_density_preserving", "(", "points", ",", "cells", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "get_new_points", "(", "mesh", ")", ":", "# Get circumcenters everywhere except at cells adjacent to the boundary;", "# barycenters there.", "cc", "=", "mesh", ".", "cell_circumcenters", "bc", "=", "mesh", ".", "cell_barycenters", "# Find all cells with a boundary edge", "boundary_cell_ids", "=", "mesh", ".", "edges_cells", "[", "1", "]", "[", ":", ",", "0", "]", "cc", "[", "boundary_cell_ids", "]", "=", "bc", "[", "boundary_cell_ids", "]", "return", "get_new_points_count_averaged", "(", "mesh", ",", "cc", ")", "mesh", "=", "MeshTri", "(", "points", ",", "cells", ")", "runner", "(", "get_new_points", ",", "mesh", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "mesh", ".", "node_coords", ",", "mesh", ".", "cells", "[", "\"nodes\"", "]" ]
40.85
16.2
def get_flake8_options(config_dir='.'): # type: (str) -> List[str] """Checks for local config overrides for `flake8` and add them in the correct `flake8` `options` format. :param config_dir: :return: List[str] """ if FLAKE8_CONFIG_NAME in os.listdir(config_dir): flake8_config_path = FLAKE8_CONFIG_NAME else: flake8_config_path = DEFAULT_FLAKE8_CONFIG_PATH return ['--config={}'.format(flake8_config_path)]
[ "def", "get_flake8_options", "(", "config_dir", "=", "'.'", ")", ":", "# type: (str) -> List[str]", "if", "FLAKE8_CONFIG_NAME", "in", "os", ".", "listdir", "(", "config_dir", ")", ":", "flake8_config_path", "=", "FLAKE8_CONFIG_NAME", "else", ":", "flake8_config_path", "=", "DEFAULT_FLAKE8_CONFIG_PATH", "return", "[", "'--config={}'", ".", "format", "(", "flake8_config_path", ")", "]" ]
31.928571
15.928571
def _set_virtual(self, key, value): """ Recursively set or update virtual keys. Do nothing if non-virtual value is present. """ if key in self and key not in self._virtual_keys: return # Do nothing for non-virtual keys. self._virtual_keys.add(key) if key in self and self[key] is not value: self._on_change(key, value) dict.__setitem__(self, key, value) for overlay in self._iter_overlays(): overlay._set_virtual(key, value)
[ "def", "_set_virtual", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "in", "self", "and", "key", "not", "in", "self", ".", "_virtual_keys", ":", "return", "# Do nothing for non-virtual keys.", "self", ".", "_virtual_keys", ".", "add", "(", "key", ")", "if", "key", "in", "self", "and", "self", "[", "key", "]", "is", "not", "value", ":", "self", ".", "_on_change", "(", "key", ",", "value", ")", "dict", ".", "__setitem__", "(", "self", ",", "key", ",", "value", ")", "for", "overlay", "in", "self", ".", "_iter_overlays", "(", ")", ":", "overlay", ".", "_set_virtual", "(", "key", ",", "value", ")" ]
42.583333
8.583333
def change_logger_levels(logger=None, level=logging.DEBUG): """ Go through the logger and handlers and update their levels to the one specified. :param logger: logging name or object to modify, defaults to root logger :param level: logging level to set at (10=Debug, 20=Info, 30=Warn, 40=Error) """ if not isinstance(logger, logging.Logger): logger = logging.getLogger(logger) logger.setLevel(level) for handler in logger.handlers: handler.level = level
[ "def", "change_logger_levels", "(", "logger", "=", "None", ",", "level", "=", "logging", ".", "DEBUG", ")", ":", "if", "not", "isinstance", "(", "logger", ",", "logging", ".", "Logger", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "logger", ")", "logger", ".", "setLevel", "(", "level", ")", "for", "handler", "in", "logger", ".", "handlers", ":", "handler", ".", "level", "=", "level" ]
35.285714
18.857143
def _flags_changed(self, name, old, new): """ensure flags dict is valid""" for key,value in new.iteritems(): assert len(value) == 2, "Bad flag: %r:%s"%(key,value) assert isinstance(value[0], (dict, Config)), "Bad flag: %r:%s"%(key,value) assert isinstance(value[1], basestring), "Bad flag: %r:%s"%(key,value)
[ "def", "_flags_changed", "(", "self", ",", "name", ",", "old", ",", "new", ")", ":", "for", "key", ",", "value", "in", "new", ".", "iteritems", "(", ")", ":", "assert", "len", "(", "value", ")", "==", "2", ",", "\"Bad flag: %r:%s\"", "%", "(", "key", ",", "value", ")", "assert", "isinstance", "(", "value", "[", "0", "]", ",", "(", "dict", ",", "Config", ")", ")", ",", "\"Bad flag: %r:%s\"", "%", "(", "key", ",", "value", ")", "assert", "isinstance", "(", "value", "[", "1", "]", ",", "basestring", ")", ",", "\"Bad flag: %r:%s\"", "%", "(", "key", ",", "value", ")" ]
59.166667
19.166667
def add_route(self, route): ''' Add a route object, but do not change the :data:`Route.app` attribute.''' self.routes.append(route) self.router.add(route.rule, route.method, route, name=route.name) if DEBUG: route.prepare()
[ "def", "add_route", "(", "self", ",", "route", ")", ":", "self", ".", "routes", ".", "append", "(", "route", ")", "self", ".", "router", ".", "add", "(", "route", ".", "rule", ",", "route", ".", "method", ",", "route", ",", "name", "=", "route", ".", "name", ")", "if", "DEBUG", ":", "route", ".", "prepare", "(", ")" ]
43.666667
17.666667
def file_enumerator(filepath, block_size=10240, *args, **kwargs): """Return an enumerator that knows how to read a physical file.""" _LOGGER.debug("Enumerating through archive file: %s", filepath) def opener(archive_res): _LOGGER.debug("Opening from file (file_enumerator): %s", filepath) _archive_read_open_filename(archive_res, filepath, block_size) if 'entry_cls' not in kwargs: kwargs['entry_cls'] = _ArchiveEntryItReadable return _enumerator(opener, *args, **kwargs)
[ "def", "file_enumerator", "(", "filepath", ",", "block_size", "=", "10240", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_LOGGER", ".", "debug", "(", "\"Enumerating through archive file: %s\"", ",", "filepath", ")", "def", "opener", "(", "archive_res", ")", ":", "_LOGGER", ".", "debug", "(", "\"Opening from file (file_enumerator): %s\"", ",", "filepath", ")", "_archive_read_open_filename", "(", "archive_res", ",", "filepath", ",", "block_size", ")", "if", "'entry_cls'", "not", "in", "kwargs", ":", "kwargs", "[", "'entry_cls'", "]", "=", "_ArchiveEntryItReadable", "return", "_enumerator", "(", "opener", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
36.733333
22.466667
def handle_call(self, frame, argument_list): """This method is called when there is the remote possibility that we ever need to stop in this function.""" fun = frame.f_code.co_name log.info('Calling: %r' % fun) init = 'Echo|%s' % dump({ 'for': '__call__', 'val': '%s(%s)' % ( fun, ', '.join([ '%s=%s' % (key, self.safe_better_repr(value)) for key, value in get_args(frame).items() ]) ) }) self.interaction( frame, init=init, exception_description='Calling %s' % fun )
[ "def", "handle_call", "(", "self", ",", "frame", ",", "argument_list", ")", ":", "fun", "=", "frame", ".", "f_code", ".", "co_name", "log", ".", "info", "(", "'Calling: %r'", "%", "fun", ")", "init", "=", "'Echo|%s'", "%", "dump", "(", "{", "'for'", ":", "'__call__'", ",", "'val'", ":", "'%s(%s)'", "%", "(", "fun", ",", "', '", ".", "join", "(", "[", "'%s=%s'", "%", "(", "key", ",", "self", ".", "safe_better_repr", "(", "value", ")", ")", "for", "key", ",", "value", "in", "get_args", "(", "frame", ")", ".", "items", "(", ")", "]", ")", ")", "}", ")", "self", ".", "interaction", "(", "frame", ",", "init", "=", "init", ",", "exception_description", "=", "'Calling %s'", "%", "fun", ")" ]
34.25
16.7
def sigterm_handler(signum, frame): '''Intercept sigterm and terminate all processes. ''' if captureproc and captureproc.poll() is None: captureproc.terminate() terminate(True) sys.exit(0)
[ "def", "sigterm_handler", "(", "signum", ",", "frame", ")", ":", "if", "captureproc", "and", "captureproc", ".", "poll", "(", ")", "is", "None", ":", "captureproc", ".", "terminate", "(", ")", "terminate", "(", "True", ")", "sys", ".", "exit", "(", "0", ")" ]
30
16.571429
def _calculate_credit_charge(self, message): """ Calculates the credit charge for a request based on the command. If connection.supports_multi_credit is not True then the credit charge isn't valid so it returns 0. The credit charge is the number of credits that are required for sending/receiving data over 64 kilobytes, in the existing messages only the Read, Write, Query Directory or IOCTL commands will end in this scenario and each require their own calculation to get the proper value. The generic formula for calculating the credit charge is https://msdn.microsoft.com/en-us/library/dn529312.aspx (max(SendPayloadSize, Expected ResponsePayloadSize) - 1) / 65536 + 1 :param message: The message being sent :return: The credit charge to set on the header """ credit_size = 65536 if not self.supports_multi_credit: credit_charge = 0 elif message.COMMAND == Commands.SMB2_READ: max_size = message['length'].get_value() + \ message['read_channel_info_length'].get_value() - 1 credit_charge = math.ceil(max_size / credit_size) elif message.COMMAND == Commands.SMB2_WRITE: max_size = message['length'].get_value() + \ message['write_channel_info_length'].get_value() - 1 credit_charge = math.ceil(max_size / credit_size) elif message.COMMAND == Commands.SMB2_IOCTL: max_in_size = len(message['buffer']) max_out_size = message['max_output_response'].get_value() max_size = max(max_in_size, max_out_size) - 1 credit_charge = math.ceil(max_size / credit_size) elif message.COMMAND == Commands.SMB2_QUERY_DIRECTORY: max_in_size = len(message['buffer']) max_out_size = message['output_buffer_length'].get_value() max_size = max(max_in_size, max_out_size) - 1 credit_charge = math.ceil(max_size / credit_size) else: credit_charge = 1 # python 2 returns a float where we need an integer return int(credit_charge)
[ "def", "_calculate_credit_charge", "(", "self", ",", "message", ")", ":", "credit_size", "=", "65536", "if", "not", "self", ".", "supports_multi_credit", ":", "credit_charge", "=", "0", "elif", "message", ".", "COMMAND", "==", "Commands", ".", "SMB2_READ", ":", "max_size", "=", "message", "[", "'length'", "]", ".", "get_value", "(", ")", "+", "message", "[", "'read_channel_info_length'", "]", ".", "get_value", "(", ")", "-", "1", "credit_charge", "=", "math", ".", "ceil", "(", "max_size", "/", "credit_size", ")", "elif", "message", ".", "COMMAND", "==", "Commands", ".", "SMB2_WRITE", ":", "max_size", "=", "message", "[", "'length'", "]", ".", "get_value", "(", ")", "+", "message", "[", "'write_channel_info_length'", "]", ".", "get_value", "(", ")", "-", "1", "credit_charge", "=", "math", ".", "ceil", "(", "max_size", "/", "credit_size", ")", "elif", "message", ".", "COMMAND", "==", "Commands", ".", "SMB2_IOCTL", ":", "max_in_size", "=", "len", "(", "message", "[", "'buffer'", "]", ")", "max_out_size", "=", "message", "[", "'max_output_response'", "]", ".", "get_value", "(", ")", "max_size", "=", "max", "(", "max_in_size", ",", "max_out_size", ")", "-", "1", "credit_charge", "=", "math", ".", "ceil", "(", "max_size", "/", "credit_size", ")", "elif", "message", ".", "COMMAND", "==", "Commands", ".", "SMB2_QUERY_DIRECTORY", ":", "max_in_size", "=", "len", "(", "message", "[", "'buffer'", "]", ")", "max_out_size", "=", "message", "[", "'output_buffer_length'", "]", ".", "get_value", "(", ")", "max_size", "=", "max", "(", "max_in_size", ",", "max_out_size", ")", "-", "1", "credit_charge", "=", "math", ".", "ceil", "(", "max_size", "/", "credit_size", ")", "else", ":", "credit_charge", "=", "1", "# python 2 returns a float where we need an integer", "return", "int", "(", "credit_charge", ")" ]
48.088889
21.511111
def _finalize_namespaces(self, ns_dict=None): """Returns a dictionary of namespaces to be exported with an XML document. This loops over all the namespaces that were discovered and built during the execution of ``collect()`` and ``_parse_collected_classes()`` and attempts to merge them all. Raises: .namespaces.DuplicatePrefixError: If namespace prefix was mapped to more than one namespace. .namespaces.NoPrefixError: If a namespace was collected that is not mapped to a prefix. """ if ns_dict: # Add the user's entries to our set for ns, alias in six.iteritems(ns_dict): self._collected_namespaces.add_namespace_uri(ns, alias) # Add the ID namespaces self._collected_namespaces.add_namespace_uri( ns_uri=idgen.get_id_namespace(), prefix=idgen.get_id_namespace_alias() ) # Remap the example namespace to the one expected by the APIs if the # sample example namespace is found. self._fix_example_namespace() # Add _input_namespaces for prefix, uri in six.iteritems(self._input_namespaces): self._collected_namespaces.add_namespace_uri(uri, prefix) # Add some default XML namespaces to make sure they're there. self._collected_namespaces.import_from(namespaces.XML_NAMESPACES) # python-stix's generateDS-generated binding classes can't handle # default namespaces. So make sure there are no preferred defaults in # the set. Get prefixes from the global namespace set if we have to. for ns_uri in self._collected_namespaces.namespace_uris: preferred_prefix = self._collected_namespaces.preferred_prefix_for_namespace(ns_uri) if preferred_prefix: continue # No preferred prefix set for namespace. Try to assign one. prefixes = self._collected_namespaces.get_prefixes(ns_uri) if prefixes: prefix = next(iter(prefixes)) else: prefix = namespaces.lookup_name(ns_uri) if prefix is None: raise namespaces.NoPrefixesError(ns_uri) self._collected_namespaces.set_preferred_prefix_for_namespace( ns_uri=ns_uri, prefix=prefix, add_if_not_exist=True )
[ "def", "_finalize_namespaces", "(", "self", ",", "ns_dict", "=", "None", ")", ":", "if", "ns_dict", ":", "# Add the user's entries to our set", "for", "ns", ",", "alias", "in", "six", ".", "iteritems", "(", "ns_dict", ")", ":", "self", ".", "_collected_namespaces", ".", "add_namespace_uri", "(", "ns", ",", "alias", ")", "# Add the ID namespaces", "self", ".", "_collected_namespaces", ".", "add_namespace_uri", "(", "ns_uri", "=", "idgen", ".", "get_id_namespace", "(", ")", ",", "prefix", "=", "idgen", ".", "get_id_namespace_alias", "(", ")", ")", "# Remap the example namespace to the one expected by the APIs if the", "# sample example namespace is found.", "self", ".", "_fix_example_namespace", "(", ")", "# Add _input_namespaces", "for", "prefix", ",", "uri", "in", "six", ".", "iteritems", "(", "self", ".", "_input_namespaces", ")", ":", "self", ".", "_collected_namespaces", ".", "add_namespace_uri", "(", "uri", ",", "prefix", ")", "# Add some default XML namespaces to make sure they're there.", "self", ".", "_collected_namespaces", ".", "import_from", "(", "namespaces", ".", "XML_NAMESPACES", ")", "# python-stix's generateDS-generated binding classes can't handle", "# default namespaces. So make sure there are no preferred defaults in", "# the set. Get prefixes from the global namespace set if we have to.", "for", "ns_uri", "in", "self", ".", "_collected_namespaces", ".", "namespace_uris", ":", "preferred_prefix", "=", "self", ".", "_collected_namespaces", ".", "preferred_prefix_for_namespace", "(", "ns_uri", ")", "if", "preferred_prefix", ":", "continue", "# No preferred prefix set for namespace. Try to assign one.", "prefixes", "=", "self", ".", "_collected_namespaces", ".", "get_prefixes", "(", "ns_uri", ")", "if", "prefixes", ":", "prefix", "=", "next", "(", "iter", "(", "prefixes", ")", ")", "else", ":", "prefix", "=", "namespaces", ".", "lookup_name", "(", "ns_uri", ")", "if", "prefix", "is", "None", ":", "raise", "namespaces", ".", "NoPrefixesError", "(", "ns_uri", ")", "self", ".", "_collected_namespaces", ".", "set_preferred_prefix_for_namespace", "(", "ns_uri", "=", "ns_uri", ",", "prefix", "=", "prefix", ",", "add_if_not_exist", "=", "True", ")" ]
39.672131
23.393443
def jsonify_timedelta(value): """Converts a `datetime.timedelta` to an ISO 8601 duration string for JSON-ification. :param value: something to convert :type value: datetime.timedelta :return: the value after conversion :rtype unicode """ assert isinstance(value, datetime.timedelta) # split seconds to larger units seconds = value.total_seconds() minutes, seconds = divmod(seconds, 60) hours, minutes = divmod(minutes, 60) days, hours = divmod(hours, 24) days, hours, minutes = map(int, (days, hours, minutes)) seconds = round(seconds, 6) # build date date = '' if days: date = '%sD' % days # build time time = u'T' # hours bigger_exists = date or hours if bigger_exists: time += '{:02}H'.format(hours) # minutes bigger_exists = bigger_exists or minutes if bigger_exists: time += '{:02}M'.format(minutes) # seconds if seconds.is_integer(): seconds = '{:02}'.format(int(seconds)) else: # 9 chars long w/leading 0, 6 digits after decimal seconds = '%09.6f' % seconds # remove trailing zeros seconds = seconds.rstrip('0') time += '{}S'.format(seconds) return u'P' + date + time
[ "def", "jsonify_timedelta", "(", "value", ")", ":", "assert", "isinstance", "(", "value", ",", "datetime", ".", "timedelta", ")", "# split seconds to larger units", "seconds", "=", "value", ".", "total_seconds", "(", ")", "minutes", ",", "seconds", "=", "divmod", "(", "seconds", ",", "60", ")", "hours", ",", "minutes", "=", "divmod", "(", "minutes", ",", "60", ")", "days", ",", "hours", "=", "divmod", "(", "hours", ",", "24", ")", "days", ",", "hours", ",", "minutes", "=", "map", "(", "int", ",", "(", "days", ",", "hours", ",", "minutes", ")", ")", "seconds", "=", "round", "(", "seconds", ",", "6", ")", "# build date", "date", "=", "''", "if", "days", ":", "date", "=", "'%sD'", "%", "days", "# build time", "time", "=", "u'T'", "# hours", "bigger_exists", "=", "date", "or", "hours", "if", "bigger_exists", ":", "time", "+=", "'{:02}H'", ".", "format", "(", "hours", ")", "# minutes", "bigger_exists", "=", "bigger_exists", "or", "minutes", "if", "bigger_exists", ":", "time", "+=", "'{:02}M'", ".", "format", "(", "minutes", ")", "# seconds", "if", "seconds", ".", "is_integer", "(", ")", ":", "seconds", "=", "'{:02}'", ".", "format", "(", "int", "(", "seconds", ")", ")", "else", ":", "# 9 chars long w/leading 0, 6 digits after decimal", "seconds", "=", "'%09.6f'", "%", "seconds", "# remove trailing zeros", "seconds", "=", "seconds", ".", "rstrip", "(", "'0'", ")", "time", "+=", "'{}S'", ".", "format", "(", "seconds", ")", "return", "u'P'", "+", "date", "+", "time" ]
23.519231
18.461538
def stop(self): """Stop the thread, making this object unusable.""" if not self._dead: self._killed = True self._cancelled.set() self._busy_sem.release() self.join() if not self._ready_sem.acquire(False): warning("ISOTP Timer thread may not have stopped " "correctly")
[ "def", "stop", "(", "self", ")", ":", "if", "not", "self", ".", "_dead", ":", "self", ".", "_killed", "=", "True", "self", ".", "_cancelled", ".", "set", "(", ")", "self", ".", "_busy_sem", ".", "release", "(", ")", "self", ".", "join", "(", ")", "if", "not", "self", ".", "_ready_sem", ".", "acquire", "(", "False", ")", ":", "warning", "(", "\"ISOTP Timer thread may not have stopped \"", "\"correctly\"", ")" ]
37.5
11.6
def fork(self, state, expression, policy='ALL', setstate=None): """ Fork state on expression concretizations. Using policy build a list of solutions for expression. For the state on each solution setting the new state with setstate For example if expression is a Bool it may have 2 solutions. True or False. Parent (expression = ??) Child1 Child2 (expression = True) (expression = True) setstate(True) setstate(False) The optional setstate() function is supposed to set the concrete value in the child state. """ assert isinstance(expression, Expression) if setstate is None: setstate = lambda x, y: None # Find a set of solutions for expression solutions = state.concretize(expression, policy) if not solutions: raise ExecutorError("Forking on unfeasible constraint set") if len(solutions) == 1: setstate(state, solutions[0]) return state logger.info("Forking. Policy: %s. Values: %s", policy, ', '.join(f'0x{sol:x}' for sol in solutions)) self._publish('will_fork_state', state, expression, solutions, policy) # Build and enqueue a state for each solution children = [] for new_value in solutions: with state as new_state: new_state.constrain(expression == new_value) # and set the PC of the new state to the concrete pc-dest #(or other register or memory address to concrete) setstate(new_state, new_value) self._publish('did_fork_state', new_state, expression, new_value, policy) # enqueue new_state state_id = self.enqueue(new_state) # maintain a list of children for logging purpose children.append(state_id) logger.info("Forking current state into states %r", children) return None
[ "def", "fork", "(", "self", ",", "state", ",", "expression", ",", "policy", "=", "'ALL'", ",", "setstate", "=", "None", ")", ":", "assert", "isinstance", "(", "expression", ",", "Expression", ")", "if", "setstate", "is", "None", ":", "setstate", "=", "lambda", "x", ",", "y", ":", "None", "# Find a set of solutions for expression", "solutions", "=", "state", ".", "concretize", "(", "expression", ",", "policy", ")", "if", "not", "solutions", ":", "raise", "ExecutorError", "(", "\"Forking on unfeasible constraint set\"", ")", "if", "len", "(", "solutions", ")", "==", "1", ":", "setstate", "(", "state", ",", "solutions", "[", "0", "]", ")", "return", "state", "logger", ".", "info", "(", "\"Forking. Policy: %s. Values: %s\"", ",", "policy", ",", "', '", ".", "join", "(", "f'0x{sol:x}'", "for", "sol", "in", "solutions", ")", ")", "self", ".", "_publish", "(", "'will_fork_state'", ",", "state", ",", "expression", ",", "solutions", ",", "policy", ")", "# Build and enqueue a state for each solution", "children", "=", "[", "]", "for", "new_value", "in", "solutions", ":", "with", "state", "as", "new_state", ":", "new_state", ".", "constrain", "(", "expression", "==", "new_value", ")", "# and set the PC of the new state to the concrete pc-dest", "#(or other register or memory address to concrete)", "setstate", "(", "new_state", ",", "new_value", ")", "self", ".", "_publish", "(", "'did_fork_state'", ",", "new_state", ",", "expression", ",", "new_value", ",", "policy", ")", "# enqueue new_state", "state_id", "=", "self", ".", "enqueue", "(", "new_state", ")", "# maintain a list of children for logging purpose", "children", ".", "append", "(", "state_id", ")", "logger", ".", "info", "(", "\"Forking current state into states %r\"", ",", "children", ")", "return", "None" ]
35.915254
23.101695
def disable_detailed_monitoring(name, call=None): ''' Enable/disable detailed monitoring on a node CLI Example: ''' if call != 'action': raise SaltCloudSystemExit( 'The enable_term_protect action must be called with ' '-a or --action.' ) instance_id = _get_node(name)['instanceId'] params = {'Action': 'UnmonitorInstances', 'InstanceId.1': instance_id} result = aws.query(params, location=get_location(), provider=get_provider(), return_root=True, opts=__opts__, sigver='4') return show_detailed_monitoring(name=name, instance_id=instance_id, call='action')
[ "def", "disable_detailed_monitoring", "(", "name", ",", "call", "=", "None", ")", ":", "if", "call", "!=", "'action'", ":", "raise", "SaltCloudSystemExit", "(", "'The enable_term_protect action must be called with '", "'-a or --action.'", ")", "instance_id", "=", "_get_node", "(", "name", ")", "[", "'instanceId'", "]", "params", "=", "{", "'Action'", ":", "'UnmonitorInstances'", ",", "'InstanceId.1'", ":", "instance_id", "}", "result", "=", "aws", ".", "query", "(", "params", ",", "location", "=", "get_location", "(", ")", ",", "provider", "=", "get_provider", "(", ")", ",", "return_root", "=", "True", ",", "opts", "=", "__opts__", ",", "sigver", "=", "'4'", ")", "return", "show_detailed_monitoring", "(", "name", "=", "name", ",", "instance_id", "=", "instance_id", ",", "call", "=", "'action'", ")" ]
30.958333
18.708333
def code_events(self): """Returns processed memory usage.""" if self._resulting_events: return self._resulting_events for i, (lineno, mem, func, fname) in enumerate(self._events_list): mem_in_mb = float(mem - self.mem_overhead) / _BYTES_IN_MB if (self._resulting_events and self._resulting_events[-1][0] == lineno and self._resulting_events[-1][2] == func and self._resulting_events[-1][3] == fname and self._resulting_events[-1][1] < mem_in_mb): self._resulting_events[-1][1] = mem_in_mb else: self._resulting_events.append( [i + 1, lineno, mem_in_mb, func, fname]) return self._resulting_events
[ "def", "code_events", "(", "self", ")", ":", "if", "self", ".", "_resulting_events", ":", "return", "self", ".", "_resulting_events", "for", "i", ",", "(", "lineno", ",", "mem", ",", "func", ",", "fname", ")", "in", "enumerate", "(", "self", ".", "_events_list", ")", ":", "mem_in_mb", "=", "float", "(", "mem", "-", "self", ".", "mem_overhead", ")", "/", "_BYTES_IN_MB", "if", "(", "self", ".", "_resulting_events", "and", "self", ".", "_resulting_events", "[", "-", "1", "]", "[", "0", "]", "==", "lineno", "and", "self", ".", "_resulting_events", "[", "-", "1", "]", "[", "2", "]", "==", "func", "and", "self", ".", "_resulting_events", "[", "-", "1", "]", "[", "3", "]", "==", "fname", "and", "self", ".", "_resulting_events", "[", "-", "1", "]", "[", "1", "]", "<", "mem_in_mb", ")", ":", "self", ".", "_resulting_events", "[", "-", "1", "]", "[", "1", "]", "=", "mem_in_mb", "else", ":", "self", ".", "_resulting_events", ".", "append", "(", "[", "i", "+", "1", ",", "lineno", ",", "mem_in_mb", ",", "func", ",", "fname", "]", ")", "return", "self", ".", "_resulting_events" ]
49.5625
15.5
def PushItem(self, item, block=True): """Pushes an item onto the queue. Args: item (object): item to add. block (Optional[bool]): True to block the process when the queue is full. Raises: QueueFull: if the item could not be pushed the queue because it's full. """ try: self._queue.put(item, block=block) except Queue.Full as exception: raise errors.QueueFull(exception)
[ "def", "PushItem", "(", "self", ",", "item", ",", "block", "=", "True", ")", ":", "try", ":", "self", ".", "_queue", ".", "put", "(", "item", ",", "block", "=", "block", ")", "except", "Queue", ".", "Full", "as", "exception", ":", "raise", "errors", ".", "QueueFull", "(", "exception", ")" ]
29.428571
18.857143
def get_instance(self, payload): """ Build an instance of IpAccessControlListInstance :param dict payload: Payload response from the API :returns: twilio.rest.api.v2010.account.sip.ip_access_control_list.IpAccessControlListInstance :rtype: twilio.rest.api.v2010.account.sip.ip_access_control_list.IpAccessControlListInstance """ return IpAccessControlListInstance( self._version, payload, account_sid=self._solution['account_sid'], )
[ "def", "get_instance", "(", "self", ",", "payload", ")", ":", "return", "IpAccessControlListInstance", "(", "self", ".", "_version", ",", "payload", ",", "account_sid", "=", "self", ".", "_solution", "[", "'account_sid'", "]", ",", ")" ]
37.285714
23.285714
def write_to_screen(self, cli, screen, mouse_handlers, write_position): """ Write window to screen. This renders the user control, the margins and copies everything over to the absolute position at the given screen. """ # Calculate margin sizes. left_margin_widths = [self._get_margin_width(cli, m) for m in self.left_margins] right_margin_widths = [self._get_margin_width(cli, m) for m in self.right_margins] total_margin_width = sum(left_margin_widths + right_margin_widths) # Render UserControl. ui_content = self.content.create_content( cli, write_position.width - total_margin_width, write_position.height) assert isinstance(ui_content, UIContent) # Scroll content. wrap_lines = self.wrap_lines(cli) scroll_func = self._scroll_when_linewrapping if wrap_lines else self._scroll_without_linewrapping scroll_func( ui_content, write_position.width - total_margin_width, write_position.height, cli) # Write body visible_line_to_row_col, rowcol_to_yx = self._copy_body( cli, ui_content, screen, write_position, sum(left_margin_widths), write_position.width - total_margin_width, self.vertical_scroll, self.horizontal_scroll, has_focus=self.content.has_focus(cli), wrap_lines=wrap_lines, highlight_lines=True, vertical_scroll_2=self.vertical_scroll_2, always_hide_cursor=self.always_hide_cursor(cli)) # Remember render info. (Set before generating the margins. They need this.) x_offset=write_position.xpos + sum(left_margin_widths) y_offset=write_position.ypos self.render_info = WindowRenderInfo( ui_content=ui_content, horizontal_scroll=self.horizontal_scroll, vertical_scroll=self.vertical_scroll, window_width=write_position.width - total_margin_width, window_height=write_position.height, configured_scroll_offsets=self.scroll_offsets, visible_line_to_row_col=visible_line_to_row_col, rowcol_to_yx=rowcol_to_yx, x_offset=x_offset, y_offset=y_offset, wrap_lines=wrap_lines) # Set mouse handlers. def mouse_handler(cli, mouse_event): """ Wrapper around the mouse_handler of the `UIControl` that turns screen coordinates into line coordinates. """ # Find row/col position first. yx_to_rowcol = dict((v, k) for k, v in rowcol_to_yx.items()) y = mouse_event.position.y x = mouse_event.position.x # If clicked below the content area, look for a position in the # last line instead. max_y = write_position.ypos + len(visible_line_to_row_col) - 1 y = min(max_y, y) while x >= 0: try: row, col = yx_to_rowcol[y, x] except KeyError: # Try again. (When clicking on the right side of double # width characters, or on the right side of the input.) x -= 1 else: # Found position, call handler of UIControl. result = self.content.mouse_handler( cli, MouseEvent(position=Point(x=col, y=row), event_type=mouse_event.event_type)) break else: # nobreak. # (No x/y coordinate found for the content. This happens in # case of a FillControl, that only specifies a background, but # doesn't have a content. Report (0,0) instead.) result = self.content.mouse_handler( cli, MouseEvent(position=Point(x=0, y=0), event_type=mouse_event.event_type)) # If it returns NotImplemented, handle it here. if result == NotImplemented: return self._mouse_handler(cli, mouse_event) return result mouse_handlers.set_mouse_handler_for_range( x_min=write_position.xpos + sum(left_margin_widths), x_max=write_position.xpos + write_position.width - total_margin_width, y_min=write_position.ypos, y_max=write_position.ypos + write_position.height, handler=mouse_handler) # Render and copy margins. move_x = 0 def render_margin(m, width): " Render margin. Return `Screen`. " # Retrieve margin tokens. tokens = m.create_margin(cli, self.render_info, width, write_position.height) # Turn it into a UIContent object. # already rendered those tokens using this size.) return TokenListControl.static(tokens).create_content( cli, width + 1, write_position.height) for m, width in zip(self.left_margins, left_margin_widths): # Create screen for margin. margin_screen = render_margin(m, width) # Copy and shift X. self._copy_margin(cli, margin_screen, screen, write_position, move_x, width) move_x += width move_x = write_position.width - sum(right_margin_widths) for m, width in zip(self.right_margins, right_margin_widths): # Create screen for margin. margin_screen = render_margin(m, width) # Copy and shift X. self._copy_margin(cli, margin_screen, screen, write_position, move_x, width) move_x += width
[ "def", "write_to_screen", "(", "self", ",", "cli", ",", "screen", ",", "mouse_handlers", ",", "write_position", ")", ":", "# Calculate margin sizes.", "left_margin_widths", "=", "[", "self", ".", "_get_margin_width", "(", "cli", ",", "m", ")", "for", "m", "in", "self", ".", "left_margins", "]", "right_margin_widths", "=", "[", "self", ".", "_get_margin_width", "(", "cli", ",", "m", ")", "for", "m", "in", "self", ".", "right_margins", "]", "total_margin_width", "=", "sum", "(", "left_margin_widths", "+", "right_margin_widths", ")", "# Render UserControl.", "ui_content", "=", "self", ".", "content", ".", "create_content", "(", "cli", ",", "write_position", ".", "width", "-", "total_margin_width", ",", "write_position", ".", "height", ")", "assert", "isinstance", "(", "ui_content", ",", "UIContent", ")", "# Scroll content.", "wrap_lines", "=", "self", ".", "wrap_lines", "(", "cli", ")", "scroll_func", "=", "self", ".", "_scroll_when_linewrapping", "if", "wrap_lines", "else", "self", ".", "_scroll_without_linewrapping", "scroll_func", "(", "ui_content", ",", "write_position", ".", "width", "-", "total_margin_width", ",", "write_position", ".", "height", ",", "cli", ")", "# Write body", "visible_line_to_row_col", ",", "rowcol_to_yx", "=", "self", ".", "_copy_body", "(", "cli", ",", "ui_content", ",", "screen", ",", "write_position", ",", "sum", "(", "left_margin_widths", ")", ",", "write_position", ".", "width", "-", "total_margin_width", ",", "self", ".", "vertical_scroll", ",", "self", ".", "horizontal_scroll", ",", "has_focus", "=", "self", ".", "content", ".", "has_focus", "(", "cli", ")", ",", "wrap_lines", "=", "wrap_lines", ",", "highlight_lines", "=", "True", ",", "vertical_scroll_2", "=", "self", ".", "vertical_scroll_2", ",", "always_hide_cursor", "=", "self", ".", "always_hide_cursor", "(", "cli", ")", ")", "# Remember render info. (Set before generating the margins. They need this.)", "x_offset", "=", "write_position", ".", "xpos", "+", "sum", "(", "left_margin_widths", ")", "y_offset", "=", "write_position", ".", "ypos", "self", ".", "render_info", "=", "WindowRenderInfo", "(", "ui_content", "=", "ui_content", ",", "horizontal_scroll", "=", "self", ".", "horizontal_scroll", ",", "vertical_scroll", "=", "self", ".", "vertical_scroll", ",", "window_width", "=", "write_position", ".", "width", "-", "total_margin_width", ",", "window_height", "=", "write_position", ".", "height", ",", "configured_scroll_offsets", "=", "self", ".", "scroll_offsets", ",", "visible_line_to_row_col", "=", "visible_line_to_row_col", ",", "rowcol_to_yx", "=", "rowcol_to_yx", ",", "x_offset", "=", "x_offset", ",", "y_offset", "=", "y_offset", ",", "wrap_lines", "=", "wrap_lines", ")", "# Set mouse handlers.", "def", "mouse_handler", "(", "cli", ",", "mouse_event", ")", ":", "\"\"\" Wrapper around the mouse_handler of the `UIControl` that turns\n screen coordinates into line coordinates. \"\"\"", "# Find row/col position first.", "yx_to_rowcol", "=", "dict", "(", "(", "v", ",", "k", ")", "for", "k", ",", "v", "in", "rowcol_to_yx", ".", "items", "(", ")", ")", "y", "=", "mouse_event", ".", "position", ".", "y", "x", "=", "mouse_event", ".", "position", ".", "x", "# If clicked below the content area, look for a position in the", "# last line instead.", "max_y", "=", "write_position", ".", "ypos", "+", "len", "(", "visible_line_to_row_col", ")", "-", "1", "y", "=", "min", "(", "max_y", ",", "y", ")", "while", "x", ">=", "0", ":", "try", ":", "row", ",", "col", "=", "yx_to_rowcol", "[", "y", ",", "x", "]", "except", "KeyError", ":", "# Try again. (When clicking on the right side of double", "# width characters, or on the right side of the input.)", "x", "-=", "1", "else", ":", "# Found position, call handler of UIControl.", "result", "=", "self", ".", "content", ".", "mouse_handler", "(", "cli", ",", "MouseEvent", "(", "position", "=", "Point", "(", "x", "=", "col", ",", "y", "=", "row", ")", ",", "event_type", "=", "mouse_event", ".", "event_type", ")", ")", "break", "else", ":", "# nobreak.", "# (No x/y coordinate found for the content. This happens in", "# case of a FillControl, that only specifies a background, but", "# doesn't have a content. Report (0,0) instead.)", "result", "=", "self", ".", "content", ".", "mouse_handler", "(", "cli", ",", "MouseEvent", "(", "position", "=", "Point", "(", "x", "=", "0", ",", "y", "=", "0", ")", ",", "event_type", "=", "mouse_event", ".", "event_type", ")", ")", "# If it returns NotImplemented, handle it here.", "if", "result", "==", "NotImplemented", ":", "return", "self", ".", "_mouse_handler", "(", "cli", ",", "mouse_event", ")", "return", "result", "mouse_handlers", ".", "set_mouse_handler_for_range", "(", "x_min", "=", "write_position", ".", "xpos", "+", "sum", "(", "left_margin_widths", ")", ",", "x_max", "=", "write_position", ".", "xpos", "+", "write_position", ".", "width", "-", "total_margin_width", ",", "y_min", "=", "write_position", ".", "ypos", ",", "y_max", "=", "write_position", ".", "ypos", "+", "write_position", ".", "height", ",", "handler", "=", "mouse_handler", ")", "# Render and copy margins.", "move_x", "=", "0", "def", "render_margin", "(", "m", ",", "width", ")", ":", "\" Render margin. Return `Screen`. \"", "# Retrieve margin tokens.", "tokens", "=", "m", ".", "create_margin", "(", "cli", ",", "self", ".", "render_info", ",", "width", ",", "write_position", ".", "height", ")", "# Turn it into a UIContent object.", "# already rendered those tokens using this size.)", "return", "TokenListControl", ".", "static", "(", "tokens", ")", ".", "create_content", "(", "cli", ",", "width", "+", "1", ",", "write_position", ".", "height", ")", "for", "m", ",", "width", "in", "zip", "(", "self", ".", "left_margins", ",", "left_margin_widths", ")", ":", "# Create screen for margin.", "margin_screen", "=", "render_margin", "(", "m", ",", "width", ")", "# Copy and shift X.", "self", ".", "_copy_margin", "(", "cli", ",", "margin_screen", ",", "screen", ",", "write_position", ",", "move_x", ",", "width", ")", "move_x", "+=", "width", "move_x", "=", "write_position", ".", "width", "-", "sum", "(", "right_margin_widths", ")", "for", "m", ",", "width", "in", "zip", "(", "self", ".", "right_margins", ",", "right_margin_widths", ")", ":", "# Create screen for margin.", "margin_screen", "=", "render_margin", "(", "m", ",", "width", ")", "# Copy and shift X.", "self", ".", "_copy_margin", "(", "cli", ",", "margin_screen", ",", "screen", ",", "write_position", ",", "move_x", ",", "width", ")", "move_x", "+=", "width" ]
43.609375
22.023438
def set_pin_retries(ctx, pw_attempts, admin_pin, force): """ Manage pin-retries. Sets the number of attempts available before locking for each PIN. PW_ATTEMPTS should be three integer values corresponding to the number of attempts for the PIN, Reset Code, and Admin PIN, respectively. """ controller = ctx.obj['controller'] resets_pins = controller.version < (4, 0, 0) if resets_pins: click.echo('WARNING: Setting PIN retries will reset the values for all ' '3 PINs!') force or click.confirm('Set PIN retry counters to: {} {} {}?'.format( *pw_attempts), abort=True, err=True) controller.set_pin_retries(*(pw_attempts + (admin_pin.encode('utf8'),))) click.echo('PIN retries successfully set.') if resets_pins: click.echo('Default PINs are set.') echo_default_pins()
[ "def", "set_pin_retries", "(", "ctx", ",", "pw_attempts", ",", "admin_pin", ",", "force", ")", ":", "controller", "=", "ctx", ".", "obj", "[", "'controller'", "]", "resets_pins", "=", "controller", ".", "version", "<", "(", "4", ",", "0", ",", "0", ")", "if", "resets_pins", ":", "click", ".", "echo", "(", "'WARNING: Setting PIN retries will reset the values for all '", "'3 PINs!'", ")", "force", "or", "click", ".", "confirm", "(", "'Set PIN retry counters to: {} {} {}?'", ".", "format", "(", "*", "pw_attempts", ")", ",", "abort", "=", "True", ",", "err", "=", "True", ")", "controller", ".", "set_pin_retries", "(", "*", "(", "pw_attempts", "+", "(", "admin_pin", ".", "encode", "(", "'utf8'", ")", ",", ")", ")", ")", "click", ".", "echo", "(", "'PIN retries successfully set.'", ")", "if", "resets_pins", ":", "click", ".", "echo", "(", "'Default PINs are set.'", ")", "echo_default_pins", "(", ")" ]
40.428571
19.285714
def _multiline_width(multiline_s, line_width_fn=len): """Visible width of a potentially multiline content.""" return max(map(line_width_fn, re.split("[\r\n]", multiline_s)))
[ "def", "_multiline_width", "(", "multiline_s", ",", "line_width_fn", "=", "len", ")", ":", "return", "max", "(", "map", "(", "line_width_fn", ",", "re", ".", "split", "(", "\"[\\r\\n]\"", ",", "multiline_s", ")", ")", ")" ]
59.666667
13.333333
def _deconv_rl_gpu_conv(data_g, h_g, Niter=10): """ using convolve """ # set up some gpu buffers u_g = OCLArray.empty(data_g.shape, np.float32) u_g.copy_buffer(data_g) tmp_g = OCLArray.empty(data_g.shape, np.float32) tmp2_g = OCLArray.empty(data_g.shape, np.float32) # fix this hflip_g = OCLArray.from_array((h_g.get()[::-1, ::-1]).copy()) for i in range(Niter): convolve(u_g, h_g, res_g=tmp_g) _divide_inplace(data_g, tmp_g) # return data_g, tmp_g convolve(tmp_g, hflip_g, res_g=tmp2_g) _multiply_inplace(u_g, tmp2_g) return u_g
[ "def", "_deconv_rl_gpu_conv", "(", "data_g", ",", "h_g", ",", "Niter", "=", "10", ")", ":", "# set up some gpu buffers", "u_g", "=", "OCLArray", ".", "empty", "(", "data_g", ".", "shape", ",", "np", ".", "float32", ")", "u_g", ".", "copy_buffer", "(", "data_g", ")", "tmp_g", "=", "OCLArray", ".", "empty", "(", "data_g", ".", "shape", ",", "np", ".", "float32", ")", "tmp2_g", "=", "OCLArray", ".", "empty", "(", "data_g", ".", "shape", ",", "np", ".", "float32", ")", "# fix this", "hflip_g", "=", "OCLArray", ".", "from_array", "(", "(", "h_g", ".", "get", "(", ")", "[", ":", ":", "-", "1", ",", ":", ":", "-", "1", "]", ")", ".", "copy", "(", ")", ")", "for", "i", "in", "range", "(", "Niter", ")", ":", "convolve", "(", "u_g", ",", "h_g", ",", "res_g", "=", "tmp_g", ")", "_divide_inplace", "(", "data_g", ",", "tmp_g", ")", "# return data_g, tmp_g", "convolve", "(", "tmp_g", ",", "hflip_g", ",", "res_g", "=", "tmp2_g", ")", "_multiply_inplace", "(", "u_g", ",", "tmp2_g", ")", "return", "u_g" ]
17.555556
24.333333
def generate_inverse_mapping(order): """Genereate a lambda entry -> PN order map. This function will generate the opposite of generate mapping. So where generate_mapping gives dict[key] = item this will give dict[item] = key. Valid PN orders are: {} Parameters ---------- order : string A string containing a PN order. Valid values are given above. Returns -------- mapping : dictionary An inverse mapping between the active Lambda terms and index in the metric """ mapping = generate_mapping(order) inv_mapping = {} for key,value in mapping.items(): inv_mapping[value] = key return inv_mapping
[ "def", "generate_inverse_mapping", "(", "order", ")", ":", "mapping", "=", "generate_mapping", "(", "order", ")", "inv_mapping", "=", "{", "}", "for", "key", ",", "value", "in", "mapping", ".", "items", "(", ")", ":", "inv_mapping", "[", "value", "]", "=", "key", "return", "inv_mapping" ]
26.8
21.64
def _safe_read(path, length): """Read file contents.""" if not os.path.exists(os.path.join(HERE, path)): return '' file_handle = codecs.open(os.path.join(HERE, path), encoding='utf-8') contents = file_handle.read(length) file_handle.close() return contents
[ "def", "_safe_read", "(", "path", ",", "length", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "HERE", ",", "path", ")", ")", ":", "return", "''", "file_handle", "=", "codecs", ".", "open", "(", "os", ".", "path", ".", "join", "(", "HERE", ",", "path", ")", ",", "encoding", "=", "'utf-8'", ")", "contents", "=", "file_handle", ".", "read", "(", "length", ")", "file_handle", ".", "close", "(", ")", "return", "contents" ]
35.125
14.75
def get_fun(fun): ''' Return a dict of the last function called for all minions ''' query = '''SELECT minion_id, last_fun FROM {keyspace}.minions WHERE last_fun = ?;'''.format(keyspace=_get_keyspace()) ret = {} # cassandra_cql.cql_query may raise a CommandExecutionError try: data = __salt__['cassandra_cql.cql_query'](query, 'get_fun', [fun]) if data: for row in data: minion = row.get('minion_id') last_fun = row.get('last_fun') if minion and last_fun: ret[minion] = last_fun except CommandExecutionError: log.critical('Could not get the list of minions.') raise except Exception as e: log.critical( 'Unexpected error while getting list of minions: %s', e) raise return ret
[ "def", "get_fun", "(", "fun", ")", ":", "query", "=", "'''SELECT minion_id, last_fun FROM {keyspace}.minions\n WHERE last_fun = ?;'''", ".", "format", "(", "keyspace", "=", "_get_keyspace", "(", ")", ")", "ret", "=", "{", "}", "# cassandra_cql.cql_query may raise a CommandExecutionError", "try", ":", "data", "=", "__salt__", "[", "'cassandra_cql.cql_query'", "]", "(", "query", ",", "'get_fun'", ",", "[", "fun", "]", ")", "if", "data", ":", "for", "row", "in", "data", ":", "minion", "=", "row", ".", "get", "(", "'minion_id'", ")", "last_fun", "=", "row", ".", "get", "(", "'last_fun'", ")", "if", "minion", "and", "last_fun", ":", "ret", "[", "minion", "]", "=", "last_fun", "except", "CommandExecutionError", ":", "log", ".", "critical", "(", "'Could not get the list of minions.'", ")", "raise", "except", "Exception", "as", "e", ":", "log", ".", "critical", "(", "'Unexpected error while getting list of minions: %s'", ",", "e", ")", "raise", "return", "ret" ]
31.37037
22.925926
def _convert_from_thrift_binary_annotations(self, thrift_binary_annotations): """Accepts a thrift decoded binary annotation and converts it to a v1 binary annotation. """ tags = {} local_endpoint = None remote_endpoint = None for binary_annotation in thrift_binary_annotations: if binary_annotation.key == 'sa': remote_endpoint = self._convert_from_thrift_endpoint( thrift_endpoint=binary_annotation.host, ) else: key = binary_annotation.key annotation_type = binary_annotation.annotation_type value = binary_annotation.value if annotation_type == zipkin_core.AnnotationType.BOOL: tags[key] = "true" if value == 1 else "false" elif annotation_type == zipkin_core.AnnotationType.STRING: tags[key] = str(value) else: log.warning('Only STRING and BOOL binary annotations are ' 'supported right now and can be properly decoded.') if binary_annotation.host: local_endpoint = self._convert_from_thrift_endpoint( thrift_endpoint=binary_annotation.host, ) return tags, local_endpoint, remote_endpoint
[ "def", "_convert_from_thrift_binary_annotations", "(", "self", ",", "thrift_binary_annotations", ")", ":", "tags", "=", "{", "}", "local_endpoint", "=", "None", "remote_endpoint", "=", "None", "for", "binary_annotation", "in", "thrift_binary_annotations", ":", "if", "binary_annotation", ".", "key", "==", "'sa'", ":", "remote_endpoint", "=", "self", ".", "_convert_from_thrift_endpoint", "(", "thrift_endpoint", "=", "binary_annotation", ".", "host", ",", ")", "else", ":", "key", "=", "binary_annotation", ".", "key", "annotation_type", "=", "binary_annotation", ".", "annotation_type", "value", "=", "binary_annotation", ".", "value", "if", "annotation_type", "==", "zipkin_core", ".", "AnnotationType", ".", "BOOL", ":", "tags", "[", "key", "]", "=", "\"true\"", "if", "value", "==", "1", "else", "\"false\"", "elif", "annotation_type", "==", "zipkin_core", ".", "AnnotationType", ".", "STRING", ":", "tags", "[", "key", "]", "=", "str", "(", "value", ")", "else", ":", "log", ".", "warning", "(", "'Only STRING and BOOL binary annotations are '", "'supported right now and can be properly decoded.'", ")", "if", "binary_annotation", ".", "host", ":", "local_endpoint", "=", "self", ".", "_convert_from_thrift_endpoint", "(", "thrift_endpoint", "=", "binary_annotation", ".", "host", ",", ")", "return", "tags", ",", "local_endpoint", ",", "remote_endpoint" ]
41.606061
21.848485
def mag_yaw(RAW_IMU, inclination, declination): '''estimate yaw from mag''' m = mag_rotation(RAW_IMU, inclination, declination) (r, p, y) = m.to_euler() y = degrees(y) if y < 0: y += 360 return y
[ "def", "mag_yaw", "(", "RAW_IMU", ",", "inclination", ",", "declination", ")", ":", "m", "=", "mag_rotation", "(", "RAW_IMU", ",", "inclination", ",", "declination", ")", "(", "r", ",", "p", ",", "y", ")", "=", "m", ".", "to_euler", "(", ")", "y", "=", "degrees", "(", "y", ")", "if", "y", "<", "0", ":", "y", "+=", "360", "return", "y" ]
27.5
18
def get_one(self, qry, tpl): ''' get a single from from a query limit 1 is automatically added ''' self.cur.execute(qry + ' LIMIT 1', tpl) result = self.cur.fetchone() # unpack tuple if it has only # one element # TODO unpack results if type(result) is tuple and len(result) == 1: result = result[0] return result
[ "def", "get_one", "(", "self", ",", "qry", ",", "tpl", ")", ":", "self", ".", "cur", ".", "execute", "(", "qry", "+", "' LIMIT 1'", ",", "tpl", ")", "result", "=", "self", ".", "cur", ".", "fetchone", "(", ")", "# unpack tuple if it has only", "# one element", "# TODO unpack results", "if", "type", "(", "result", ")", "is", "tuple", "and", "len", "(", "result", ")", "==", "1", ":", "result", "=", "result", "[", "0", "]", "return", "result" ]
35.181818
9.363636
def deref(self, ctx): """ Returns the value this reference is pointing to. This method uses 'ctx' to resolve the reference and return the value this reference references. If the call was already made, it returns a cached result. It also makes sure there's no cyclic reference, and if so raises CyclicReferenceError. """ if self in ctx.call_nodes: raise CyclicReferenceError(ctx, self) if self in ctx.cached_results: return ctx.cached_results[self] try: ctx.call_nodes.add(self) ctx.call_stack.append(self) result = self.evaluate(ctx) ctx.cached_results[self] = result return result except: if ctx.exception_call_stack is None: ctx.exception_call_stack = list(ctx.call_stack) raise finally: ctx.call_stack.pop() ctx.call_nodes.remove(self)
[ "def", "deref", "(", "self", ",", "ctx", ")", ":", "if", "self", "in", "ctx", ".", "call_nodes", ":", "raise", "CyclicReferenceError", "(", "ctx", ",", "self", ")", "if", "self", "in", "ctx", ".", "cached_results", ":", "return", "ctx", ".", "cached_results", "[", "self", "]", "try", ":", "ctx", ".", "call_nodes", ".", "add", "(", "self", ")", "ctx", ".", "call_stack", ".", "append", "(", "self", ")", "result", "=", "self", ".", "evaluate", "(", "ctx", ")", "ctx", ".", "cached_results", "[", "self", "]", "=", "result", "return", "result", "except", ":", "if", "ctx", ".", "exception_call_stack", "is", "None", ":", "ctx", ".", "exception_call_stack", "=", "list", "(", "ctx", ".", "call_stack", ")", "raise", "finally", ":", "ctx", ".", "call_stack", ".", "pop", "(", ")", "ctx", ".", "call_nodes", ".", "remove", "(", "self", ")" ]
35.222222
17.888889
def get_stream_url(self, session_id, stream_id=None): """ this method returns the url to get streams information """ url = self.api_url + '/v2/project/' + self.api_key + '/session/' + session_id + '/stream' if stream_id: url = url + '/' + stream_id return url
[ "def", "get_stream_url", "(", "self", ",", "session_id", ",", "stream_id", "=", "None", ")", ":", "url", "=", "self", ".", "api_url", "+", "'/v2/project/'", "+", "self", ".", "api_key", "+", "'/session/'", "+", "session_id", "+", "'/stream'", "if", "stream_id", ":", "url", "=", "url", "+", "'/'", "+", "stream_id", "return", "url" ]
49.666667
18.666667
def getServiceNamesToTraceIds(self, time_stamp, service_name, rpc_name): """ Given a time stamp, server service name, and rpc name, fetch all of the client services calling in paired with the lists of every trace Ids (list<i64>) from the server to client. The three arguments specify epoch time in microseconds, server side service name and rpc name. The return maps contains the key - client_service_name and value - list<trace_id>. Parameters: - time_stamp - service_name - rpc_name """ self.send_getServiceNamesToTraceIds(time_stamp, service_name, rpc_name) return self.recv_getServiceNamesToTraceIds()
[ "def", "getServiceNamesToTraceIds", "(", "self", ",", "time_stamp", ",", "service_name", ",", "rpc_name", ")", ":", "self", ".", "send_getServiceNamesToTraceIds", "(", "time_stamp", ",", "service_name", ",", "rpc_name", ")", "return", "self", ".", "recv_getServiceNamesToTraceIds", "(", ")" ]
42.933333
30.533333
def add_castle(self, position): """ Adds kingside and queenside castling moves if legal :type: position: Board """ if self.has_moved or self.in_check(position): return if self.color == color.white: rook_rank = 0 else: rook_rank = 7 castle_type = { notation_const.KING_SIDE_CASTLE: { "rook_file": 7, "direction": lambda king_square, times: king_square.shift_right(times) }, notation_const.QUEEN_SIDE_CASTLE: { "rook_file": 0, "direction": lambda king_square, times: king_square.shift_left(times) } } for castle_key in castle_type: castle_dict = castle_type[castle_key] castle_rook = position.piece_at_square(Location(rook_rank, castle_dict["rook_file"])) if self._rook_legal_for_castle(castle_rook) and \ self._empty_not_in_check(position, castle_dict["direction"]): yield self.create_move(castle_dict["direction"](self.location, 2), castle_key)
[ "def", "add_castle", "(", "self", ",", "position", ")", ":", "if", "self", ".", "has_moved", "or", "self", ".", "in_check", "(", "position", ")", ":", "return", "if", "self", ".", "color", "==", "color", ".", "white", ":", "rook_rank", "=", "0", "else", ":", "rook_rank", "=", "7", "castle_type", "=", "{", "notation_const", ".", "KING_SIDE_CASTLE", ":", "{", "\"rook_file\"", ":", "7", ",", "\"direction\"", ":", "lambda", "king_square", ",", "times", ":", "king_square", ".", "shift_right", "(", "times", ")", "}", ",", "notation_const", ".", "QUEEN_SIDE_CASTLE", ":", "{", "\"rook_file\"", ":", "0", ",", "\"direction\"", ":", "lambda", "king_square", ",", "times", ":", "king_square", ".", "shift_left", "(", "times", ")", "}", "}", "for", "castle_key", "in", "castle_type", ":", "castle_dict", "=", "castle_type", "[", "castle_key", "]", "castle_rook", "=", "position", ".", "piece_at_square", "(", "Location", "(", "rook_rank", ",", "castle_dict", "[", "\"rook_file\"", "]", ")", ")", "if", "self", ".", "_rook_legal_for_castle", "(", "castle_rook", ")", "and", "self", ".", "_empty_not_in_check", "(", "position", ",", "castle_dict", "[", "\"direction\"", "]", ")", ":", "yield", "self", ".", "create_move", "(", "castle_dict", "[", "\"direction\"", "]", "(", "self", ".", "location", ",", "2", ")", ",", "castle_key", ")" ]
37.266667
22
def insert_group(node, target): """Insert node into in target tree, in appropriate group. Uses group and lang from target function. This assumes the node and target share a structure of a first child that determines the grouping, and a second child that will be accumulated in the group. """ group = target.sort lang = target.lang collator = Collator.createInstance(Locale(lang) if lang else Locale()) for child in target.tree: order = collator.compare(group(child) or '', group(node) or '') if order == 0: for nodechild in node[1:]: child.append(nodechild) break elif order > 0: child.addprevious(node) break else: target.tree.append(node)
[ "def", "insert_group", "(", "node", ",", "target", ")", ":", "group", "=", "target", ".", "sort", "lang", "=", "target", ".", "lang", "collator", "=", "Collator", ".", "createInstance", "(", "Locale", "(", "lang", ")", "if", "lang", "else", "Locale", "(", ")", ")", "for", "child", "in", "target", ".", "tree", ":", "order", "=", "collator", ".", "compare", "(", "group", "(", "child", ")", "or", "''", ",", "group", "(", "node", ")", "or", "''", ")", "if", "order", "==", "0", ":", "for", "nodechild", "in", "node", "[", "1", ":", "]", ":", "child", ".", "append", "(", "nodechild", ")", "break", "elif", "order", ">", "0", ":", "child", ".", "addprevious", "(", "node", ")", "break", "else", ":", "target", ".", "tree", ".", "append", "(", "node", ")" ]
34.454545
18.909091
def find_site_python(module_name, paths=None): """Find the rez native python package that contains the given module. This function is used by python 'native' rez installers to find the native rez python package that represents the python installation that this module is installed into. Note: This function is dependent on the behavior found in the python '_native' package found in the 'rez-recipes' repository. Specifically, it expects to find a python package with a '_site_paths' list attribute listing the site directories associated with the python installation. Args: module_name (str): Target python module. paths (list of str, optional): paths to search for packages, defaults to `config.packages_path`. Returns: `Package`: Native python package containing the named module. """ from rez.packages_ import iter_packages import subprocess import ast import os py_cmd = 'import {x}; print {x}.__path__'.format(x=module_name) p = popen(["python", "-c", py_cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode: raise InvalidPackageError( "Failed to find installed python module '%s':\n%s" % (module_name, err)) module_paths = ast.literal_eval(out.strip()) def issubdir(path, parent_path): return path.startswith(parent_path + os.sep) for package in iter_packages("python", paths=paths): if not hasattr(package, "_site_paths"): continue contained = True for module_path in module_paths: if not any(issubdir(module_path, x) for x in package._site_paths): contained = False if contained: return package raise InvalidPackageError( "Failed to find python installation containing the module '%s'. Has " "python been installed as a rez package?" % module_name)
[ "def", "find_site_python", "(", "module_name", ",", "paths", "=", "None", ")", ":", "from", "rez", ".", "packages_", "import", "iter_packages", "import", "subprocess", "import", "ast", "import", "os", "py_cmd", "=", "'import {x}; print {x}.__path__'", ".", "format", "(", "x", "=", "module_name", ")", "p", "=", "popen", "(", "[", "\"python\"", ",", "\"-c\"", ",", "py_cmd", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "out", ",", "err", "=", "p", ".", "communicate", "(", ")", "if", "p", ".", "returncode", ":", "raise", "InvalidPackageError", "(", "\"Failed to find installed python module '%s':\\n%s\"", "%", "(", "module_name", ",", "err", ")", ")", "module_paths", "=", "ast", ".", "literal_eval", "(", "out", ".", "strip", "(", ")", ")", "def", "issubdir", "(", "path", ",", "parent_path", ")", ":", "return", "path", ".", "startswith", "(", "parent_path", "+", "os", ".", "sep", ")", "for", "package", "in", "iter_packages", "(", "\"python\"", ",", "paths", "=", "paths", ")", ":", "if", "not", "hasattr", "(", "package", ",", "\"_site_paths\"", ")", ":", "continue", "contained", "=", "True", "for", "module_path", "in", "module_paths", ":", "if", "not", "any", "(", "issubdir", "(", "module_path", ",", "x", ")", "for", "x", "in", "package", ".", "_site_paths", ")", ":", "contained", "=", "False", "if", "contained", ":", "return", "package", "raise", "InvalidPackageError", "(", "\"Failed to find python installation containing the module '%s'. Has \"", "\"python been installed as a rez package?\"", "%", "module_name", ")" ]
33.810345
23.982759
def traverseItems(self, mode=TraverseMode.DepthFirst, parent=None): """ Generates a tree iterator that will traverse the items of this tree in either a depth-first or breadth-first fashion. :param mode | <XTreeWidget.TraverseMode> recurse | <bool> :return <generator> """ try: if parent: count = parent.childCount() func = parent.child else: count = self.topLevelItemCount() func = self.topLevelItem except RuntimeError: # can be raised when iterating on a deleted tree widget. return next = [] for i in range(count): try: item = func(i) except RuntimeError: # can be raised when iterating on a deleted tree widget return else: yield item if mode == XTreeWidget.TraverseMode.DepthFirst: for child in self.traverseItems(mode, item): yield child else: next.append(item) for item in next: for child in self.traverseItems(mode, item): yield child
[ "def", "traverseItems", "(", "self", ",", "mode", "=", "TraverseMode", ".", "DepthFirst", ",", "parent", "=", "None", ")", ":", "try", ":", "if", "parent", ":", "count", "=", "parent", ".", "childCount", "(", ")", "func", "=", "parent", ".", "child", "else", ":", "count", "=", "self", ".", "topLevelItemCount", "(", ")", "func", "=", "self", ".", "topLevelItem", "except", "RuntimeError", ":", "# can be raised when iterating on a deleted tree widget.\r", "return", "next", "=", "[", "]", "for", "i", "in", "range", "(", "count", ")", ":", "try", ":", "item", "=", "func", "(", "i", ")", "except", "RuntimeError", ":", "# can be raised when iterating on a deleted tree widget\r", "return", "else", ":", "yield", "item", "if", "mode", "==", "XTreeWidget", ".", "TraverseMode", ".", "DepthFirst", ":", "for", "child", "in", "self", ".", "traverseItems", "(", "mode", ",", "item", ")", ":", "yield", "child", "else", ":", "next", ".", "append", "(", "item", ")", "for", "item", "in", "next", ":", "for", "child", "in", "self", ".", "traverseItems", "(", "mode", ",", "item", ")", ":", "yield", "child" ]
33.536585
15.878049
def login(request, user): """ Persist a user id and a backend in the request. This way a user doesn't have to reauthenticate on every request. Note that data set during the anonymous session is retained when the user logs in. """ session_auth_hash = '' if user is None: user = request.user if hasattr(user, 'get_session_auth_hash'): session_auth_hash = user.get_session_auth_hash() if SESSION_KEY in request.session: session_key = request.session[SESSION_KEY] if session_key != user.pk or ( session_auth_hash and request.session.get(HASH_SESSION_KEY) != session_auth_hash): # To avoid reusing another user's session, create a new, empty # session if the existing session corresponds to a different # authenticated user. request.session.flush() else: request.session.cycle_key() request.session[SESSION_KEY] = user.pk request.session[BACKEND_SESSION_KEY] = user.backend request.session[USER_DATA_SESSION_KEY] = user.user_data request.session[HASH_SESSION_KEY] = session_auth_hash update_token_in_session(request.session, user.token) if hasattr(request, 'user'): request.user = user rotate_token(request) user_logged_in.send(sender=user.__class__, request=request, user=user)
[ "def", "login", "(", "request", ",", "user", ")", ":", "session_auth_hash", "=", "''", "if", "user", "is", "None", ":", "user", "=", "request", ".", "user", "if", "hasattr", "(", "user", ",", "'get_session_auth_hash'", ")", ":", "session_auth_hash", "=", "user", ".", "get_session_auth_hash", "(", ")", "if", "SESSION_KEY", "in", "request", ".", "session", ":", "session_key", "=", "request", ".", "session", "[", "SESSION_KEY", "]", "if", "session_key", "!=", "user", ".", "pk", "or", "(", "session_auth_hash", "and", "request", ".", "session", ".", "get", "(", "HASH_SESSION_KEY", ")", "!=", "session_auth_hash", ")", ":", "# To avoid reusing another user's session, create a new, empty", "# session if the existing session corresponds to a different", "# authenticated user.", "request", ".", "session", ".", "flush", "(", ")", "else", ":", "request", ".", "session", ".", "cycle_key", "(", ")", "request", ".", "session", "[", "SESSION_KEY", "]", "=", "user", ".", "pk", "request", ".", "session", "[", "BACKEND_SESSION_KEY", "]", "=", "user", ".", "backend", "request", ".", "session", "[", "USER_DATA_SESSION_KEY", "]", "=", "user", ".", "user_data", "request", ".", "session", "[", "HASH_SESSION_KEY", "]", "=", "session_auth_hash", "update_token_in_session", "(", "request", ".", "session", ",", "user", ".", "token", ")", "if", "hasattr", "(", "request", ",", "'user'", ")", ":", "request", ".", "user", "=", "user", "rotate_token", "(", "request", ")", "user_logged_in", ".", "send", "(", "sender", "=", "user", ".", "__class__", ",", "request", "=", "request", ",", "user", "=", "user", ")" ]
39.5
17.5
def draw_flow(img, flow, step=16, dtype=uint8): """ draws flow vectors on image this came from opencv/examples directory another way: http://docs.opencv.org/trunk/doc/py_tutorials/py_gui/py_drawing_functions/py_drawing_functions.html """ maxval = iinfo(img.dtype).max # scaleFact = 1. #arbitary factor to make flow visible canno = (0, maxval, 0) # green color h, w = img.shape[:2] y, x = mgrid[step//2:h:step, step//2:w:step].reshape(2, -1) fx, fy = flow[y, x].T # create line endpoints lines = vstack([x, y, (x+fx), (y+fy)]).T.reshape(-1, 2, 2) lines = int32(lines + 0.5) # create image if img.ndim == 2: # assume gray vis = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) else: # already RGB vis = img # draw line cv2.polylines(vis, lines, isClosed=False, color=canno, thickness=1, lineType=8) # draw filled green circles for (x1, y1), (x2, y2) in lines: cv2.circle(vis, center=(x1, y1), radius=1, color=canno, thickness=-1) return vis
[ "def", "draw_flow", "(", "img", ",", "flow", ",", "step", "=", "16", ",", "dtype", "=", "uint8", ")", ":", "maxval", "=", "iinfo", "(", "img", ".", "dtype", ")", ".", "max", "# scaleFact = 1. #arbitary factor to make flow visible", "canno", "=", "(", "0", ",", "maxval", ",", "0", ")", "# green color", "h", ",", "w", "=", "img", ".", "shape", "[", ":", "2", "]", "y", ",", "x", "=", "mgrid", "[", "step", "//", "2", ":", "h", ":", "step", ",", "step", "//", "2", ":", "w", ":", "step", "]", ".", "reshape", "(", "2", ",", "-", "1", ")", "fx", ",", "fy", "=", "flow", "[", "y", ",", "x", "]", ".", "T", "# create line endpoints", "lines", "=", "vstack", "(", "[", "x", ",", "y", ",", "(", "x", "+", "fx", ")", ",", "(", "y", "+", "fy", ")", "]", ")", ".", "T", ".", "reshape", "(", "-", "1", ",", "2", ",", "2", ")", "lines", "=", "int32", "(", "lines", "+", "0.5", ")", "# create image", "if", "img", ".", "ndim", "==", "2", ":", "# assume gray", "vis", "=", "cv2", ".", "cvtColor", "(", "img", ",", "cv2", ".", "COLOR_GRAY2BGR", ")", "else", ":", "# already RGB", "vis", "=", "img", "# draw line", "cv2", ".", "polylines", "(", "vis", ",", "lines", ",", "isClosed", "=", "False", ",", "color", "=", "canno", ",", "thickness", "=", "1", ",", "lineType", "=", "8", ")", "# draw filled green circles", "for", "(", "x1", ",", "y1", ")", ",", "(", "x2", ",", "y2", ")", "in", "lines", ":", "cv2", ".", "circle", "(", "vis", ",", "center", "=", "(", "x1", ",", "y1", ")", ",", "radius", "=", "1", ",", "color", "=", "canno", ",", "thickness", "=", "-", "1", ")", "return", "vis" ]
37.666667
17.814815
def get_config_values(config_path, section, default='default'): """ Parse ini config file and return a dict of values. The provided section overrides any values in default section. """ values = {} if not os.path.isfile(config_path): raise IpaUtilsException( 'Config file not found: %s' % config_path ) config = configparser.ConfigParser() try: config.read(config_path) except Exception: raise IpaUtilsException( 'Config file format invalid.' ) try: values.update(config.items(default)) except Exception: pass try: values.update(config.items(section)) except Exception: pass return values
[ "def", "get_config_values", "(", "config_path", ",", "section", ",", "default", "=", "'default'", ")", ":", "values", "=", "{", "}", "if", "not", "os", ".", "path", ".", "isfile", "(", "config_path", ")", ":", "raise", "IpaUtilsException", "(", "'Config file not found: %s'", "%", "config_path", ")", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "try", ":", "config", ".", "read", "(", "config_path", ")", "except", "Exception", ":", "raise", "IpaUtilsException", "(", "'Config file format invalid.'", ")", "try", ":", "values", ".", "update", "(", "config", ".", "items", "(", "default", ")", ")", "except", "Exception", ":", "pass", "try", ":", "values", ".", "update", "(", "config", ".", "items", "(", "section", ")", ")", "except", "Exception", ":", "pass", "return", "values" ]
21.636364
21.454545
def get_tools(whitelist, known_plugins): """ Filter all known plugins by a whitelist specified. If the whitelist is empty, default to all plugins. """ def getpath(c): return "%s:%s" % (c.__module__, c.__class__.__name__) tools = [x for x in known_plugins if getpath(x) in whitelist] if not tools: if whitelist: raise UnknownTools(map(getpath, known_plugins)) tools = known_plugins return tools
[ "def", "get_tools", "(", "whitelist", ",", "known_plugins", ")", ":", "def", "getpath", "(", "c", ")", ":", "return", "\"%s:%s\"", "%", "(", "c", ".", "__module__", ",", "c", ".", "__class__", ".", "__name__", ")", "tools", "=", "[", "x", "for", "x", "in", "known_plugins", "if", "getpath", "(", "x", ")", "in", "whitelist", "]", "if", "not", "tools", ":", "if", "whitelist", ":", "raise", "UnknownTools", "(", "map", "(", "getpath", ",", "known_plugins", ")", ")", "tools", "=", "known_plugins", "return", "tools" ]
29.933333
18.866667
def headers_as_list(self): """ Does the same as 'headers' except it is returned as a list. """ headers = self.headers headers_list = ['{}: {}'.format(key, value) for key, value in iteritems(headers)] return headers_list
[ "def", "headers_as_list", "(", "self", ")", ":", "headers", "=", "self", ".", "headers", "headers_list", "=", "[", "'{}: {}'", ".", "format", "(", "key", ",", "value", ")", "for", "key", ",", "value", "in", "iteritems", "(", "headers", ")", "]", "return", "headers_list" ]
37.285714
16.142857
def add_url (self, url, line=0, column=0, page=0, name=u"", base=None): """Add new URL to queue.""" if base: base_ref = urlutil.url_norm(base)[0] else: base_ref = None url_data = get_url_from(url, self.recursion_level+1, self.aggregate, parent_url=self.url, base_ref=base_ref, line=line, column=column, page=page, name=name, parent_content_type=self.content_type) self.aggregate.urlqueue.put(url_data)
[ "def", "add_url", "(", "self", ",", "url", ",", "line", "=", "0", ",", "column", "=", "0", ",", "page", "=", "0", ",", "name", "=", "u\"\"", ",", "base", "=", "None", ")", ":", "if", "base", ":", "base_ref", "=", "urlutil", ".", "url_norm", "(", "base", ")", "[", "0", "]", "else", ":", "base_ref", "=", "None", "url_data", "=", "get_url_from", "(", "url", ",", "self", ".", "recursion_level", "+", "1", ",", "self", ".", "aggregate", ",", "parent_url", "=", "self", ".", "url", ",", "base_ref", "=", "base_ref", ",", "line", "=", "line", ",", "column", "=", "column", ",", "page", "=", "page", ",", "name", "=", "name", ",", "parent_content_type", "=", "self", ".", "content_type", ")", "self", ".", "aggregate", ".", "urlqueue", ".", "put", "(", "url_data", ")" ]
48
21.3
def _check_properties(cls, property_names, require_indexed=True): """Internal helper to check the given properties exist and meet specified requirements. Called from query.py. Args: property_names: List or tuple of property names -- each being a string, possibly containing dots (to address subproperties of structured properties). Raises: InvalidPropertyError if one of the properties is invalid. AssertionError if the argument is not a list or tuple of strings. """ assert isinstance(property_names, (list, tuple)), repr(property_names) for name in property_names: assert isinstance(name, basestring), repr(name) if '.' in name: name, rest = name.split('.', 1) else: rest = None prop = cls._properties.get(name) if prop is None: cls._unknown_property(name) else: prop._check_property(rest, require_indexed=require_indexed)
[ "def", "_check_properties", "(", "cls", ",", "property_names", ",", "require_indexed", "=", "True", ")", ":", "assert", "isinstance", "(", "property_names", ",", "(", "list", ",", "tuple", ")", ")", ",", "repr", "(", "property_names", ")", "for", "name", "in", "property_names", ":", "assert", "isinstance", "(", "name", ",", "basestring", ")", ",", "repr", "(", "name", ")", "if", "'.'", "in", "name", ":", "name", ",", "rest", "=", "name", ".", "split", "(", "'.'", ",", "1", ")", "else", ":", "rest", "=", "None", "prop", "=", "cls", ".", "_properties", ".", "get", "(", "name", ")", "if", "prop", "is", "None", ":", "cls", ".", "_unknown_property", "(", "name", ")", "else", ":", "prop", ".", "_check_property", "(", "rest", ",", "require_indexed", "=", "require_indexed", ")" ]
34.62963
21.962963
def completelist(self, text): """Return a list of potential matches for completion n.b. you want to complete to a file in the current working directory that starts with a ~, use ./~ when typing in. Paths that start with ~ are magical and specify users' home paths """ path = os.path.expanduser(text) if len(path) == 0 or path[0] != os.path.sep: path = os.path.join(os.getcwd(), path) if text == '~': dpath = dtext = '' bpath = '~' files = ['~/'] elif text.startswith('~') and text.find('/', 1) < 0: return self.matchuserhome(text) else: dtext = os.path.dirname(text) dpath = os.path.dirname(path) bpath = os.path.basename(path) files = os.listdir(dpath) if bpath =='': matches = [self.buildpath(text, f) for f in files if not f.startswith('.')] else: matches = [self.buildpath(dtext, f) for f in files if f.startswith(bpath)] if len(matches) == 0 and os.path.basename(path)=='..': files = os.listdir(path) matches = [os.path.join(text, f) for f in files] return matches
[ "def", "completelist", "(", "self", ",", "text", ")", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "text", ")", "if", "len", "(", "path", ")", "==", "0", "or", "path", "[", "0", "]", "!=", "os", ".", "path", ".", "sep", ":", "path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "path", ")", "if", "text", "==", "'~'", ":", "dpath", "=", "dtext", "=", "''", "bpath", "=", "'~'", "files", "=", "[", "'~/'", "]", "elif", "text", ".", "startswith", "(", "'~'", ")", "and", "text", ".", "find", "(", "'/'", ",", "1", ")", "<", "0", ":", "return", "self", ".", "matchuserhome", "(", "text", ")", "else", ":", "dtext", "=", "os", ".", "path", ".", "dirname", "(", "text", ")", "dpath", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "bpath", "=", "os", ".", "path", ".", "basename", "(", "path", ")", "files", "=", "os", ".", "listdir", "(", "dpath", ")", "if", "bpath", "==", "''", ":", "matches", "=", "[", "self", ".", "buildpath", "(", "text", ",", "f", ")", "for", "f", "in", "files", "if", "not", "f", ".", "startswith", "(", "'.'", ")", "]", "else", ":", "matches", "=", "[", "self", ".", "buildpath", "(", "dtext", ",", "f", ")", "for", "f", "in", "files", "if", "f", ".", "startswith", "(", "bpath", ")", "]", "if", "len", "(", "matches", ")", "==", "0", "and", "os", ".", "path", ".", "basename", "(", "path", ")", "==", "'..'", ":", "files", "=", "os", ".", "listdir", "(", "path", ")", "matches", "=", "[", "os", ".", "path", ".", "join", "(", "text", ",", "f", ")", "for", "f", "in", "files", "]", "return", "matches" ]
42
16.034483
def _struct_or_lob_handler(c, ctx): """Handles tokens that begin with an open brace.""" assert c == _OPEN_BRACE c, self = yield yield ctx.immediate_transition(_STRUCT_OR_LOB_TABLE[c](c, ctx))
[ "def", "_struct_or_lob_handler", "(", "c", ",", "ctx", ")", ":", "assert", "c", "==", "_OPEN_BRACE", "c", ",", "self", "=", "yield", "yield", "ctx", ".", "immediate_transition", "(", "_STRUCT_OR_LOB_TABLE", "[", "c", "]", "(", "c", ",", "ctx", ")", ")" ]
40.6
13.2
def submit_unseal_key(self, key=None, reset=False, migrate=False): """Enter a single master key share to progress the unsealing of the Vault. If the threshold number of master key shares is reached, Vault will attempt to unseal the Vault. Otherwise, this API must be called multiple times until that threshold is met. Either the key or reset parameter must be provided; if both are provided, reset takes precedence. Supported methods: PUT: /sys/unseal. Produces: 200 application/json :param key: Specifies a single master key share. This is required unless reset is true. :type key: str | unicode :param reset: Specifies if previously-provided unseal keys are discarded and the unseal process is reset. :type reset: bool :param migrate: Available in 1.0 Beta - Used to migrate the seal from shamir to autoseal or autoseal to shamir. Must be provided on all unseal key calls. :type: migrate: bool :return: The JSON response of the request. :rtype: dict """ params = { 'migrate': migrate, } if not reset and key is not None: params['key'] = key elif reset: params['reset'] = reset api_path = '/v1/sys/unseal' response = self._adapter.put( url=api_path, json=params, ) return response.json()
[ "def", "submit_unseal_key", "(", "self", ",", "key", "=", "None", ",", "reset", "=", "False", ",", "migrate", "=", "False", ")", ":", "params", "=", "{", "'migrate'", ":", "migrate", ",", "}", "if", "not", "reset", "and", "key", "is", "not", "None", ":", "params", "[", "'key'", "]", "=", "key", "elif", "reset", ":", "params", "[", "'reset'", "]", "=", "reset", "api_path", "=", "'/v1/sys/unseal'", "response", "=", "self", ".", "_adapter", ".", "put", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")", "return", "response", ".", "json", "(", ")" ]
39.416667
26.055556
def load_plan(self, fname): """ read the list of thoughts from a text file """ with open(fname, "r") as f: for line in f: if line != '': tpe, txt = self.parse_plan_from_string(line) #print('tpe= "' + tpe + '"', txt) if tpe == 'name': self.name = txt elif tpe == 'version': self.plan_version = txt elif tpe == 'belief': self.beliefs.add(txt) elif tpe == 'desire': self.desires.add(txt) elif tpe == 'intention': self.intentions.add(txt)
[ "def", "load_plan", "(", "self", ",", "fname", ")", ":", "with", "open", "(", "fname", ",", "\"r\"", ")", "as", "f", ":", "for", "line", "in", "f", ":", "if", "line", "!=", "''", ":", "tpe", ",", "txt", "=", "self", ".", "parse_plan_from_string", "(", "line", ")", "#print('tpe= \"' + tpe + '\"', txt)", "if", "tpe", "==", "'name'", ":", "self", ".", "name", "=", "txt", "elif", "tpe", "==", "'version'", ":", "self", ".", "plan_version", "=", "txt", "elif", "tpe", "==", "'belief'", ":", "self", ".", "beliefs", ".", "add", "(", "txt", ")", "elif", "tpe", "==", "'desire'", ":", "self", ".", "desires", ".", "add", "(", "txt", ")", "elif", "tpe", "==", "'intention'", ":", "self", ".", "intentions", ".", "add", "(", "txt", ")" ]
42.764706
6.529412
def manage_beacons(self, tag, data): ''' Manage Beacons ''' func = data.get('func', None) name = data.get('name', None) beacon_data = data.get('beacon_data', None) include_pillar = data.get('include_pillar', None) include_opts = data.get('include_opts', None) funcs = {'add': ('add_beacon', (name, beacon_data)), 'modify': ('modify_beacon', (name, beacon_data)), 'delete': ('delete_beacon', (name,)), 'enable': ('enable_beacons', ()), 'disable': ('disable_beacons', ()), 'enable_beacon': ('enable_beacon', (name,)), 'disable_beacon': ('disable_beacon', (name,)), 'list': ('list_beacons', (include_opts, include_pillar)), 'list_available': ('list_available_beacons', ()), 'validate_beacon': ('validate_beacon', (name, beacon_data)), 'reset': ('reset', ())} # Call the appropriate beacon function try: alias, params = funcs.get(func) getattr(self.beacons, alias)(*params) except AttributeError: log.error('Function "%s" is unavailable in salt.beacons', func) except TypeError as exc: log.info( 'Failed to handle %s with data(%s). Error: %s', tag, data, exc, exc_info_on_loglevel=logging.DEBUG )
[ "def", "manage_beacons", "(", "self", ",", "tag", ",", "data", ")", ":", "func", "=", "data", ".", "get", "(", "'func'", ",", "None", ")", "name", "=", "data", ".", "get", "(", "'name'", ",", "None", ")", "beacon_data", "=", "data", ".", "get", "(", "'beacon_data'", ",", "None", ")", "include_pillar", "=", "data", ".", "get", "(", "'include_pillar'", ",", "None", ")", "include_opts", "=", "data", ".", "get", "(", "'include_opts'", ",", "None", ")", "funcs", "=", "{", "'add'", ":", "(", "'add_beacon'", ",", "(", "name", ",", "beacon_data", ")", ")", ",", "'modify'", ":", "(", "'modify_beacon'", ",", "(", "name", ",", "beacon_data", ")", ")", ",", "'delete'", ":", "(", "'delete_beacon'", ",", "(", "name", ",", ")", ")", ",", "'enable'", ":", "(", "'enable_beacons'", ",", "(", ")", ")", ",", "'disable'", ":", "(", "'disable_beacons'", ",", "(", ")", ")", ",", "'enable_beacon'", ":", "(", "'enable_beacon'", ",", "(", "name", ",", ")", ")", ",", "'disable_beacon'", ":", "(", "'disable_beacon'", ",", "(", "name", ",", ")", ")", ",", "'list'", ":", "(", "'list_beacons'", ",", "(", "include_opts", ",", "include_pillar", ")", ")", ",", "'list_available'", ":", "(", "'list_available_beacons'", ",", "(", ")", ")", ",", "'validate_beacon'", ":", "(", "'validate_beacon'", ",", "(", "name", ",", "beacon_data", ")", ")", ",", "'reset'", ":", "(", "'reset'", ",", "(", ")", ")", "}", "# Call the appropriate beacon function", "try", ":", "alias", ",", "params", "=", "funcs", ".", "get", "(", "func", ")", "getattr", "(", "self", ".", "beacons", ",", "alias", ")", "(", "*", "params", ")", "except", "AttributeError", ":", "log", ".", "error", "(", "'Function \"%s\" is unavailable in salt.beacons'", ",", "func", ")", "except", "TypeError", "as", "exc", ":", "log", ".", "info", "(", "'Failed to handle %s with data(%s). Error: %s'", ",", "tag", ",", "data", ",", "exc", ",", "exc_info_on_loglevel", "=", "logging", ".", "DEBUG", ")" ]
42.805556
17.638889
def _compute_term1(self, C, mag): """ Compute magnitude dependent terms (2nd and 3rd) in equation 3 page 46. """ mag_diff = mag - 6 return C['c2'] * mag_diff + C['c3'] * mag_diff ** 2
[ "def", "_compute_term1", "(", "self", ",", "C", ",", "mag", ")", ":", "mag_diff", "=", "mag", "-", "6", "return", "C", "[", "'c2'", "]", "*", "mag_diff", "+", "C", "[", "'c3'", "]", "*", "mag_diff", "**", "2" ]
28.125
16.625
def json_call(cls, method, url, **kwargs): """ Call a remote api using json format """ # retrieve api key if needed empty_key = kwargs.pop('empty_key', False) send_key = kwargs.pop('send_key', True) return_header = kwargs.pop('return_header', False) try: apikey = cls.get('apirest.key') if not apikey and not empty_key: cls.echo("No apikey found for REST API, please use " "'gandi setup' command") sys.exit(1) if send_key: if 'headers' in kwargs: kwargs['headers'].update({'X-Api-Key': apikey}) else: kwargs['headers'] = {'X-Api-Key': apikey} except MissingConfiguration: if not empty_key: return [] # make the call cls.debug('calling url: %s %s' % (method, url)) cls.debug('with params: %r' % kwargs) try: resp, resp_headers = JsonClient.request(method, url, **kwargs) cls.dump('responded: %r' % resp) if return_header: return resp, resp_headers return resp except APICallFailed as err: cls.echo('An error occured during call: %s' % err.errors) sys.exit(1)
[ "def", "json_call", "(", "cls", ",", "method", ",", "url", ",", "*", "*", "kwargs", ")", ":", "# retrieve api key if needed", "empty_key", "=", "kwargs", ".", "pop", "(", "'empty_key'", ",", "False", ")", "send_key", "=", "kwargs", ".", "pop", "(", "'send_key'", ",", "True", ")", "return_header", "=", "kwargs", ".", "pop", "(", "'return_header'", ",", "False", ")", "try", ":", "apikey", "=", "cls", ".", "get", "(", "'apirest.key'", ")", "if", "not", "apikey", "and", "not", "empty_key", ":", "cls", ".", "echo", "(", "\"No apikey found for REST API, please use \"", "\"'gandi setup' command\"", ")", "sys", ".", "exit", "(", "1", ")", "if", "send_key", ":", "if", "'headers'", "in", "kwargs", ":", "kwargs", "[", "'headers'", "]", ".", "update", "(", "{", "'X-Api-Key'", ":", "apikey", "}", ")", "else", ":", "kwargs", "[", "'headers'", "]", "=", "{", "'X-Api-Key'", ":", "apikey", "}", "except", "MissingConfiguration", ":", "if", "not", "empty_key", ":", "return", "[", "]", "# make the call", "cls", ".", "debug", "(", "'calling url: %s %s'", "%", "(", "method", ",", "url", ")", ")", "cls", ".", "debug", "(", "'with params: %r'", "%", "kwargs", ")", "try", ":", "resp", ",", "resp_headers", "=", "JsonClient", ".", "request", "(", "method", ",", "url", ",", "*", "*", "kwargs", ")", "cls", ".", "dump", "(", "'responded: %r'", "%", "resp", ")", "if", "return_header", ":", "return", "resp", ",", "resp_headers", "return", "resp", "except", "APICallFailed", "as", "err", ":", "cls", ".", "echo", "(", "'An error occured during call: %s'", "%", "err", ".", "errors", ")", "sys", ".", "exit", "(", "1", ")" ]
40.71875
13.1875
def create_chapter_from_string(self, html_string, url=None, title=None): """ Creates a Chapter object from a string. Sanitizes the string using the clean_function method, and saves it as the content of the created chapter. Args: html_string (string): The html or xhtml content of the created Chapter url (Option[string]): A url to infer the title of the chapter from title (Option[string]): The title of the created Chapter. By default, this is None, in which case the title will try to be inferred from the webpage at the url. Returns: Chapter: A chapter object whose content is the given string and whose title is that provided or inferred from the url """ clean_html_string = self.clean_function(html_string) clean_xhtml_string = clean.html_to_xhtml(clean_html_string) if title: pass else: try: root = BeautifulSoup(html_string, 'html.parser') title_node = root.title if title_node is not None: title = unicode(title_node.string) else: raise ValueError except (IndexError, ValueError): title = 'Ebook Chapter' return Chapter(clean_xhtml_string, title, url)
[ "def", "create_chapter_from_string", "(", "self", ",", "html_string", ",", "url", "=", "None", ",", "title", "=", "None", ")", ":", "clean_html_string", "=", "self", ".", "clean_function", "(", "html_string", ")", "clean_xhtml_string", "=", "clean", ".", "html_to_xhtml", "(", "clean_html_string", ")", "if", "title", ":", "pass", "else", ":", "try", ":", "root", "=", "BeautifulSoup", "(", "html_string", ",", "'html.parser'", ")", "title_node", "=", "root", ".", "title", "if", "title_node", "is", "not", "None", ":", "title", "=", "unicode", "(", "title_node", ".", "string", ")", "else", ":", "raise", "ValueError", "except", "(", "IndexError", ",", "ValueError", ")", ":", "title", "=", "'Ebook Chapter'", "return", "Chapter", "(", "clean_xhtml_string", ",", "title", ",", "url", ")" ]
42.212121
20.393939
def ip_rtm_config_route_static_bfd_bfd_static_route_bfd_static_route_src(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip = ET.SubElement(config, "ip", xmlns="urn:brocade.com:mgmt:brocade-common-def") rtm_config = ET.SubElement(ip, "rtm-config", xmlns="urn:brocade.com:mgmt:brocade-rtm") route = ET.SubElement(rtm_config, "route") static = ET.SubElement(route, "static") bfd = ET.SubElement(static, "bfd") bfd_static_route = ET.SubElement(bfd, "bfd-static-route") bfd_static_route_dest_key = ET.SubElement(bfd_static_route, "bfd-static-route-dest") bfd_static_route_dest_key.text = kwargs.pop('bfd_static_route_dest') bfd_static_route_src = ET.SubElement(bfd_static_route, "bfd-static-route-src") bfd_static_route_src.text = kwargs.pop('bfd_static_route_src') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "ip_rtm_config_route_static_bfd_bfd_static_route_bfd_static_route_src", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "ip", "=", "ET", ".", "SubElement", "(", "config", ",", "\"ip\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-common-def\"", ")", "rtm_config", "=", "ET", ".", "SubElement", "(", "ip", ",", "\"rtm-config\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-rtm\"", ")", "route", "=", "ET", ".", "SubElement", "(", "rtm_config", ",", "\"route\"", ")", "static", "=", "ET", ".", "SubElement", "(", "route", ",", "\"static\"", ")", "bfd", "=", "ET", ".", "SubElement", "(", "static", ",", "\"bfd\"", ")", "bfd_static_route", "=", "ET", ".", "SubElement", "(", "bfd", ",", "\"bfd-static-route\"", ")", "bfd_static_route_dest_key", "=", "ET", ".", "SubElement", "(", "bfd_static_route", ",", "\"bfd-static-route-dest\"", ")", "bfd_static_route_dest_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'bfd_static_route_dest'", ")", "bfd_static_route_src", "=", "ET", ".", "SubElement", "(", "bfd_static_route", ",", "\"bfd-static-route-src\"", ")", "bfd_static_route_src", ".", "text", "=", "kwargs", ".", "pop", "(", "'bfd_static_route_src'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
56.823529
25.235294
def qos_red_profile_min_threshold(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") qos = ET.SubElement(config, "qos", xmlns="urn:brocade.com:mgmt:brocade-qos") red_profile = ET.SubElement(qos, "red-profile") profile_id_key = ET.SubElement(red_profile, "profile-id") profile_id_key.text = kwargs.pop('profile_id') min_threshold = ET.SubElement(red_profile, "min-threshold") min_threshold.text = kwargs.pop('min_threshold') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "qos_red_profile_min_threshold", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "qos", "=", "ET", ".", "SubElement", "(", "config", ",", "\"qos\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-qos\"", ")", "red_profile", "=", "ET", ".", "SubElement", "(", "qos", ",", "\"red-profile\"", ")", "profile_id_key", "=", "ET", ".", "SubElement", "(", "red_profile", ",", "\"profile-id\"", ")", "profile_id_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'profile_id'", ")", "min_threshold", "=", "ET", ".", "SubElement", "(", "red_profile", ",", "\"min-threshold\"", ")", "min_threshold", ".", "text", "=", "kwargs", ".", "pop", "(", "'min_threshold'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
45.923077
16.923077
def create_system(self, **system_options): """ Create an OpenMM system for every supported topology file with given system options """ if self.master is None: raise ValueError('Handler {} is not able to create systems.'.format(self)) if isinstance(self.master, ForceField): system = self.master.createSystem(self.topology, **system_options) elif isinstance(self.master, (AmberPrmtopFile, GromacsTopFile, DesmondDMSFile)): system = self.master.createSystem(**system_options) elif isinstance(self.master, CharmmPsfFile): if not hasattr(self.master, 'parmset'): raise ValueError('PSF topology files require Charmm parameters.') system = self.master.createSystem(self.master.parmset, **system_options) else: raise NotImplementedError('Handler {} is not able to create systems.'.format(self)) if self.has_box: system.setDefaultPeriodicBoxVectors(*self.box) return system
[ "def", "create_system", "(", "self", ",", "*", "*", "system_options", ")", ":", "if", "self", ".", "master", "is", "None", ":", "raise", "ValueError", "(", "'Handler {} is not able to create systems.'", ".", "format", "(", "self", ")", ")", "if", "isinstance", "(", "self", ".", "master", ",", "ForceField", ")", ":", "system", "=", "self", ".", "master", ".", "createSystem", "(", "self", ".", "topology", ",", "*", "*", "system_options", ")", "elif", "isinstance", "(", "self", ".", "master", ",", "(", "AmberPrmtopFile", ",", "GromacsTopFile", ",", "DesmondDMSFile", ")", ")", ":", "system", "=", "self", ".", "master", ".", "createSystem", "(", "*", "*", "system_options", ")", "elif", "isinstance", "(", "self", ".", "master", ",", "CharmmPsfFile", ")", ":", "if", "not", "hasattr", "(", "self", ".", "master", ",", "'parmset'", ")", ":", "raise", "ValueError", "(", "'PSF topology files require Charmm parameters.'", ")", "system", "=", "self", ".", "master", ".", "createSystem", "(", "self", ".", "master", ".", "parmset", ",", "*", "*", "system_options", ")", "else", ":", "raise", "NotImplementedError", "(", "'Handler {} is not able to create systems.'", ".", "format", "(", "self", ")", ")", "if", "self", ".", "has_box", ":", "system", ".", "setDefaultPeriodicBoxVectors", "(", "*", "self", ".", "box", ")", "return", "system" ]
48.904762
26.047619
def cost(self, logits, target): """Returns cost. Args: logits: model output. target: target. Returns: Cross-entropy loss for a sequence of logits. The loss will be averaged across time steps if time_average_cost was enabled at construction time. """ logits = tf.reshape(logits, [self._num_steps * self._batch_size, -1]) target = tf.reshape(target, [self._num_steps * self._batch_size, -1]) xent = tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=target) loss = tf.reduce_sum(xent) return loss / self._batch_size
[ "def", "cost", "(", "self", ",", "logits", ",", "target", ")", ":", "logits", "=", "tf", ".", "reshape", "(", "logits", ",", "[", "self", ".", "_num_steps", "*", "self", ".", "_batch_size", ",", "-", "1", "]", ")", "target", "=", "tf", ".", "reshape", "(", "target", ",", "[", "self", ".", "_num_steps", "*", "self", ".", "_batch_size", ",", "-", "1", "]", ")", "xent", "=", "tf", ".", "nn", ".", "softmax_cross_entropy_with_logits", "(", "logits", "=", "logits", ",", "labels", "=", "target", ")", "loss", "=", "tf", ".", "reduce_sum", "(", "xent", ")", "return", "loss", "/", "self", ".", "_batch_size" ]
33.588235
24.470588
def get_fixers(self): """Inspects the options to load the requested patterns and handlers. Returns: (pre_order, post_order), where pre_order is the list of fixers that want a pre-order AST traversal, and post_order is the list that want post-order traversal. """ pre_order_fixers = [] post_order_fixers = [] for fix_mod_path in self.fixers: mod = __import__(fix_mod_path, {}, {}, ["*"]) fix_name = fix_mod_path.rsplit(".", 1)[-1] if fix_name.startswith(self.FILE_PREFIX): fix_name = fix_name[len(self.FILE_PREFIX):] parts = fix_name.split("_") class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts]) try: fix_class = getattr(mod, class_name) except AttributeError: raise FixerError("Can't find %s.%s" % (fix_name, class_name)) fixer = fix_class(self.options, self.fixer_log) if fixer.explicit and self.explicit is not True and \ fix_mod_path not in self.explicit: self.log_message("Skipping implicit fixer: %s", fix_name) continue self.log_debug("Adding transformation: %s", fix_name) if fixer.order == "pre": pre_order_fixers.append(fixer) elif fixer.order == "post": post_order_fixers.append(fixer) else: raise FixerError("Illegal fixer order: %r" % fixer.order) key_func = operator.attrgetter("run_order") pre_order_fixers.sort(key=key_func) post_order_fixers.sort(key=key_func) return (pre_order_fixers, post_order_fixers)
[ "def", "get_fixers", "(", "self", ")", ":", "pre_order_fixers", "=", "[", "]", "post_order_fixers", "=", "[", "]", "for", "fix_mod_path", "in", "self", ".", "fixers", ":", "mod", "=", "__import__", "(", "fix_mod_path", ",", "{", "}", ",", "{", "}", ",", "[", "\"*\"", "]", ")", "fix_name", "=", "fix_mod_path", ".", "rsplit", "(", "\".\"", ",", "1", ")", "[", "-", "1", "]", "if", "fix_name", ".", "startswith", "(", "self", ".", "FILE_PREFIX", ")", ":", "fix_name", "=", "fix_name", "[", "len", "(", "self", ".", "FILE_PREFIX", ")", ":", "]", "parts", "=", "fix_name", ".", "split", "(", "\"_\"", ")", "class_name", "=", "self", ".", "CLASS_PREFIX", "+", "\"\"", ".", "join", "(", "[", "p", ".", "title", "(", ")", "for", "p", "in", "parts", "]", ")", "try", ":", "fix_class", "=", "getattr", "(", "mod", ",", "class_name", ")", "except", "AttributeError", ":", "raise", "FixerError", "(", "\"Can't find %s.%s\"", "%", "(", "fix_name", ",", "class_name", ")", ")", "fixer", "=", "fix_class", "(", "self", ".", "options", ",", "self", ".", "fixer_log", ")", "if", "fixer", ".", "explicit", "and", "self", ".", "explicit", "is", "not", "True", "and", "fix_mod_path", "not", "in", "self", ".", "explicit", ":", "self", ".", "log_message", "(", "\"Skipping implicit fixer: %s\"", ",", "fix_name", ")", "continue", "self", ".", "log_debug", "(", "\"Adding transformation: %s\"", ",", "fix_name", ")", "if", "fixer", ".", "order", "==", "\"pre\"", ":", "pre_order_fixers", ".", "append", "(", "fixer", ")", "elif", "fixer", ".", "order", "==", "\"post\"", ":", "post_order_fixers", ".", "append", "(", "fixer", ")", "else", ":", "raise", "FixerError", "(", "\"Illegal fixer order: %r\"", "%", "fixer", ".", "order", ")", "key_func", "=", "operator", ".", "attrgetter", "(", "\"run_order\"", ")", "pre_order_fixers", ".", "sort", "(", "key", "=", "key_func", ")", "post_order_fixers", ".", "sort", "(", "key", "=", "key_func", ")", "return", "(", "pre_order_fixers", ",", "post_order_fixers", ")" ]
44.051282
17.615385
def start_logger(log_to_file=False, \ log_to_stream=False, \ log_to_file_level=logging.INFO, \ log_to_stream_level=logging.INFO, \ log_filename=None, \ log_stream=None, \ log_rotate=True, \ log_size=524288, \ log_number=3): """ Configures and starts a logger to monitor the execution of a **PaPy** pipeline. Arguments: - log_to_file(``bool``) [default: ``True``] Should we write logging messages into a file? - log_to_stream(``bool`` or ``object``) [default: ``False``] Should we print logging messages to a stream? If ``True`` this defaults to ``stderr``. - log_to_file_level(``int``) [default: ``INFO``] The minimum logging level of messages to be written to file. - log_to_screen_level(``int``) [default: ``ERROR``] The minimum logging level of messages to be printed to the stream. - log_filename(``str``) [default: ``"PaPy_log"`` or ``"PaPy_log_$TIME$"``] Name of the log file. Ignored if "log_to_file" is ``False``. - log_rotate(``bool``) [default: ``True``] Should we limit the number of logs? Ignored if "log_to_file" is ``False``. - log_size(``int``) [default: ``524288``] Maximum number of ``bytes`` saved in a single log file. Ignored if "log_to_file" is ``False``. - log_number(``int``) [default: ``3``] Maximum number of rotated log files Ignored if "log_to_file" is ``False``. """ if log_rotate: log_filename = log_filename or 'PaPy_log' else: run_time = "_".join(map(str, time.localtime()[0:5])) log_filename = 'PaPy_log_%s' % run_time root_log = logging.getLogger() formatter = logging.Formatter( "%(levelname)s %(asctime)s,%(msecs).3d [%(name)s] - %(message)s", \ datefmt='%H:%M:%S') root_log.setLevel(logging.DEBUG) if log_to_file: if log_rotate: file_handler = logging.handlers.RotatingFileHandler(log_filename, \ maxBytes=log_size, backupCount=log_number) else: file_handler = logging.FileHandler(log_filename, 'w') file_handler.setLevel(log_to_file_level) file_handler.setFormatter(formatter) root_log.addHandler(file_handler) if log_to_stream: stream_handler = logging.StreamHandler(log_stream) stream_handler.setLevel(log_to_stream_level) stream_handler.setFormatter(formatter) root_log.addHandler(stream_handler)
[ "def", "start_logger", "(", "log_to_file", "=", "False", ",", "log_to_stream", "=", "False", ",", "log_to_file_level", "=", "logging", ".", "INFO", ",", "log_to_stream_level", "=", "logging", ".", "INFO", ",", "log_filename", "=", "None", ",", "log_stream", "=", "None", ",", "log_rotate", "=", "True", ",", "log_size", "=", "524288", ",", "log_number", "=", "3", ")", ":", "if", "log_rotate", ":", "log_filename", "=", "log_filename", "or", "'PaPy_log'", "else", ":", "run_time", "=", "\"_\"", ".", "join", "(", "map", "(", "str", ",", "time", ".", "localtime", "(", ")", "[", "0", ":", "5", "]", ")", ")", "log_filename", "=", "'PaPy_log_%s'", "%", "run_time", "root_log", "=", "logging", ".", "getLogger", "(", ")", "formatter", "=", "logging", ".", "Formatter", "(", "\"%(levelname)s %(asctime)s,%(msecs).3d [%(name)s] - %(message)s\"", ",", "datefmt", "=", "'%H:%M:%S'", ")", "root_log", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "if", "log_to_file", ":", "if", "log_rotate", ":", "file_handler", "=", "logging", ".", "handlers", ".", "RotatingFileHandler", "(", "log_filename", ",", "maxBytes", "=", "log_size", ",", "backupCount", "=", "log_number", ")", "else", ":", "file_handler", "=", "logging", ".", "FileHandler", "(", "log_filename", ",", "'w'", ")", "file_handler", ".", "setLevel", "(", "log_to_file_level", ")", "file_handler", ".", "setFormatter", "(", "formatter", ")", "root_log", ".", "addHandler", "(", "file_handler", ")", "if", "log_to_stream", ":", "stream_handler", "=", "logging", ".", "StreamHandler", "(", "log_stream", ")", "stream_handler", ".", "setLevel", "(", "log_to_stream_level", ")", "stream_handler", ".", "setFormatter", "(", "formatter", ")", "root_log", ".", "addHandler", "(", "stream_handler", ")" ]
43.79661
19.491525
def __msg_curse_sum(self, ret, sep_char='_', mmm=None, args=None): """ Build the sum message (only when filter is on) and add it to the ret dict. * ret: list of string where the message is added * sep_char: define the line separation char * mmm: display min, max, mean or current (if mmm=None) * args: Glances args """ ret.append(self.curse_new_line()) if mmm is None: ret.append(self.curse_add_line(sep_char * 69)) ret.append(self.curse_new_line()) # CPU percent sum msg = self.layout_stat['cpu'].format(self.__sum_stats('cpu_percent', mmm=mmm)) ret.append(self.curse_add_line(msg, decoration=self.__mmm_deco(mmm))) # MEM percent sum msg = self.layout_stat['mem'].format(self.__sum_stats('memory_percent', mmm=mmm)) ret.append(self.curse_add_line(msg, decoration=self.__mmm_deco(mmm))) # VIRT and RES memory sum if 'memory_info' in self.stats[0] and self.stats[0]['memory_info'] is not None and self.stats[0]['memory_info'] != '': # VMS msg = self.layout_stat['virt'].format(self.auto_unit(self.__sum_stats('memory_info', indice=1, mmm=mmm), low_precision=False)) ret.append(self.curse_add_line(msg, decoration=self.__mmm_deco(mmm), optional=True)) # RSS msg = self.layout_stat['res'].format(self.auto_unit(self.__sum_stats('memory_info', indice=0, mmm=mmm), low_precision=False)) ret.append(self.curse_add_line(msg, decoration=self.__mmm_deco(mmm), optional=True)) else: msg = self.layout_header['virt'].format('') ret.append(self.curse_add_line(msg)) msg = self.layout_header['res'].format('') ret.append(self.curse_add_line(msg)) # PID msg = self.layout_header['pid'].format('', width=self.__max_pid_size()) ret.append(self.curse_add_line(msg)) # USER msg = self.layout_header['user'].format('') ret.append(self.curse_add_line(msg)) # TIME+ msg = self.layout_header['time'].format('') ret.append(self.curse_add_line(msg, optional=True)) # THREAD msg = self.layout_header['thread'].format('') ret.append(self.curse_add_line(msg)) # NICE msg = self.layout_header['nice'].format('') ret.append(self.curse_add_line(msg)) # STATUS msg = self.layout_header['status'].format('') ret.append(self.curse_add_line(msg)) # IO read/write if 'io_counters' in self.stats[0] and mmm is None: # IO read io_rs = int((self.__sum_stats('io_counters', 0) - self.__sum_stats('io_counters', indice=2, mmm=mmm)) / self.stats[0]['time_since_update']) if io_rs == 0: msg = self.layout_stat['ior'].format('0') else: msg = self.layout_stat['ior'].format(self.auto_unit(io_rs, low_precision=True)) ret.append(self.curse_add_line(msg, decoration=self.__mmm_deco(mmm), optional=True, additional=True)) # IO write io_ws = int((self.__sum_stats('io_counters', 1) - self.__sum_stats('io_counters', indice=3, mmm=mmm)) / self.stats[0]['time_since_update']) if io_ws == 0: msg = self.layout_stat['iow'].format('0') else: msg = self.layout_stat['iow'].format(self.auto_unit(io_ws, low_precision=True)) ret.append(self.curse_add_line(msg, decoration=self.__mmm_deco(mmm), optional=True, additional=True)) else: msg = self.layout_header['ior'].format('') ret.append(self.curse_add_line(msg, optional=True, additional=True)) msg = self.layout_header['iow'].format('') ret.append(self.curse_add_line(msg, optional=True, additional=True)) if mmm is None: msg = ' < {}'.format('current') ret.append(self.curse_add_line(msg, optional=True)) else: msg = ' < {}'.format(mmm) ret.append(self.curse_add_line(msg, optional=True)) msg = ' (\'M\' to reset)' ret.append(self.curse_add_line(msg, optional=True))
[ "def", "__msg_curse_sum", "(", "self", ",", "ret", ",", "sep_char", "=", "'_'", ",", "mmm", "=", "None", ",", "args", "=", "None", ")", ":", "ret", ".", "append", "(", "self", ".", "curse_new_line", "(", ")", ")", "if", "mmm", "is", "None", ":", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "sep_char", "*", "69", ")", ")", "ret", ".", "append", "(", "self", ".", "curse_new_line", "(", ")", ")", "# CPU percent sum", "msg", "=", "self", ".", "layout_stat", "[", "'cpu'", "]", ".", "format", "(", "self", ".", "__sum_stats", "(", "'cpu_percent'", ",", "mmm", "=", "mmm", ")", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "decoration", "=", "self", ".", "__mmm_deco", "(", "mmm", ")", ")", ")", "# MEM percent sum", "msg", "=", "self", ".", "layout_stat", "[", "'mem'", "]", ".", "format", "(", "self", ".", "__sum_stats", "(", "'memory_percent'", ",", "mmm", "=", "mmm", ")", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "decoration", "=", "self", ".", "__mmm_deco", "(", "mmm", ")", ")", ")", "# VIRT and RES memory sum", "if", "'memory_info'", "in", "self", ".", "stats", "[", "0", "]", "and", "self", ".", "stats", "[", "0", "]", "[", "'memory_info'", "]", "is", "not", "None", "and", "self", ".", "stats", "[", "0", "]", "[", "'memory_info'", "]", "!=", "''", ":", "# VMS", "msg", "=", "self", ".", "layout_stat", "[", "'virt'", "]", ".", "format", "(", "self", ".", "auto_unit", "(", "self", ".", "__sum_stats", "(", "'memory_info'", ",", "indice", "=", "1", ",", "mmm", "=", "mmm", ")", ",", "low_precision", "=", "False", ")", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "decoration", "=", "self", ".", "__mmm_deco", "(", "mmm", ")", ",", "optional", "=", "True", ")", ")", "# RSS", "msg", "=", "self", ".", "layout_stat", "[", "'res'", "]", ".", "format", "(", "self", ".", "auto_unit", "(", "self", ".", "__sum_stats", "(", "'memory_info'", ",", "indice", "=", "0", ",", "mmm", "=", "mmm", ")", ",", "low_precision", "=", "False", ")", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "decoration", "=", "self", ".", "__mmm_deco", "(", "mmm", ")", ",", "optional", "=", "True", ")", ")", "else", ":", "msg", "=", "self", ".", "layout_header", "[", "'virt'", "]", ".", "format", "(", "''", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ")", ")", "msg", "=", "self", ".", "layout_header", "[", "'res'", "]", ".", "format", "(", "''", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ")", ")", "# PID", "msg", "=", "self", ".", "layout_header", "[", "'pid'", "]", ".", "format", "(", "''", ",", "width", "=", "self", ".", "__max_pid_size", "(", ")", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ")", ")", "# USER", "msg", "=", "self", ".", "layout_header", "[", "'user'", "]", ".", "format", "(", "''", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ")", ")", "# TIME+", "msg", "=", "self", ".", "layout_header", "[", "'time'", "]", ".", "format", "(", "''", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "optional", "=", "True", ")", ")", "# THREAD", "msg", "=", "self", ".", "layout_header", "[", "'thread'", "]", ".", "format", "(", "''", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ")", ")", "# NICE", "msg", "=", "self", ".", "layout_header", "[", "'nice'", "]", ".", "format", "(", "''", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ")", ")", "# STATUS", "msg", "=", "self", ".", "layout_header", "[", "'status'", "]", ".", "format", "(", "''", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ")", ")", "# IO read/write", "if", "'io_counters'", "in", "self", ".", "stats", "[", "0", "]", "and", "mmm", "is", "None", ":", "# IO read", "io_rs", "=", "int", "(", "(", "self", ".", "__sum_stats", "(", "'io_counters'", ",", "0", ")", "-", "self", ".", "__sum_stats", "(", "'io_counters'", ",", "indice", "=", "2", ",", "mmm", "=", "mmm", ")", ")", "/", "self", ".", "stats", "[", "0", "]", "[", "'time_since_update'", "]", ")", "if", "io_rs", "==", "0", ":", "msg", "=", "self", ".", "layout_stat", "[", "'ior'", "]", ".", "format", "(", "'0'", ")", "else", ":", "msg", "=", "self", ".", "layout_stat", "[", "'ior'", "]", ".", "format", "(", "self", ".", "auto_unit", "(", "io_rs", ",", "low_precision", "=", "True", ")", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "decoration", "=", "self", ".", "__mmm_deco", "(", "mmm", ")", ",", "optional", "=", "True", ",", "additional", "=", "True", ")", ")", "# IO write", "io_ws", "=", "int", "(", "(", "self", ".", "__sum_stats", "(", "'io_counters'", ",", "1", ")", "-", "self", ".", "__sum_stats", "(", "'io_counters'", ",", "indice", "=", "3", ",", "mmm", "=", "mmm", ")", ")", "/", "self", ".", "stats", "[", "0", "]", "[", "'time_since_update'", "]", ")", "if", "io_ws", "==", "0", ":", "msg", "=", "self", ".", "layout_stat", "[", "'iow'", "]", ".", "format", "(", "'0'", ")", "else", ":", "msg", "=", "self", ".", "layout_stat", "[", "'iow'", "]", ".", "format", "(", "self", ".", "auto_unit", "(", "io_ws", ",", "low_precision", "=", "True", ")", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "decoration", "=", "self", ".", "__mmm_deco", "(", "mmm", ")", ",", "optional", "=", "True", ",", "additional", "=", "True", ")", ")", "else", ":", "msg", "=", "self", ".", "layout_header", "[", "'ior'", "]", ".", "format", "(", "''", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "optional", "=", "True", ",", "additional", "=", "True", ")", ")", "msg", "=", "self", ".", "layout_header", "[", "'iow'", "]", ".", "format", "(", "''", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "optional", "=", "True", ",", "additional", "=", "True", ")", ")", "if", "mmm", "is", "None", ":", "msg", "=", "' < {}'", ".", "format", "(", "'current'", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "optional", "=", "True", ")", ")", "else", ":", "msg", "=", "' < {}'", ".", "format", "(", "mmm", ")", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "optional", "=", "True", ")", ")", "msg", "=", "' (\\'M\\' to reset)'", "ret", ".", "append", "(", "self", ".", "curse_add_line", "(", "msg", ",", "optional", "=", "True", ")", ")" ]
51.382022
24.078652
def valueFromString(self, value, extra=None, db=None): """ Converts the inputted string text to a value that matches the type from this column type. :param value | <str> extra | <variant> """ try: return projex.text.safe_eval(value) except ValueError: return 0
[ "def", "valueFromString", "(", "self", ",", "value", ",", "extra", "=", "None", ",", "db", "=", "None", ")", ":", "try", ":", "return", "projex", ".", "text", ".", "safe_eval", "(", "value", ")", "except", "ValueError", ":", "return", "0" ]
29.583333
15.583333
def init_dirs(rootdir_or_loader, outputpath, saveto_dir='data', auximages_dir='auximages', prefix='crd'): """Initialize the directiories. Inputs: rootdir_or_loader: depends on the type: str: the root directory of the SAXSCtrl/CCT software, i.e. where the subfolders ``eval2d``, ``param``, ``images``, ``mask`` etc. reside. sastool.classes2.Loader instance: a fully initialized loader, which will be used to acquire headers and exposures. list: a list of sastool.classes2.Loader instances, which will be used to open headers and exposures. When opening something, always the first item will be tried first, and if it fails with FileNotFoundError, the second, third, etc. will be tried until either the file can be opened or the last one fails. outputpath: the directory where the produced files are written. This is usually the working directory of the IPython notebook. saveto_dir: the subdirectory where averaged, united, subtracted etc. datasets are written. auximages_dir: the subdirectory where automatically produced images reside. Remarks: If a single root directory is given, a list of four loaders will be constructed in this order: CCT (processed), CCT (raw), SAXSCtrl (processed), SAXSCtrl (raw). Raw and processed loaders are handled separately. """ ip = get_ipython() if isinstance(rootdir_or_loader, str): print("Initializing loaders for SAXSCtrl and CCT.", flush=True) ip.user_ns['_loaders'] = [ credo_cct.Loader(rootdir_or_loader, processed=True, exposureclass=prefix), credo_saxsctrl.Loader(rootdir_or_loader, processed=True, exposureclass=prefix), credo_cct.Loader(rootdir_or_loader, processed=False, exposureclass=prefix), credo_saxsctrl.Loader(rootdir_or_loader, processed=False, exposureclass=prefix), ] print("Loaders initialized.", flush=True) elif isinstance(rootdir_or_loader, Loader): ip.user_ns['_loaders'] = [rootdir_or_loader] elif isinstance(rootdir_or_loader, list) and all([isinstance(l, Loader) for l in rootdir_or_loader]): ip.user_ns['_loaders'] = rootdir_or_loader[:] else: raise TypeError(rootdir_or_loader) if not os.path.isdir(outputpath): os.makedirs(outputpath) print("Output files will be written to:", outputpath) os.chdir(outputpath) ip.user_ns['outputpath'] = outputpath if not os.path.isdir(os.path.join(ip.user_ns['outputpath'], saveto_dir)): os.mkdir(os.path.join(ip.user_ns['outputpath'], saveto_dir)) if not os.path.isdir(os.path.join(ip.user_ns['outputpath'], auximages_dir)): os.mkdir(os.path.join(ip.user_ns['outputpath'], auximages_dir)) ip.user_ns['auximages_dir'] = os.path.join(outputpath, auximages_dir) ip.user_ns['saveto_dir'] = os.path.join(outputpath, saveto_dir) ip.user_ns['saveto_dir_rel'] = saveto_dir ip.user_ns['auximages_dir_rel'] = auximages_dir ip.user_ns['crd_prefix']=prefix set_length_units('nm')
[ "def", "init_dirs", "(", "rootdir_or_loader", ",", "outputpath", ",", "saveto_dir", "=", "'data'", ",", "auximages_dir", "=", "'auximages'", ",", "prefix", "=", "'crd'", ")", ":", "ip", "=", "get_ipython", "(", ")", "if", "isinstance", "(", "rootdir_or_loader", ",", "str", ")", ":", "print", "(", "\"Initializing loaders for SAXSCtrl and CCT.\"", ",", "flush", "=", "True", ")", "ip", ".", "user_ns", "[", "'_loaders'", "]", "=", "[", "credo_cct", ".", "Loader", "(", "rootdir_or_loader", ",", "processed", "=", "True", ",", "exposureclass", "=", "prefix", ")", ",", "credo_saxsctrl", ".", "Loader", "(", "rootdir_or_loader", ",", "processed", "=", "True", ",", "exposureclass", "=", "prefix", ")", ",", "credo_cct", ".", "Loader", "(", "rootdir_or_loader", ",", "processed", "=", "False", ",", "exposureclass", "=", "prefix", ")", ",", "credo_saxsctrl", ".", "Loader", "(", "rootdir_or_loader", ",", "processed", "=", "False", ",", "exposureclass", "=", "prefix", ")", ",", "]", "print", "(", "\"Loaders initialized.\"", ",", "flush", "=", "True", ")", "elif", "isinstance", "(", "rootdir_or_loader", ",", "Loader", ")", ":", "ip", ".", "user_ns", "[", "'_loaders'", "]", "=", "[", "rootdir_or_loader", "]", "elif", "isinstance", "(", "rootdir_or_loader", ",", "list", ")", "and", "all", "(", "[", "isinstance", "(", "l", ",", "Loader", ")", "for", "l", "in", "rootdir_or_loader", "]", ")", ":", "ip", ".", "user_ns", "[", "'_loaders'", "]", "=", "rootdir_or_loader", "[", ":", "]", "else", ":", "raise", "TypeError", "(", "rootdir_or_loader", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "outputpath", ")", ":", "os", ".", "makedirs", "(", "outputpath", ")", "print", "(", "\"Output files will be written to:\"", ",", "outputpath", ")", "os", ".", "chdir", "(", "outputpath", ")", "ip", ".", "user_ns", "[", "'outputpath'", "]", "=", "outputpath", "if", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "ip", ".", "user_ns", "[", "'outputpath'", "]", ",", "saveto_dir", ")", ")", ":", "os", ".", "mkdir", "(", "os", ".", "path", ".", "join", "(", "ip", ".", "user_ns", "[", "'outputpath'", "]", ",", "saveto_dir", ")", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "ip", ".", "user_ns", "[", "'outputpath'", "]", ",", "auximages_dir", ")", ")", ":", "os", ".", "mkdir", "(", "os", ".", "path", ".", "join", "(", "ip", ".", "user_ns", "[", "'outputpath'", "]", ",", "auximages_dir", ")", ")", "ip", ".", "user_ns", "[", "'auximages_dir'", "]", "=", "os", ".", "path", ".", "join", "(", "outputpath", ",", "auximages_dir", ")", "ip", ".", "user_ns", "[", "'saveto_dir'", "]", "=", "os", ".", "path", ".", "join", "(", "outputpath", ",", "saveto_dir", ")", "ip", ".", "user_ns", "[", "'saveto_dir_rel'", "]", "=", "saveto_dir", "ip", ".", "user_ns", "[", "'auximages_dir_rel'", "]", "=", "auximages_dir", "ip", ".", "user_ns", "[", "'crd_prefix'", "]", "=", "prefix", "set_length_units", "(", "'nm'", ")" ]
49.015152
23.530303
def distribute(self, f, n): """Distribute the computations amongst the multiprocessing pools Parameters ---------- f : function Function to be distributed to the processors n : int The values in range(0,n) will be passed as arguments to the function f. """ if self.pool is None: return [f(i) for i in range(n)] else: return self.pool.map(f, range(n))
[ "def", "distribute", "(", "self", ",", "f", ",", "n", ")", ":", "if", "self", ".", "pool", "is", "None", ":", "return", "[", "f", "(", "i", ")", "for", "i", "in", "range", "(", "n", ")", "]", "else", ":", "return", "self", ".", "pool", ".", "map", "(", "f", ",", "range", "(", "n", ")", ")" ]
28.4375
18.125
def delete(self): """Delete this column family. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_delete_column_family] :end-before: [END bigtable_delete_column_family] """ modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification( id=self.column_family_id, drop=True ) client = self._table._instance._client # data it contains are the GC rule and the column family ID already # stored on this instance. client.table_admin_client.modify_column_families( self._table.name, [modification] )
[ "def", "delete", "(", "self", ")", ":", "modification", "=", "table_admin_v2_pb2", ".", "ModifyColumnFamiliesRequest", ".", "Modification", "(", "id", "=", "self", ".", "column_family_id", ",", "drop", "=", "True", ")", "client", "=", "self", ".", "_table", ".", "_instance", ".", "_client", "# data it contains are the GC rule and the column family ID already", "# stored on this instance.", "client", ".", "table_admin_client", ".", "modify_column_families", "(", "self", ".", "_table", ".", "name", ",", "[", "modification", "]", ")" ]
32.85
21.55
def computeActivity(self, activePresynapticCells, connectedPermanence): """ Compute each segment's number of active synapses for a given input. In the returned lists, a segment's active synapse count is stored at index ``segment.flatIdx``. :param activePresynapticCells: (iter) Active cells. :param connectedPermanence: (float) Permanence threshold for a synapse to be considered connected :returns: (tuple) (``numActiveConnectedSynapsesForSegment`` [list], ``numActivePotentialSynapsesForSegment`` [list]) """ numActiveConnectedSynapsesForSegment = [0] * self._nextFlatIdx numActivePotentialSynapsesForSegment = [0] * self._nextFlatIdx threshold = connectedPermanence - EPSILON for cell in activePresynapticCells: for synapse in self._synapsesForPresynapticCell[cell]: flatIdx = synapse.segment.flatIdx numActivePotentialSynapsesForSegment[flatIdx] += 1 if synapse.permanence > threshold: numActiveConnectedSynapsesForSegment[flatIdx] += 1 return (numActiveConnectedSynapsesForSegment, numActivePotentialSynapsesForSegment)
[ "def", "computeActivity", "(", "self", ",", "activePresynapticCells", ",", "connectedPermanence", ")", ":", "numActiveConnectedSynapsesForSegment", "=", "[", "0", "]", "*", "self", ".", "_nextFlatIdx", "numActivePotentialSynapsesForSegment", "=", "[", "0", "]", "*", "self", ".", "_nextFlatIdx", "threshold", "=", "connectedPermanence", "-", "EPSILON", "for", "cell", "in", "activePresynapticCells", ":", "for", "synapse", "in", "self", ".", "_synapsesForPresynapticCell", "[", "cell", "]", ":", "flatIdx", "=", "synapse", ".", "segment", ".", "flatIdx", "numActivePotentialSynapsesForSegment", "[", "flatIdx", "]", "+=", "1", "if", "synapse", ".", "permanence", ">", "threshold", ":", "numActiveConnectedSynapsesForSegment", "[", "flatIdx", "]", "+=", "1", "return", "(", "numActiveConnectedSynapsesForSegment", ",", "numActivePotentialSynapsesForSegment", ")" ]
40.785714
22.107143
def tz_convert(dt, to_tz, from_tz=None) -> str: """ Convert to tz Args: dt: date time to_tz: to tz from_tz: from tz - will be ignored if tz from dt is given Returns: str: date & time Examples: >>> dt_1 = pd.Timestamp('2018-09-10 16:00', tz='Asia/Hong_Kong') >>> tz_convert(dt_1, to_tz='NY') '2018-09-10 04:00:00-04:00' >>> dt_2 = pd.Timestamp('2018-01-10 16:00') >>> tz_convert(dt_2, to_tz='HK', from_tz='NY') '2018-01-11 05:00:00+08:00' >>> dt_3 = '2018-09-10 15:00' >>> tz_convert(dt_3, to_tz='NY', from_tz='JP') '2018-09-10 02:00:00-04:00' """ logger = logs.get_logger(tz_convert, level='info') f_tz, t_tz = get_tz(from_tz), get_tz(to_tz) from_dt = pd.Timestamp(str(dt), tz=f_tz) logger.debug(f'converting {str(from_dt)} from {f_tz} to {t_tz} ...') return str(pd.Timestamp(str(from_dt), tz=t_tz))
[ "def", "tz_convert", "(", "dt", ",", "to_tz", ",", "from_tz", "=", "None", ")", "->", "str", ":", "logger", "=", "logs", ".", "get_logger", "(", "tz_convert", ",", "level", "=", "'info'", ")", "f_tz", ",", "t_tz", "=", "get_tz", "(", "from_tz", ")", ",", "get_tz", "(", "to_tz", ")", "from_dt", "=", "pd", ".", "Timestamp", "(", "str", "(", "dt", ")", ",", "tz", "=", "f_tz", ")", "logger", ".", "debug", "(", "f'converting {str(from_dt)} from {f_tz} to {t_tz} ...'", ")", "return", "str", "(", "pd", ".", "Timestamp", "(", "str", "(", "from_dt", ")", ",", "tz", "=", "t_tz", ")", ")" ]
31.827586
17.689655
def p_range(self, p): """range : value DOT_DOT value | value""" n = len(p) if n == 2: p[0] = (p[1],) elif n == 4: p[0] = (p[1], p[3])
[ "def", "p_range", "(", "self", ",", "p", ")", ":", "n", "=", "len", "(", "p", ")", "if", "n", "==", "2", ":", "p", "[", "0", "]", "=", "(", "p", "[", "1", "]", ",", ")", "elif", "n", "==", "4", ":", "p", "[", "0", "]", "=", "(", "p", "[", "1", "]", ",", "p", "[", "3", "]", ")" ]
24.875
13.25
def pack_factorisation(facto_list): """ :param facto_list: list of parser or tuple of factorisation :return: """ _sum = [] for f in facto_list: if isinstance(f, Script): _sum.append(f) else: # tuple of factorisation _sum.append(MultiplicativeScript(children=(pack_factorisation(l_f) for l_f in f))) if len(_sum) == 1: return _sum[0] else: return AdditiveScript(children=_sum)
[ "def", "pack_factorisation", "(", "facto_list", ")", ":", "_sum", "=", "[", "]", "for", "f", "in", "facto_list", ":", "if", "isinstance", "(", "f", ",", "Script", ")", ":", "_sum", ".", "append", "(", "f", ")", "else", ":", "# tuple of factorisation", "_sum", ".", "append", "(", "MultiplicativeScript", "(", "children", "=", "(", "pack_factorisation", "(", "l_f", ")", "for", "l_f", "in", "f", ")", ")", ")", "if", "len", "(", "_sum", ")", "==", "1", ":", "return", "_sum", "[", "0", "]", "else", ":", "return", "AdditiveScript", "(", "children", "=", "_sum", ")" ]
27.058824
18.588235
def is_allowed(self, role, method, resource): """Check whether role is allowed to access resource :param role: Role to be checked. :param method: Method to be checked. :param resource: View function to be checked. """ return (role, method, resource) in self._allowed
[ "def", "is_allowed", "(", "self", ",", "role", ",", "method", ",", "resource", ")", ":", "return", "(", "role", ",", "method", ",", "resource", ")", "in", "self", ".", "_allowed" ]
38.5
9.75
def number(ctx, seq=None): ''' Yields one float, derived from the first item in the argument sequence (unless empty in which case yield NaN) as follows: * If string with optional whitespace followed by an optional minus sign followed by a Number followed by whitespace, converte to the IEEE 754 number that is nearest (according to the IEEE 754 round-to-nearest rule) to the mathematical value represented by the string; in case of any other string yield NaN * If boolean true yield 1; if boolean false yield 0 * If a node convert to string as if by a call to string(); yield the same value as if passed that string argument to number() ''' if hasattr(obj, 'compute'): obj = next(seq.compute(ctx), '') else: obj = seq yield next(to_number(obj), '')
[ "def", "number", "(", "ctx", ",", "seq", "=", "None", ")", ":", "if", "hasattr", "(", "obj", ",", "'compute'", ")", ":", "obj", "=", "next", "(", "seq", ".", "compute", "(", "ctx", ")", ",", "''", ")", "else", ":", "obj", "=", "seq", "yield", "next", "(", "to_number", "(", "obj", ")", ",", "''", ")" ]
60.692308
49.769231
def get_report_details(self, report_id, id_type=None): """ Retrieves a report by its ID. Internal and external IDs are both allowed. :param str report_id: The ID of the incident report. :param str id_type: Indicates whether ID is internal or external. :return: The retrieved |Report| object. Example: >>> report = ts.get_report_details("1a09f14b-ef8c-443f-b082-9643071c522a") >>> print(report) { "id": "1a09f14b-ef8c-443f-b082-9643071c522a", "created": 1515571633505, "updated": 1515620420062, "reportBody": "Employee reported suspect email. We had multiple reports of suspicious email overnight ...", "title": "Phishing Incident", "enclaveIds": [ "ac6a0d17-7350-4410-bc57-9699521db992" ], "distributionType": "ENCLAVE", "timeBegan": 1479941278000 } """ params = {'idType': id_type} resp = self._client.get("reports/%s" % report_id, params=params) return Report.from_dict(resp.json())
[ "def", "get_report_details", "(", "self", ",", "report_id", ",", "id_type", "=", "None", ")", ":", "params", "=", "{", "'idType'", ":", "id_type", "}", "resp", "=", "self", ".", "_client", ".", "get", "(", "\"reports/%s\"", "%", "report_id", ",", "params", "=", "params", ")", "return", "Report", ".", "from_dict", "(", "resp", ".", "json", "(", ")", ")" ]
34.709677
22.580645
def ProcessClients(self, responses): """Does the work.""" del responses end = rdfvalue.RDFDatetime.Now() - db.CLIENT_STATS_RETENTION client_urns = export_utils.GetAllClients(token=self.token) for batch in collection.Batch(client_urns, 10000): with data_store.DB.GetMutationPool() as mutation_pool: for client_urn in batch: mutation_pool.DeleteAttributes( client_urn.Add("stats"), [u"aff4:stats"], start=0, end=end.AsMicrosecondsSinceEpoch()) self.HeartBeat() if data_store.RelationalDBEnabled(): total_deleted_count = 0 for deleted_count in data_store.REL_DB.DeleteOldClientStats( yield_after_count=_STATS_DELETION_BATCH_SIZE, retention_time=end): self.HeartBeat() total_deleted_count += deleted_count self.Log("Deleted %d ClientStats that expired before %s", total_deleted_count, end)
[ "def", "ProcessClients", "(", "self", ",", "responses", ")", ":", "del", "responses", "end", "=", "rdfvalue", ".", "RDFDatetime", ".", "Now", "(", ")", "-", "db", ".", "CLIENT_STATS_RETENTION", "client_urns", "=", "export_utils", ".", "GetAllClients", "(", "token", "=", "self", ".", "token", ")", "for", "batch", "in", "collection", ".", "Batch", "(", "client_urns", ",", "10000", ")", ":", "with", "data_store", ".", "DB", ".", "GetMutationPool", "(", ")", "as", "mutation_pool", ":", "for", "client_urn", "in", "batch", ":", "mutation_pool", ".", "DeleteAttributes", "(", "client_urn", ".", "Add", "(", "\"stats\"", ")", ",", "[", "u\"aff4:stats\"", "]", ",", "start", "=", "0", ",", "end", "=", "end", ".", "AsMicrosecondsSinceEpoch", "(", ")", ")", "self", ".", "HeartBeat", "(", ")", "if", "data_store", ".", "RelationalDBEnabled", "(", ")", ":", "total_deleted_count", "=", "0", "for", "deleted_count", "in", "data_store", ".", "REL_DB", ".", "DeleteOldClientStats", "(", "yield_after_count", "=", "_STATS_DELETION_BATCH_SIZE", ",", "retention_time", "=", "end", ")", ":", "self", ".", "HeartBeat", "(", ")", "total_deleted_count", "+=", "deleted_count", "self", ".", "Log", "(", "\"Deleted %d ClientStats that expired before %s\"", ",", "total_deleted_count", ",", "end", ")" ]
38.333333
17.166667
def index(self, name=None): # pylint: disable=C6409 """Returns index number of supplied column name. Args: name: string of column name. Raises: TableError: If name not found. Returns: Index of the specified header entry. """ try: return self.header.index(name) except ValueError: raise TableError('Unknown index name %s.' % name)
[ "def", "index", "(", "self", ",", "name", "=", "None", ")", ":", "# pylint: disable=C6409", "try", ":", "return", "self", ".", "header", ".", "index", "(", "name", ")", "except", "ValueError", ":", "raise", "TableError", "(", "'Unknown index name %s.'", "%", "name", ")" ]
23.5
18.8125
def cleanShutdown(self, quickMode=False, stopReactor=True, _reactor=reactor): """Shut down the entire process, once all currently-running builds are complete. quickMode will mark all builds as retry (except the ones that were triggered) """ if self.shuttingDown: return log.msg("Initiating clean shutdown") self.shuttingDown = True # first, stop the distributor; this will finish any ongoing scheduling # operations before firing yield self.brd.disownServiceParent() # Double check that we're still supposed to be shutting down # The shutdown may have been cancelled! while self.shuttingDown: if quickMode: for builder in self.builders.values(): # As we stop the builds, builder.building might change during loop # so we need to copy the list for build in list(builder.building): # if build is waited for then this is a sub-build, so # no need to retry it if sum(br.waitedFor for br in build.requests): results = CANCELLED else: results = RETRY is_building = build.workerforbuilder.state == States.BUILDING build.stopBuild("Master Shutdown", results) if not is_building: # if it is not building, then it must be a latent worker # which is substantiating. Cancel it. build.workerforbuilder.worker.insubstantiate() # then wait for all builds to finish dl = [] for builder in self.builders.values(): for build in builder.building: # build may be waiting for ping to worker to succeed which # may never happen if the connection to worker was broken # without TCP connection being severed build.workerforbuilder.abortPingIfAny() dl.append(build.waitUntilFinished()) if not dl: log.msg("No running jobs, starting shutdown immediately") else: log.msg("Waiting for %i build(s) to finish" % len(dl)) yield defer.DeferredList(dl) # Check that there really aren't any running builds n = 0 for builder in self.builders.values(): if builder.building: num_builds = len(builder.building) log.msg("Builder %s has %i builds running" % (builder, num_builds)) n += num_builds if n > 0: log.msg( "Not shutting down, there are %i builds running" % n) log.msg("Trying shutdown sequence again") yield util.asyncSleep(1) else: if stopReactor and self.shuttingDown: log.msg("Stopping reactor") _reactor.stop() break if not self.shuttingDown: yield self.brd.setServiceParent(self)
[ "def", "cleanShutdown", "(", "self", ",", "quickMode", "=", "False", ",", "stopReactor", "=", "True", ",", "_reactor", "=", "reactor", ")", ":", "if", "self", ".", "shuttingDown", ":", "return", "log", ".", "msg", "(", "\"Initiating clean shutdown\"", ")", "self", ".", "shuttingDown", "=", "True", "# first, stop the distributor; this will finish any ongoing scheduling", "# operations before firing", "yield", "self", ".", "brd", ".", "disownServiceParent", "(", ")", "# Double check that we're still supposed to be shutting down", "# The shutdown may have been cancelled!", "while", "self", ".", "shuttingDown", ":", "if", "quickMode", ":", "for", "builder", "in", "self", ".", "builders", ".", "values", "(", ")", ":", "# As we stop the builds, builder.building might change during loop", "# so we need to copy the list", "for", "build", "in", "list", "(", "builder", ".", "building", ")", ":", "# if build is waited for then this is a sub-build, so", "# no need to retry it", "if", "sum", "(", "br", ".", "waitedFor", "for", "br", "in", "build", ".", "requests", ")", ":", "results", "=", "CANCELLED", "else", ":", "results", "=", "RETRY", "is_building", "=", "build", ".", "workerforbuilder", ".", "state", "==", "States", ".", "BUILDING", "build", ".", "stopBuild", "(", "\"Master Shutdown\"", ",", "results", ")", "if", "not", "is_building", ":", "# if it is not building, then it must be a latent worker", "# which is substantiating. Cancel it.", "build", ".", "workerforbuilder", ".", "worker", ".", "insubstantiate", "(", ")", "# then wait for all builds to finish", "dl", "=", "[", "]", "for", "builder", "in", "self", ".", "builders", ".", "values", "(", ")", ":", "for", "build", "in", "builder", ".", "building", ":", "# build may be waiting for ping to worker to succeed which", "# may never happen if the connection to worker was broken", "# without TCP connection being severed", "build", ".", "workerforbuilder", ".", "abortPingIfAny", "(", ")", "dl", ".", "append", "(", "build", ".", "waitUntilFinished", "(", ")", ")", "if", "not", "dl", ":", "log", ".", "msg", "(", "\"No running jobs, starting shutdown immediately\"", ")", "else", ":", "log", ".", "msg", "(", "\"Waiting for %i build(s) to finish\"", "%", "len", "(", "dl", ")", ")", "yield", "defer", ".", "DeferredList", "(", "dl", ")", "# Check that there really aren't any running builds", "n", "=", "0", "for", "builder", "in", "self", ".", "builders", ".", "values", "(", ")", ":", "if", "builder", ".", "building", ":", "num_builds", "=", "len", "(", "builder", ".", "building", ")", "log", ".", "msg", "(", "\"Builder %s has %i builds running\"", "%", "(", "builder", ",", "num_builds", ")", ")", "n", "+=", "num_builds", "if", "n", ">", "0", ":", "log", ".", "msg", "(", "\"Not shutting down, there are %i builds running\"", "%", "n", ")", "log", ".", "msg", "(", "\"Trying shutdown sequence again\"", ")", "yield", "util", ".", "asyncSleep", "(", "1", ")", "else", ":", "if", "stopReactor", "and", "self", ".", "shuttingDown", ":", "log", ".", "msg", "(", "\"Stopping reactor\"", ")", "_reactor", ".", "stop", "(", ")", "break", "if", "not", "self", ".", "shuttingDown", ":", "yield", "self", ".", "brd", ".", "setServiceParent", "(", "self", ")" ]
46.414286
19.057143
def learn_ids(self, item_list): """ read in already set ids on objects """ self._reset_sequence() for item in item_list: key = self.nondup_key_for_item(item) self.ids[key] = item[self.id_key]
[ "def", "learn_ids", "(", "self", ",", "item_list", ")", ":", "self", ".", "_reset_sequence", "(", ")", "for", "item", "in", "item_list", ":", "key", "=", "self", ".", "nondup_key_for_item", "(", "item", ")", "self", ".", "ids", "[", "key", "]", "=", "item", "[", "self", ".", "id_key", "]" ]
39
7
def shuffle_cols(seqarr, newarr, cols): """ used in bootstrap resampling without a map file """ for idx in xrange(cols.shape[0]): newarr[:, idx] = seqarr[:, cols[idx]] return newarr
[ "def", "shuffle_cols", "(", "seqarr", ",", "newarr", ",", "cols", ")", ":", "for", "idx", "in", "xrange", "(", "cols", ".", "shape", "[", "0", "]", ")", ":", "newarr", "[", ":", ",", "idx", "]", "=", "seqarr", "[", ":", ",", "cols", "[", "idx", "]", "]", "return", "newarr" ]
39.4
6.4
def plot_sed(sed, showlnl=False, **kwargs): """Render a plot of a spectral energy distribution. Parameters ---------- showlnl : bool Overlay a map of the delta-loglikelihood values vs. flux in each energy bin. cmap : str Colormap that will be used for the delta-loglikelihood map. llhcut : float Minimum delta-loglikelihood value. ul_ts_threshold : float TS threshold that determines whether the MLE or UL is plotted in each energy bin. """ ax = kwargs.pop('ax', plt.gca()) cmap = kwargs.get('cmap', 'BuGn') annotate_name(sed, ax=ax) SEDPlotter.plot_flux_points(sed, **kwargs) if np.any(sed['ts'] > 9.): if 'model_flux' in sed: SEDPlotter.plot_model(sed['model_flux'], noband=showlnl, **kwargs) if showlnl: SEDPlotter.plot_lnlscan(sed, **kwargs) ax.set_yscale('log') ax.set_xscale('log') ax.set_xlabel('Energy [MeV]') ax.set_ylabel('E$^{2}$dN/dE [MeV cm$^{-2}$ s$^{-1}$]')
[ "def", "plot_sed", "(", "sed", ",", "showlnl", "=", "False", ",", "*", "*", "kwargs", ")", ":", "ax", "=", "kwargs", ".", "pop", "(", "'ax'", ",", "plt", ".", "gca", "(", ")", ")", "cmap", "=", "kwargs", ".", "get", "(", "'cmap'", ",", "'BuGn'", ")", "annotate_name", "(", "sed", ",", "ax", "=", "ax", ")", "SEDPlotter", ".", "plot_flux_points", "(", "sed", ",", "*", "*", "kwargs", ")", "if", "np", ".", "any", "(", "sed", "[", "'ts'", "]", ">", "9.", ")", ":", "if", "'model_flux'", "in", "sed", ":", "SEDPlotter", ".", "plot_model", "(", "sed", "[", "'model_flux'", "]", ",", "noband", "=", "showlnl", ",", "*", "*", "kwargs", ")", "if", "showlnl", ":", "SEDPlotter", ".", "plot_lnlscan", "(", "sed", ",", "*", "*", "kwargs", ")", "ax", ".", "set_yscale", "(", "'log'", ")", "ax", ".", "set_xscale", "(", "'log'", ")", "ax", ".", "set_xlabel", "(", "'Energy [MeV]'", ")", "ax", ".", "set_ylabel", "(", "'E$^{2}$dN/dE [MeV cm$^{-2}$ s$^{-1}$]'", ")" ]
28.609756
19.390244
def subscribe(self, topic=b''): """subscribe to the SUB socket, to listen for incomming variables, return a stream that can be listened to.""" self.sockets[zmq.SUB].setsockopt(zmq.SUBSCRIBE, topic) poller = self.pollers[zmq.SUB] return poller
[ "def", "subscribe", "(", "self", ",", "topic", "=", "b''", ")", ":", "self", ".", "sockets", "[", "zmq", ".", "SUB", "]", ".", "setsockopt", "(", "zmq", ".", "SUBSCRIBE", ",", "topic", ")", "poller", "=", "self", ".", "pollers", "[", "zmq", ".", "SUB", "]", "return", "poller" ]
54
10.4
def non_tag_chars_from_raw(html): '''generator that yields clean visible as it transitions through states in the raw `html` ''' n = 0 while n < len(html): # find start of tag angle = html.find('<', n) if angle == -1: yield html[n:] n = len(html) break yield html[n:angle] n = angle # find the end of the tag string space = html.find(' ', n, n + longest_extended_tag + 2) angle = html.find('>', n, n + longest_extended_tag + 2) nl = html.find('\n', n, n + longest_extended_tag + 2) tab = html.find('\t', n, n + longest_extended_tag + 2) ends = filter(lambda end: end > -1, [tab, nl, space, angle]) if ends: tag = html[n + 1 : min(ends)] if tag == '!--': # whiteout comment except newlines end = html.find('-->', n) while n < end: nl = html.find('\n', n, end) if nl != -1: yield ' ' * (nl - n) + '\n' n = nl + 1 else: yield ' ' * (end - n + 3) break n = end + 3 continue is_extended = tag.lower() in extended_tags else: is_extended = False # find end of tag even if on a lower line while n < len(html): squote = html.find("'", n) dquote = html.find('"', n) nl = html.find('\n', n) angle = html.find('>', n) if angle == -1: # hits end of doc before end of tag yield ' ' * (len(html) - n) n = len(html) break elif -1 < squote < angle or -1 < dquote < angle: if squote != -1 and dquote != -1: if squote < dquote: open_quote = squote quote = "'" else: open_quote = dquote quote = '"' elif dquote != -1: open_quote = dquote quote = '"' else: open_quote = squote quote = "'" close_quote = html.find(quote, open_quote + 1) while n < close_quote: nl = html.find('\n', n, close_quote) if nl == -1: break yield ' ' * (nl - n) + '\n' n = nl + 1 yield ' ' * (close_quote + 1 - n) n = close_quote + 1 continue elif nl == -1 or angle < nl: # found close before either newline or end of doc yield ' ' * (angle + 1 - n) n = angle + 1 if is_extended and html[angle - 1] != '/': # find matching closing tag. JavaScript can # include HTML *strings* within it, and in # principle, that HTML could contain a closing # script tag in it; ignoring for now. while n < len(html): nl = html.find('\n', n) close = html.find('</', n) close2 = html.find('</', close + 2) angle = html.find('>', close + 2) if nl != -1 and nl < close: yield ' ' * (nl - n) + '\n' n = nl + 1 elif close == -1 or angle == -1: # end of doc before matching close tag yield ' ' * (len(html) - n) n = len(html) break elif close2 != -1 and close2 < angle: # broken tag inside current tag yield ' ' * (close + 2 - n) n = close + 2 elif html[close + 2:angle].lower() == tag.lower(): yield ' ' * (angle + 1 - n) n = angle + 1 break else: yield ' ' * (angle + 1 - n) n = angle + 1 # do not break # finished with tag break else: # found a newline within the current tag yield ' ' * (nl - n) + '\n' n = nl + 1
[ "def", "non_tag_chars_from_raw", "(", "html", ")", ":", "n", "=", "0", "while", "n", "<", "len", "(", "html", ")", ":", "# find start of tag", "angle", "=", "html", ".", "find", "(", "'<'", ",", "n", ")", "if", "angle", "==", "-", "1", ":", "yield", "html", "[", "n", ":", "]", "n", "=", "len", "(", "html", ")", "break", "yield", "html", "[", "n", ":", "angle", "]", "n", "=", "angle", "# find the end of the tag string", "space", "=", "html", ".", "find", "(", "' '", ",", "n", ",", "n", "+", "longest_extended_tag", "+", "2", ")", "angle", "=", "html", ".", "find", "(", "'>'", ",", "n", ",", "n", "+", "longest_extended_tag", "+", "2", ")", "nl", "=", "html", ".", "find", "(", "'\\n'", ",", "n", ",", "n", "+", "longest_extended_tag", "+", "2", ")", "tab", "=", "html", ".", "find", "(", "'\\t'", ",", "n", ",", "n", "+", "longest_extended_tag", "+", "2", ")", "ends", "=", "filter", "(", "lambda", "end", ":", "end", ">", "-", "1", ",", "[", "tab", ",", "nl", ",", "space", ",", "angle", "]", ")", "if", "ends", ":", "tag", "=", "html", "[", "n", "+", "1", ":", "min", "(", "ends", ")", "]", "if", "tag", "==", "'!--'", ":", "# whiteout comment except newlines", "end", "=", "html", ".", "find", "(", "'-->'", ",", "n", ")", "while", "n", "<", "end", ":", "nl", "=", "html", ".", "find", "(", "'\\n'", ",", "n", ",", "end", ")", "if", "nl", "!=", "-", "1", ":", "yield", "' '", "*", "(", "nl", "-", "n", ")", "+", "'\\n'", "n", "=", "nl", "+", "1", "else", ":", "yield", "' '", "*", "(", "end", "-", "n", "+", "3", ")", "break", "n", "=", "end", "+", "3", "continue", "is_extended", "=", "tag", ".", "lower", "(", ")", "in", "extended_tags", "else", ":", "is_extended", "=", "False", "# find end of tag even if on a lower line", "while", "n", "<", "len", "(", "html", ")", ":", "squote", "=", "html", ".", "find", "(", "\"'\"", ",", "n", ")", "dquote", "=", "html", ".", "find", "(", "'\"'", ",", "n", ")", "nl", "=", "html", ".", "find", "(", "'\\n'", ",", "n", ")", "angle", "=", "html", ".", "find", "(", "'>'", ",", "n", ")", "if", "angle", "==", "-", "1", ":", "# hits end of doc before end of tag", "yield", "' '", "*", "(", "len", "(", "html", ")", "-", "n", ")", "n", "=", "len", "(", "html", ")", "break", "elif", "-", "1", "<", "squote", "<", "angle", "or", "-", "1", "<", "dquote", "<", "angle", ":", "if", "squote", "!=", "-", "1", "and", "dquote", "!=", "-", "1", ":", "if", "squote", "<", "dquote", ":", "open_quote", "=", "squote", "quote", "=", "\"'\"", "else", ":", "open_quote", "=", "dquote", "quote", "=", "'\"'", "elif", "dquote", "!=", "-", "1", ":", "open_quote", "=", "dquote", "quote", "=", "'\"'", "else", ":", "open_quote", "=", "squote", "quote", "=", "\"'\"", "close_quote", "=", "html", ".", "find", "(", "quote", ",", "open_quote", "+", "1", ")", "while", "n", "<", "close_quote", ":", "nl", "=", "html", ".", "find", "(", "'\\n'", ",", "n", ",", "close_quote", ")", "if", "nl", "==", "-", "1", ":", "break", "yield", "' '", "*", "(", "nl", "-", "n", ")", "+", "'\\n'", "n", "=", "nl", "+", "1", "yield", "' '", "*", "(", "close_quote", "+", "1", "-", "n", ")", "n", "=", "close_quote", "+", "1", "continue", "elif", "nl", "==", "-", "1", "or", "angle", "<", "nl", ":", "# found close before either newline or end of doc", "yield", "' '", "*", "(", "angle", "+", "1", "-", "n", ")", "n", "=", "angle", "+", "1", "if", "is_extended", "and", "html", "[", "angle", "-", "1", "]", "!=", "'/'", ":", "# find matching closing tag. JavaScript can", "# include HTML *strings* within it, and in", "# principle, that HTML could contain a closing", "# script tag in it; ignoring for now.", "while", "n", "<", "len", "(", "html", ")", ":", "nl", "=", "html", ".", "find", "(", "'\\n'", ",", "n", ")", "close", "=", "html", ".", "find", "(", "'</'", ",", "n", ")", "close2", "=", "html", ".", "find", "(", "'</'", ",", "close", "+", "2", ")", "angle", "=", "html", ".", "find", "(", "'>'", ",", "close", "+", "2", ")", "if", "nl", "!=", "-", "1", "and", "nl", "<", "close", ":", "yield", "' '", "*", "(", "nl", "-", "n", ")", "+", "'\\n'", "n", "=", "nl", "+", "1", "elif", "close", "==", "-", "1", "or", "angle", "==", "-", "1", ":", "# end of doc before matching close tag", "yield", "' '", "*", "(", "len", "(", "html", ")", "-", "n", ")", "n", "=", "len", "(", "html", ")", "break", "elif", "close2", "!=", "-", "1", "and", "close2", "<", "angle", ":", "# broken tag inside current tag", "yield", "' '", "*", "(", "close", "+", "2", "-", "n", ")", "n", "=", "close", "+", "2", "elif", "html", "[", "close", "+", "2", ":", "angle", "]", ".", "lower", "(", ")", "==", "tag", ".", "lower", "(", ")", ":", "yield", "' '", "*", "(", "angle", "+", "1", "-", "n", ")", "n", "=", "angle", "+", "1", "break", "else", ":", "yield", "' '", "*", "(", "angle", "+", "1", "-", "n", ")", "n", "=", "angle", "+", "1", "# do not break", "# finished with tag", "break", "else", ":", "# found a newline within the current tag", "yield", "' '", "*", "(", "nl", "-", "n", ")", "+", "'\\n'", "n", "=", "nl", "+", "1" ]
39.362069
13.258621
def parse_findPeaks(self, f): """ Parse HOMER findPeaks file headers. """ parsed_data = dict() s_name = f['s_name'] for l in f['f']: # Start of data if l.strip() and not l.strip().startswith('#'): break # Automatically parse header lines by = symbol s = l[2:].split('=') if len(s) > 1: k = s[0].strip().replace(' ','_').lower() v = s[1].strip().replace('%','') try: parsed_data[k] = float(v) except ValueError: parsed_data[k] = v if k == 'tag_directory': s_name = self.clean_s_name(os.path.basename(v), os.path.dirname(v)) if len(parsed_data) > 0: if s_name in self.homer_findpeaks: log.debug("Duplicate sample name found in {}! Overwriting: {}".format(f['fn'], s_name)) self.add_data_source(f, s_name, section='findPeaks') self.homer_findpeaks[s_name] = parsed_data
[ "def", "parse_findPeaks", "(", "self", ",", "f", ")", ":", "parsed_data", "=", "dict", "(", ")", "s_name", "=", "f", "[", "'s_name'", "]", "for", "l", "in", "f", "[", "'f'", "]", ":", "# Start of data", "if", "l", ".", "strip", "(", ")", "and", "not", "l", ".", "strip", "(", ")", ".", "startswith", "(", "'#'", ")", ":", "break", "# Automatically parse header lines by = symbol", "s", "=", "l", "[", "2", ":", "]", ".", "split", "(", "'='", ")", "if", "len", "(", "s", ")", ">", "1", ":", "k", "=", "s", "[", "0", "]", ".", "strip", "(", ")", ".", "replace", "(", "' '", ",", "'_'", ")", ".", "lower", "(", ")", "v", "=", "s", "[", "1", "]", ".", "strip", "(", ")", ".", "replace", "(", "'%'", ",", "''", ")", "try", ":", "parsed_data", "[", "k", "]", "=", "float", "(", "v", ")", "except", "ValueError", ":", "parsed_data", "[", "k", "]", "=", "v", "if", "k", "==", "'tag_directory'", ":", "s_name", "=", "self", ".", "clean_s_name", "(", "os", ".", "path", ".", "basename", "(", "v", ")", ",", "os", ".", "path", ".", "dirname", "(", "v", ")", ")", "if", "len", "(", "parsed_data", ")", ">", "0", ":", "if", "s_name", "in", "self", ".", "homer_findpeaks", ":", "log", ".", "debug", "(", "\"Duplicate sample name found in {}! Overwriting: {}\"", ".", "format", "(", "f", "[", "'fn'", "]", ",", "s_name", ")", ")", "self", ".", "add_data_source", "(", "f", ",", "s_name", ",", "section", "=", "'findPeaks'", ")", "self", ".", "homer_findpeaks", "[", "s_name", "]", "=", "parsed_data" ]
42.04
16.08
def remove_menu(self, name): """Remove a top-level menu. Only removes menus created by the same menu manager. """ if name not in self._menus: raise exceptions.MenuNotFound( "Menu {!r} was not found. It might be deleted, or belong to another menu manager.".format(name)) self._menu.removeAction(self._menus[name].menuAction()) del self._menus[name]
[ "def", "remove_menu", "(", "self", ",", "name", ")", ":", "if", "name", "not", "in", "self", ".", "_menus", ":", "raise", "exceptions", ".", "MenuNotFound", "(", "\"Menu {!r} was not found. It might be deleted, or belong to another menu manager.\"", ".", "format", "(", "name", ")", ")", "self", ".", "_menu", ".", "removeAction", "(", "self", ".", "_menus", "[", "name", "]", ".", "menuAction", "(", ")", ")", "del", "self", ".", "_menus", "[", "name", "]" ]
37.727273
20.454545
def delete_message(self, id, remove): """ Delete a message. Delete messages from this conversation. Note that this only affects this user's view of the conversation. If all messages are deleted, the conversation will be as well (equivalent to DELETE) """ path = {} data = {} params = {} # REQUIRED - PATH - id """ID""" path["id"] = id # REQUIRED - remove """Array of message ids to be deleted""" data["remove"] = remove self.logger.debug("POST /api/v1/conversations/{id}/remove_messages with query params: {params} and form data: {data}".format(params=params, data=data, **path)) return self.generic_request("POST", "/api/v1/conversations/{id}/remove_messages".format(**path), data=data, params=params, no_data=True)
[ "def", "delete_message", "(", "self", ",", "id", ",", "remove", ")", ":", "path", "=", "{", "}", "data", "=", "{", "}", "params", "=", "{", "}", "# REQUIRED - PATH - id\r", "\"\"\"ID\"\"\"", "path", "[", "\"id\"", "]", "=", "id", "# REQUIRED - remove\r", "\"\"\"Array of message ids to be deleted\"\"\"", "data", "[", "\"remove\"", "]", "=", "remove", "self", ".", "logger", ".", "debug", "(", "\"POST /api/v1/conversations/{id}/remove_messages with query params: {params} and form data: {data}\"", ".", "format", "(", "params", "=", "params", ",", "data", "=", "data", ",", "*", "*", "path", ")", ")", "return", "self", ".", "generic_request", "(", "\"POST\"", ",", "\"/api/v1/conversations/{id}/remove_messages\"", ".", "format", "(", "*", "*", "path", ")", ",", "data", "=", "data", ",", "params", "=", "params", ",", "no_data", "=", "True", ")" ]
38.909091
27.636364
def _remove_session_save_objects(self): """Used during exception handling in case we need to remove() session: keep instances and merge them in the new session. """ if self.testing: return # Before destroying the session, get all instances to be attached to the # new session. Without this, we get DetachedInstance errors, like when # tryin to get user's attribute in the error page... old_session = db.session() g_objs = [] for key in iter(g): obj = getattr(g, key) if isinstance(obj, db.Model) and sa.orm.object_session(obj) in ( None, old_session, ): g_objs.append((key, obj, obj in old_session.dirty)) db.session.remove() session = db.session() for key, obj, load in g_objs: # replace obj instance in bad session by new instance in fresh # session setattr(g, key, session.merge(obj, load=load)) # refresh `current_user` user = getattr(_request_ctx_stack.top, "user", None) if user is not None and isinstance(user, db.Model): _request_ctx_stack.top.user = session.merge(user, load=load)
[ "def", "_remove_session_save_objects", "(", "self", ")", ":", "if", "self", ".", "testing", ":", "return", "# Before destroying the session, get all instances to be attached to the", "# new session. Without this, we get DetachedInstance errors, like when", "# tryin to get user's attribute in the error page...", "old_session", "=", "db", ".", "session", "(", ")", "g_objs", "=", "[", "]", "for", "key", "in", "iter", "(", "g", ")", ":", "obj", "=", "getattr", "(", "g", ",", "key", ")", "if", "isinstance", "(", "obj", ",", "db", ".", "Model", ")", "and", "sa", ".", "orm", ".", "object_session", "(", "obj", ")", "in", "(", "None", ",", "old_session", ",", ")", ":", "g_objs", ".", "append", "(", "(", "key", ",", "obj", ",", "obj", "in", "old_session", ".", "dirty", ")", ")", "db", ".", "session", ".", "remove", "(", ")", "session", "=", "db", ".", "session", "(", ")", "for", "key", ",", "obj", ",", "load", "in", "g_objs", ":", "# replace obj instance in bad session by new instance in fresh", "# session", "setattr", "(", "g", ",", "key", ",", "session", ".", "merge", "(", "obj", ",", "load", "=", "load", ")", ")", "# refresh `current_user`", "user", "=", "getattr", "(", "_request_ctx_stack", ".", "top", ",", "\"user\"", ",", "None", ")", "if", "user", "is", "not", "None", "and", "isinstance", "(", "user", ",", "db", ".", "Model", ")", ":", "_request_ctx_stack", ".", "top", ".", "user", "=", "session", ".", "merge", "(", "user", ",", "load", "=", "load", ")" ]
38.5625
20.53125
def getSignalHeader(self, chn): """ Returns the header of one signal as dicts Parameters ---------- None """ return {'label': self.getLabel(chn), 'dimension': self.getPhysicalDimension(chn), 'sample_rate': self.getSampleFrequency(chn), 'physical_max':self.getPhysicalMaximum(chn), 'physical_min': self.getPhysicalMinimum(chn), 'digital_max': self.getDigitalMaximum(chn), 'digital_min': self.getDigitalMinimum(chn), 'prefilter':self.getPrefilter(chn), 'transducer': self.getTransducer(chn)}
[ "def", "getSignalHeader", "(", "self", ",", "chn", ")", ":", "return", "{", "'label'", ":", "self", ".", "getLabel", "(", "chn", ")", ",", "'dimension'", ":", "self", ".", "getPhysicalDimension", "(", "chn", ")", ",", "'sample_rate'", ":", "self", ".", "getSampleFrequency", "(", "chn", ")", ",", "'physical_max'", ":", "self", ".", "getPhysicalMaximum", "(", "chn", ")", ",", "'physical_min'", ":", "self", ".", "getPhysicalMinimum", "(", "chn", ")", ",", "'digital_max'", ":", "self", ".", "getDigitalMaximum", "(", "chn", ")", ",", "'digital_min'", ":", "self", ".", "getDigitalMinimum", "(", "chn", ")", ",", "'prefilter'", ":", "self", ".", "getPrefilter", "(", "chn", ")", ",", "'transducer'", ":", "self", ".", "getTransducer", "(", "chn", ")", "}" ]
39.823529
17.470588
def normalize_vector(x, y, z): """ Normalizes vector to produce a unit vector. Parameters ---------- x : float or array-like X component of vector y : float or array-like Y component of vector z : float or array-like Z component of vector Returns ------- x, y, z Unit vector x,y,z components """ mag = np.sqrt(x**2 + y**2 + z**2) x = x/mag y = y/mag z = z/mag return x, y, z
[ "def", "normalize_vector", "(", "x", ",", "y", ",", "z", ")", ":", "mag", "=", "np", ".", "sqrt", "(", "x", "**", "2", "+", "y", "**", "2", "+", "z", "**", "2", ")", "x", "=", "x", "/", "mag", "y", "=", "y", "/", "mag", "z", "=", "z", "/", "mag", "return", "x", ",", "y", ",", "z" ]
18.96
18.96
def info(ctx): """ Display status of YubiKey Slots. """ dev = ctx.obj['dev'] controller = ctx.obj['controller'] slot1, slot2 = controller.slot_status click.echo('Slot 1: {}'.format(slot1 and 'programmed' or 'empty')) click.echo('Slot 2: {}'.format(slot2 and 'programmed' or 'empty')) if dev.is_fips: click.echo('FIPS Approved Mode: {}'.format( 'Yes' if controller.is_in_fips_mode else 'No'))
[ "def", "info", "(", "ctx", ")", ":", "dev", "=", "ctx", ".", "obj", "[", "'dev'", "]", "controller", "=", "ctx", ".", "obj", "[", "'controller'", "]", "slot1", ",", "slot2", "=", "controller", ".", "slot_status", "click", ".", "echo", "(", "'Slot 1: {}'", ".", "format", "(", "slot1", "and", "'programmed'", "or", "'empty'", ")", ")", "click", ".", "echo", "(", "'Slot 2: {}'", ".", "format", "(", "slot2", "and", "'programmed'", "or", "'empty'", ")", ")", "if", "dev", ".", "is_fips", ":", "click", ".", "echo", "(", "'FIPS Approved Mode: {}'", ".", "format", "(", "'Yes'", "if", "controller", ".", "is_in_fips_mode", "else", "'No'", ")", ")" ]
31.142857
17.142857
def getPlainText(self, iv, key, ciphertext): """ :type iv: bytearray :type key: bytearray :type ciphertext: bytearray """ try: cipher = AESCipher(key, iv) plaintext = cipher.decrypt(ciphertext) if sys.version_info >= (3, 0): return plaintext.decode() return plaintext except Exception as e: raise InvalidMessageException(e)
[ "def", "getPlainText", "(", "self", ",", "iv", ",", "key", ",", "ciphertext", ")", ":", "try", ":", "cipher", "=", "AESCipher", "(", "key", ",", "iv", ")", "plaintext", "=", "cipher", ".", "decrypt", "(", "ciphertext", ")", "if", "sys", ".", "version_info", ">=", "(", "3", ",", "0", ")", ":", "return", "plaintext", ".", "decode", "(", ")", "return", "plaintext", "except", "Exception", "as", "e", ":", "raise", "InvalidMessageException", "(", "e", ")" ]
31.571429
7.285714