Dataset preview header: 389k rows; columns are "Unnamed: 0" (int64 row index), "code" (strings, lengths 26 to 79.6k), and "docstring" (strings, lengths 1 to 46.9k).
386,200
def debug(self, message, *args, **kwargs):
    self._log(logging.DEBUG, message, *args, **kwargs)
Debug level to use and abuse when coding
386,201
def _set_fcoe_max_enode(self, v, load=False):
    # String literals elided in the source are marked with `...`.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=RestrictedClassType(base_type=long, restriction_dict={...: []}, int_size=32),
                restriction_dict={...: [...]}),
            is_leaf=True, yang_name="fcoe-max-enode", rest_name="fcoe-enodes",
            parent=self, path_helper=self._path_helper, extmethods=self._extmethods,
            register_paths=True, extensions={...},
            namespace=..., defining_module=...,
            yang_type='fcoe-max-enodes-per-rbridge-type', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': ...,
            'defined-type': "brocade-fcoe:fcoe-max-enodes-per-rbridge-type",
            'generated-type': ...,
        })
    self.__fcoe_max_enode = t
    if hasattr(self, '_set'):
        self._set()
Setter method for fcoe_max_enode, mapped from YANG variable /rbridge_id/fcoe_config/fcoe_max_enode (fcoe-max-enodes-per-rbridge-type) If this variable is read-only (config: false) in the source YANG file, then _set_fcoe_max_enode is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_fcoe_max_enode() directly. YANG Description: This specifies the Number of the FCoE Enodes.
386,202
def generate_html_documentation(self):
    methods = {}
    for method_name in self.system_listMethods():
        if method_name in self.funcs:
            method = self.funcs[method_name]
        elif self.instance is not None:
            method_info = [None, None]  # argspec, documentation
            if hasattr(self.instance, '_get_method_argstring'):
                method_info[0] = self.instance._get_method_argstring(method_name)
            if hasattr(self.instance, '_methodHelp'):
                method_info[1] = self.instance._methodHelp(method_name)
            method_info = tuple(method_info)
            if method_info != (None, None):
                method = method_info
            elif not hasattr(self.instance, '_dispatch'):
                try:
                    method = resolve_dotted_attribute(
                        self.instance,
                        method_name
                    )
                except AttributeError:
                    method = method_info
            else:
                method = method_info
        else:
            assert 0, "Could not find method in self.functions and no "\
                      "instance installed"
        methods[method_name] = method
    documenter = ServerHTMLDoc()
    documentation = documenter.docserver(
        self.server_name,
        self.server_documentation,
        methods
    )
    return documenter.page(self.server_title, documentation)
generate_html_documentation() => html documentation for the server Generates HTML documentation for the server using introspection for installed functions and instances that do not implement the _dispatch method. Alternatively, instances can choose to implement the _get_method_argstring(method_name) method to provide the argument string used in the documentation and the _methodHelp(method_name) method to provide the help text used in the documentation.
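Hypothetical usage sketch: this entry matches the CPython DocXMLRPCServer pattern, so the stdlib class is used below; the server name, title, and Calculator class are made up for illustration.

from xmlrpc.server import DocXMLRPCServer

class Calculator:
    def add(self, a, b):
        """add(a, b) => a + b"""
        return a + b

server = DocXMLRPCServer(('localhost', 8000))
server.set_server_title('Calculator service')
server.set_server_name('calc')
server.set_server_documentation('Example documented XML-RPC server.')
server.register_instance(Calculator())
html = server.generate_html_documentation()  # the method shown above
print(html[:60])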
386,203
def _parse_migrate_output(self, output):
    # The regex group names and the success literal are elided in the source (`...`).
    failed = None
    succeeded = []
    for line in output.split('\n'):
        line = _remove_escape_characters(line).strip()
        line_match = self.migration_regex.match(line)
        if line_match:
            migration = (line_match.group(...), line_match.group(...))
            if line_match.group(...) == ...:
                succeeded.append(migration)
            else:
                failed = migration
                break
    return succeeded, failed
Args: output: str, output of "manage.py migrate" Returns (succeeded: list(tuple), failed: tuple or None) Both tuples are of the form (app, migration)
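A self-contained sketch of the same idea with an explicit regex, since the original's migration_regex and group names are elided; the pattern, group names, and sample output here are all hypothetical.

import re

migration_regex = re.compile(
    r'^Applying (?P<app>\w+)\.(?P<migration>\w+)\.*\s*(?P<result>OK|FAILED)$')

def parse_migrate_output(output):
    failed, succeeded = None, []
    for line in output.split('\n'):
        m = migration_regex.match(line.strip())
        if m:
            migration = (m.group('app'), m.group('migration'))
            if m.group('result') == 'OK':
                succeeded.append(migration)
            else:
                failed = migration
                break
    return succeeded, failed

out = "Applying auth.0001_initial... OK\nApplying blog.0002_post... FAILED"
print(parse_migrate_output(out))  # ([('auth', '0001_initial')], ('blog', '0002_post'))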
386,204
def get_directory(db, user_id, api_dirname, content):
    db_dirname = from_api_dirname(api_dirname)
    if not _dir_exists(db, user_id, db_dirname):
        raise NoSuchDirectory(api_dirname)
    if content:
        files = files_in_directory(
            db, user_id, db_dirname,
        )
        subdirectories = directories_in_directory(
            db, user_id, db_dirname,
        )
    else:
        files, subdirectories = None, None
    return {
        'name': db_dirname,
        'files': files,
        'subdirectories': subdirectories,
    }
Return the names of all files/directories that are direct children of api_dirname. If content is False, return a bare model containing just a database-style name.
386,205
def _zforce(self, R, z, phi=0, t=0):
    r = numpy.sqrt(R**2. + z**2.)
    out = self._scf.zforce(R, z, phi=phi, use_physical=False)
    for a, s, ds, H, dH in zip(self._Sigma_amp, self._Sigma, self._dSigmadR,
                               self._Hz, self._dHzdz):
        out -= 4.*numpy.pi*a*(ds(r)*H(z)*z/r + s(r)*dH(z))
    return out
NAME: _zforce PURPOSE: evaluate the vertical force at (R,z, phi) INPUT: R - Cylindrical Galactocentric radius z - vertical height phi - azimuth t - time OUTPUT: vertical force at (R,z, phi) HISTORY: 2016-12-26 - Written - Bovy (UofT/CCA)
386,206
def __filterItems(self, terms, autoExpand=True, caseSensitive=False, parent=None, level=0): if key in generic: generic_found[key] = True if key in specific: col_found[column] = True mfound = all(col_found.values()) and \ all(generic_found.values()) if mfound: break if not mfound and (autoExpand or item.isExpanded()): mfound = self.__filterItems(terms, autoExpand, caseSensitive, item, level + 1) item.setHidden(not mfound) if mfound: found = True if mfound and autoExpand and item.childCount(): item.setExpanded(True) return found
Filters the items in this tree based on the inputted keywords. :param terms | {<int> column: [<str> term, ..], ..} autoExpand | <bool> caseSensitive | <bool> parent | <QtGui.QTreeWidgetItem> || None :return <bool> | found
386,207
def organismsKEGG():
    organisms = urlopen("http://rest.kegg.jp/list/organism").read()
    organisms = organisms.split("\n")
    organisms = [s.split("\t") for s in organisms]
    organisms = pd.DataFrame(organisms)
    return organisms
Lists all organisms present in the KEGG database. :returns: a dataframe containing one organism per row.
386,208
def sequential_connect(self):
    try:
        mappings = sequential_bind(self.mapping_no + 1, self.interface)
        con = self.server_connect(mappings[0]["sock"])
    except Exception as e:
        log.debug(e)
        log.debug("this err")
        return None
    mappings.remove(mappings[0])
    msg = "SOURCE TCP %s" % (str(mappings[0]["source"]))
    con.send_line(msg)
    reply = con.recv_line(timeout=2)
    remote_port = self.parse_remote_port(reply)
    if not remote_port:
        return None
    predictions = ""
    if self.nat_type != "random":
        mappings = self.predict_mappings(mappings)
        for mapping in mappings:
            predictions += str(mapping["remote"]) + " "
        predictions = predictions.rstrip()
    else:
        predictions = "1337"
    return [con, mappings, predictions]
Sequential connect is designed to return a connection to the Rendezvous Server but it does so in a way that the local port ranges (both for the server and used for subsequent hole punching) are allocated sequentially and predictably. This is because Delta+1 type NATs only preserve the delta value when the source ports increase by one.
386,209
def request(self, url, *, method=..., headers=None, data=None, result_callback=None):
    # The default method string and log format are elided in the source (`...`).
    url = self._make_full_url(url)
    self._log.debug(..., method, url)
    return self._request(url, method=method, headers=headers, data=data,
                         result_callback=result_callback)
Perform request. :param str url: request URL. :param str method: request method. :param dict headers: request headers. :param object data: request data. :param object -> object result_callback: result callback. :rtype: dict :raise: APIError
386,210
def _list_itemstrs(list_, **kwargs):
    items = list(list_)
    kwargs['_return_info'] = True
    _tups = [repr2(item, **kwargs) for item in items]
    itemstrs = [t[0] for t in _tups]
    max_height = max([t[1]['max_height'] for t in _tups]) if _tups else 0
    _leaf_info = {
        'max_height': max_height + 1,
    }
    sort = kwargs.get('sort', None)
    if sort is None:
        # Unordered containers get sorted for a deterministic representation.
        sort = isinstance(list_, (set, frozenset))
    if sort:
        itemstrs = _sort_itemstrs(items, itemstrs)
    return itemstrs, _leaf_info
Create a string representation for each item in a list.
386,211
def design_expparams_field(self, guess, field, cost_scale_k=1.0, disp=False, maxiter=None, maxfun=None, store_guess=False, grad_h=None, cost_mult=False ): r up = self._updater m = up.model if isinstance(guess, Heuristic): raise NotImplementedError("Not yet implemented.") elif callable(guess): ep = guess( idx_exp=len(up.data_record), mean=up.est_mean(), cov=up.est_covariance_mtx() ) else: ep = np.copy(guess) if (cost_mult==False): def objective_function(x): ep[field] = x return up.bayes_risk(ep) + cost_scale_k * m.experiment_cost(ep) else: def objective_function(x): ep[field] = x return up.bayes_risk(ep)* m.experiment_cost(ep)**cost_scale_k d_dx_objective = FiniteDifference(objective_function, ep[field].size) if self._opt_algo == OptimizationAlgorithms.NULL: x_opt = guess[0][field] elif self._opt_algo == OptimizationAlgorithms.CG: opt_options = {} if maxiter is not None: opt_options[] = maxiter x_opt, f_opt, func_calls, grad_calls, warnflag = opt.fmin_cg( objective_function, guess[0][field], disp=disp, full_output=True, **opt_options ) elif self._opt_algo == OptimizationAlgorithms.NCG: opt_options = {} if maxfun is not None: opt_options[] = maxfun if grad_h is not None: opt_options[] = grad_h if f_opt is None: guess_qual = objective_function(x_opt) if self.__best_cost is None or (self.__best_cost > f_opt): ep[field] = x_opt return ep
r""" Designs a new experiment by varying a single field of a shape ``(1,)`` record array and minimizing the objective function .. math:: O(\vec{e}) = r(\vec{e}) + k \$(\vec{e}), where :math:`r` is the Bayes risk as calculated by the updater, and where :math:`\$` is the cost function specified by the model. Here, :math:`k` is a parameter specified to relate the units of the risk and the cost. See :ref:`expdesign` for more details. :param guess: Either a record array with a single guess, or a callable function that generates guesses. :type guess: Instance of :class:`~Heuristic`, `callable` or :class:`~numpy.ndarray` of ``dtype`` :attr:`~qinfer.abstract_model.Simulatable.expparams_dtype` :param str field: The name of the ``expparams`` field to be optimized. All other fields of ``guess`` will be held constant. :param float cost_scale_k: A scale parameter :math:`k` relating the Bayes risk to the experiment cost. See :ref:`expdesign`. :param bool disp: If `True`, the optimization will print additional information as it proceeds. :param int maxiter: For those optimization algorithms which support it (currently, only CG and NELDER_MEAD), limits the number of optimization iterations used for each guess. :param int maxfun: For those optimization algorithms which support it (currently, only NCG and NELDER_MEAD), limits the number of objective calls that can be made. :param bool store_guess: If ``True``, will compare the outcome of this guess to previous guesses and then either store the optimization of this experiment, or the previous best-known experiment design. :param float grad_h: Step size to use in estimating gradients. Used only if ``opt_algo`` is NCG. :return: An array representing the best experiment design found so far for the current experiment.
386,212
def remove(self, child):
    # Iterate in reverse so deleting an entry does not shift the indexes
    # still to be visited (forward deletion skips elements and can raise
    # IndexError once the sequence has shrunk).
    for i in reversed(range(len(self))):
        if self[i] == child:
            del self[i]
Remove a child element.
386,213
def netconf_state_schemas_schema_identifier(self, **kwargs):
    config = ET.Element("config")
    netconf_state = ET.SubElement(config, "netconf-state", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring")
    schemas = ET.SubElement(netconf_state, "schemas")
    schema = ET.SubElement(schemas, "schema")
    version_key = ET.SubElement(schema, "version")
    version_key.text = kwargs.pop('version')
    format_key = ET.SubElement(schema, "format")
    format_key.text = kwargs.pop('format')
    identifier = ET.SubElement(schema, "identifier")
    identifier.text = kwargs.pop('identifier')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
386,214
def get_interface_detail_output_interface_ip_mtu(self, **kwargs):
    config = ET.Element("config")
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ip_mtu = ET.SubElement(interface, "ip-mtu")
    ip_mtu.text = kwargs.pop('ip_mtu')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
386,215
def _layer_norm_new_params(input_shape, rng, epsilon=1e-6):
    del rng, epsilon
    features = input_shape[-1]
    scale = np.ones(features)
    bias = np.zeros(features)
    return (scale, bias)
Helper: create layer norm parameters.
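A minimal sketch of how such (scale, bias) parameters are typically consumed; layer_norm_apply below is a hypothetical companion function, not part of the source.

import numpy as np

def layer_norm_apply(params, x, epsilon=1e-6):
    # Normalize over the feature axis, then apply the learned scale and bias.
    scale, bias = params
    mean = x.mean(axis=-1, keepdims=True)
    var = x.var(axis=-1, keepdims=True)
    return scale * (x - mean) / np.sqrt(var + epsilon) + bias

scale, bias = np.ones(4), np.zeros(4)  # what _layer_norm_new_params returns for features=4
out = layer_norm_apply((scale, bias), np.random.randn(2, 4))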
386,216
def get_permissions(self):
    user_role = self.last_login_role() if self.last_login_role_key else self.role_set[0].role
    return user_role.get_permissions()
Permissions of the user. Returns: List of Permission objects.
386,217
def feed_fetch_force(request, id, redirect_to):
    feed = Feed.objects.get(id=id)
    feed.fetch(force=True)
    msg = _("Fetched tweets for %s" % feed.name)
    messages.success(request, msg, fail_silently=True)
    return HttpResponseRedirect(redirect_to)
Forcibly fetch tweets for the feed
386,218
def repeat_str(state):
    # Return strings reconstructed from the constant names; the fallback
    # value is an assumption.
    if state == const.REPEAT_STATE_OFF:
        return 'Off'
    if state == const.REPEAT_STATE_TRACK:
        return 'Track'
    if state == const.REPEAT_STATE_ALL:
        return 'All'
    return 'Unsupported'
Convert internal API repeat state to string.
386,219
def is_threat(self, result=None, harmless_age=None, threat_score=None, threat_type=None):
    # The result-dict keys are elided in the source (`...`).
    harmless_age = harmless_age if harmless_age is not None else settings.CACHED_HTTPBL_HARMLESS_AGE
    threat_score = threat_score if threat_score is not None else settings.CACHED_HTTPBL_THREAT_SCORE
    threat_type = threat_type if threat_type is not None else -1
    result = result if result is not None else self._last_result
    threat = False
    if result is not None:
        if result[...] < harmless_age and result[...] > threat_score:
            threat = True
        if threat_type > -1:
            if result[...] & threat_type:
                threat = True
            else:
                threat = False
    return threat
Check if IP is a threat :param result: httpBL results; if None, then results from last check_ip() used (optional) :param harmless_age: harmless age for check if httpBL age is older (optional) :param threat_score: threat score for check if httpBL threat is lower (optional) :param threat_type: threat type, if not equal httpBL score type, then return False (optional) :return: True or False
386,220
def quit(self, message=None):
    if message is None:
        message = ''
    if self.connected:
        self.send('QUIT', params=[message])
Quit from the server.
386,221
def start_dashboard(self):
    stdout_file, stderr_file = self.new_log_files("dashboard", True)
    self._webui_url, process_info = ray.services.start_dashboard(
        self.redis_address,
        self._temp_dir,
        stdout_file=stdout_file,
        stderr_file=stderr_file,
        redis_password=self._ray_params.redis_password)
    assert ray_constants.PROCESS_TYPE_DASHBOARD not in self.all_processes
    if process_info is not None:
        self.all_processes[ray_constants.PROCESS_TYPE_DASHBOARD] = [
            process_info
        ]
        redis_client = self.create_redis_client()
        redis_client.hmset("webui", {"url": self._webui_url})
Start the dashboard.
386,222
def extract(self, package_name):
    # Command names and format strings are elided in the source (`...`).
    for cmd in [..., ...]:
        if not Cmd.which(cmd):
            message = (...).format(cmd)
            raise InstallError(message)
    pattern = (...).format(package_name, self.arch)
    rpm_files = Cmd.find(..., pattern)
    if not rpm_files:
        raise InstallError(...)
    cmd = (...).format(rpm_files[0])
    Cmd.sh_e(cmd)
Extract given package.
386,223
def audio_inputs(self):
    return self.client.get_ports(is_audio=True, is_physical=True, is_input=True)
:return: A list of audio input :class:`Ports`.
386,224
def _GenerateDescription(self):
    manifest = {
        "description": self.description,
        "processed_files": len(self.processed_files),
        "archived_files": len(self.archived_files),
        "ignored_files": len(self.ignored_files),
        "failed_files": len(self.failed_files)
    }
    if self.ignored_files:
        manifest["ignored_files_list"] = [
            _ClientPathToString(cp, prefix="aff4:") for cp in self.ignored_files
        ]
    if self.failed_files:
        manifest["failed_files_list"] = [
            _ClientPathToString(cp, prefix="aff4:") for cp in self.failed_files
        ]
    manifest_fd = io.BytesIO()
    if self.total_files != len(self.archived_files):
        manifest_fd.write(self.FILES_SKIPPED_WARNING)
    manifest_fd.write(yaml.Dump(manifest).encode("utf-8"))
    manifest_fd.seek(0)
    st = os.stat_result(
        (0o644, 0, 0, 0, 0, 0, len(manifest_fd.getvalue()), 0, 0, 0))
    for chunk in self.archive_generator.WriteFromFD(
            manifest_fd, os.path.join(self.prefix, "MANIFEST"), st=st):
        yield chunk
Generates description into a MANIFEST file in the archive.
386,225
def gen_xlsx_table_info(): XLSX_FILE = if os.path.exists(XLSX_FILE): pass else: return RAW_LIST = [, , , , , , , , , , , , , , , , , , , , , , , , , ] FILTER_COLUMNS = RAW_LIST + ["A" + x for x in RAW_LIST] + \ ["B" + x for x in RAW_LIST] + \ ["C" + x for x in RAW_LIST] + \ ["D" + x for x in RAW_LIST] tvalue = [] file_d = open_workbook(XLSX_FILE) x = 0 for sheet_ranges in load_workbook(filename=XLSX_FILE): select_sheet = file_d.sheets()[x] rows_num = select_sheet.nrows + 1 for row_num in range(6, rows_num): tvalue = [] for xr in FILTER_COLUMNS: row1_val = sheet_ranges[xr + ].value row4_val = sheet_ranges[xr + .format(row_num)].value if row1_val: if row4_val == None: row4_val = tvalue.append(row4_val) insert_tab(tvalue) x = x + 1 print("成功插入 " + str(rows_num - 6) + " 行数据")
Insert data into the table
386,226
def _parse_response(self, xmlstr, response_cls, service, binding,
                    outstanding_certs=None, **kwargs):
    if self.config.accepted_time_diff:
        kwargs["timeslack"] = self.config.accepted_time_diff

    if "asynchop" not in kwargs:
        if binding in [BINDING_SOAP, BINDING_PAOS]:
            kwargs["asynchop"] = False
        else:
            kwargs["asynchop"] = True

    response = None
    if not xmlstr:
        return response

    if "return_addrs" not in kwargs:
        bindings = {
            BINDING_SOAP,
            BINDING_HTTP_REDIRECT,
            BINDING_HTTP_POST,
        }
        if binding in bindings:
            kwargs["return_addrs"] = self.config.endpoint(
                service, binding=binding, context=self.entity_type)

    try:
        response = response_cls(self.sec, **kwargs)
    except Exception as exc:
        logger.info("%s", exc)
        raise

    xmlstr = self.unravel(xmlstr, binding, response_cls.msgtype)
    if not xmlstr:
        return None

    try:
        response_is_signed = False
        require_response_signature = response.require_response_signature
        response.require_response_signature = True
        response = response.loads(xmlstr, False, origxml=xmlstr)
    except SigverError as err:
        if require_response_signature:
            logger.error("Signature Error: %s", err)
            raise
        else:
            response.require_response_signature = require_response_signature
            response = response.loads(xmlstr, False, origxml=xmlstr)
    except UnsolicitedResponse:
        logger.error("Unsolicited response")
        raise
    except Exception as err:
        if "not well-formed" in "%s" % err:
            logger.error("Not well-formed XML")
        raise
    else:
        response_is_signed = True
    finally:
        response.require_response_signature = require_response_signature

    logger.debug("XMLSTR: %s", xmlstr)
    if not response:
        return response

    keys = None
    if outstanding_certs:
        try:
            cert = outstanding_certs[response.in_response_to]
        except KeyError:
            keys = None
        else:
            if not isinstance(cert, list):
                cert = [cert]
            keys = []
            for _cert in cert:
                keys.append(_cert["key"])

    try:
        assertions_are_signed = False
        require_signature = response.require_signature
        response.require_signature = True
        response = response.verify(keys)
    except SignatureError as err:
        if require_signature:
            logger.error("Signature Error: %s", err)
            raise
        else:
            response.require_signature = require_signature
            response = response.verify(keys)
    else:
        assertions_are_signed = True
    finally:
        response.require_signature = require_signature

    if response.require_signature_or_response_signature:
        if not response_is_signed and not assertions_are_signed:
            msg = "Neither the response nor the assertions are signed"
            logger.error(msg)
            raise SigverError(msg)

    return response
Deal with a Response :param xmlstr: The response as a xml string :param response_cls: What type of response it is :param binding: What type of binding this message came through. :param outstanding_certs: Certificates that belongs to me that the IdP may have used to encrypt a response/assertion/.. :param kwargs: Extra key word arguments :return: None if the reply doesn't contain a valid SAML Response, otherwise the response.
386,227
def field_to_dict(fields):
    field_dict = {}
    for field in fields:
        d_tmp = field_dict
        for part in field.split(LOOKUP_SEP)[:-1]:
            d_tmp = d_tmp.setdefault(part, {})
        d_tmp = d_tmp.setdefault(
            field.split(LOOKUP_SEP)[-1], deepcopy(EMPTY_DICT)
        ).update(deepcopy(EMPTY_DICT))
    return field_dict
Build a dictionary of dependencies for each field related to "root" fields = ["toto", "toto__tata", "titi__tutu"] dico = { "toto": { EMPTY_DICT, "tata": EMPTY_DICT }, "titi" : { "tutu": EMPTY_DICT } } EMPTY_DICT is useful because we don't lose fields without it: dico["toto"] would otherwise only contain "tata" inspired from django.db.models.sql.add_select_related
386,228
def ReadDataFile(self):
    if os.path.isfile(self.filename[0:-4] + '.dat'):
        filename = self.filename[0:-4] + '.dat'
    elif os.path.isfile(self.filename[0:-4] + '.DAT'):
        filename = self.filename[0:-4] + '.DAT'
    else:
        print "Data file not found."
        return 0
    self.filehandler = open(filename, 'rb')
    self.DatFileContent = self.filehandler.read()
    self.filehandler.close()
    return 1
Reads the contents of the Comtrade .dat file and store them in a private variable. For accessing a specific channel data, see methods getAnalogData and getDigitalData.
386,229
def AgregarFusion(self, nro_ing_brutos, nro_actividad, **kwargs):
    "Buyer or seller data for the settlement being adjusted (merger)."
    # The nested dictionary keys are elided in the source (`...`).
    self.ajuste[...][...] = {
        ...: nro_ing_brutos,
        ...: nro_actividad,
    }
    return True
Buyer or seller data according to the settlement being adjusted (merger).
386,230
def rget(self, key, replica_index=None, quiet=None):
    if replica_index is not None:
        return _Base._rgetix(self, key, replica=replica_index, quiet=quiet)
    else:
        return _Base._rget(self, key, quiet=quiet)
Get an item from a replica node :param string key: The key to fetch :param int replica_index: The replica index to fetch. If this is ``None`` then this method will return once any replica responds. Use :attr:`configured_replica_count` to figure out the upper bound for this parameter. The value for this parameter must be a number between 0 and the value of :attr:`configured_replica_count`-1. :param boolean quiet: Whether to suppress errors when the key is not found This method (if `replica_index` is not supplied) functions like the :meth:`get` method that has been passed the `replica` parameter:: c.get(key, replica=True) .. seealso:: :meth:`get` :meth:`rget_multi`
386,231
def plot_site(fignum, SiteRec, data, key): print() print() print(SiteRec[], SiteRec[], SiteRec[], SiteRec[], SiteRec[], SiteRec[], SiteRec[], SiteRec[], SiteRec[]) print() for i in range(len(data)): print( % (data[i][ + key + ], data[i][key + ], data[i] [key + ], data[i][key + ], data[i][key + ], data[i][])) plot_slnp(fignum, SiteRec, data, key) plot = input("s[a]ve plot, [q]uit or <return> to continue: ") if plot == : print("CUL8R") sys.exit() if plot == : files = {} for key in list(EQ.keys()): files[key] = site + + key + + fmt save_plots(EQ, files)
deprecated (used in ipmag)
386,232
def _generate_struct_class(self, ns, data_type): self.emit(self._class_declaration_for_type(ns, data_type)) with self.indent(): if data_type.has_documented_type_or_fields(): self.emit() self.emit() self._generate_struct_class_slots(data_type) self._generate_struct_class_has_required_fields(data_type) self._generate_struct_class_init(data_type) self._generate_struct_class_properties(ns, data_type) self._generate_struct_class_custom_annotations(ns, data_type) self._generate_struct_class_repr(data_type) if data_type.has_enumerated_subtypes(): validator = else: validator = self.emit(.format( class_name_for_data_type(data_type), validator, )) self.emit()
Defines a Python class that represents a struct in Stone.
386,233
def to_datetime(dt, tzinfo=None, format=None):
    if not dt:
        return dt
    tz = pick_timezone(tzinfo, __timezone__)
    if isinstance(dt, (str, unicode)):
        if not format:
            formats = DEFAULT_DATETIME_INPUT_FORMATS
        else:
            formats = list(format)
        d = None
        for fmt in formats:
            try:
                d = datetime.strptime(dt, fmt)
            except ValueError:
                continue
        if not d:
            return None
        d = d.replace(tzinfo=tz)
    else:
        d = datetime(getattr(dt, 'year', 1970), getattr(dt, 'month', 1),
                     getattr(dt, 'day', 1), getattr(dt, 'hour', 0),
                     getattr(dt, 'minute', 0), getattr(dt, 'second', 0),
                     getattr(dt, 'microsecond', 0))
        if not getattr(dt, 'tzinfo', None):
            d = d.replace(tzinfo=tz)
        else:
            d = d.replace(tzinfo=dt.tzinfo)
    return to_timezone(d, tzinfo)
Convert a date or time to datetime with tzinfo
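The getattr chain above is what lets a plain date pass through: missing time fields fall back to defaults. A self-contained illustration:

from datetime import datetime, date

d = date(2020, 5, 17)  # has year/month/day but no hour/minute/second
dt = datetime(getattr(d, 'year', 1970), getattr(d, 'month', 1),
              getattr(d, 'day', 1), getattr(d, 'hour', 0),
              getattr(d, 'minute', 0), getattr(d, 'second', 0),
              getattr(d, 'microsecond', 0))
print(dt)  # 2020-05-17 00:00:00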
386,234
def mutual_information(
        self, M_c, X_L_list, X_D_list, Q, seed, n_samples=1000):
    get_next_seed = make_get_next_seed(seed)
    return iu.mutual_information(
        M_c, X_L_list, X_D_list, Q, get_next_seed, n_samples)
Estimate mutual information for each pair of columns on Q given the set of samples. :param Q: List of tuples where each tuple contains the two column indexes to compare :type Q: list of two-tuples of ints :param n_samples: the number of simple predictive samples to use :type n_samples: int :returns: list of list -- where each sublist is a set of MIs and Linfoots from each crosscat sample.
386,235
def interrupt(self):
    if self.device.read(9) & 0x01:
        self.handle_request()
        self.device.clear_IR()
Invoked on a write operation into the IR of the RendererDevice.
386,236
def pop(self, key, *args):
    try:
        return self.maps[0].pop(key, *args)
    except KeyError:
        raise KeyError('Key not found in the first mapping: {!r}'.format(key))
Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].
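The semantics mirror collections.ChainMap, where mutating operations touch only the first mapping; a quick demonstration with the stdlib class:

from collections import ChainMap

child, parent = {'a': 1}, {'b': 2}
cm = ChainMap(child, parent)
print(cm.pop('a'))   # 1 -- removed from the first mapping
try:
    cm.pop('b')      # 'b' lives in the second mapping, so this raises
except KeyError as e:
    print(e)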
386,237
def calculate_lyapunov(self):
    if self._calculate_megno == 0:
        raise RuntimeError("Lyapunov Characteristic Number cannot be calculated. Make sure to call init_megno() after adding all particles but before integrating the simulation.")
    clibrebound.reb_tools_calculate_lyapunov.restype = c_double
    return clibrebound.reb_tools_calculate_lyapunov(byref(self))
Return the current Lyapunov Characteristic Number (LCN). Note that you need to call init_megno() before the start of the simulation. To get a timescale (the Lyapunov timescale), take the inverse of this quantity.
386,238
def _get_nsamps_samples_n(res):
    try:
        # Check if the number of live points explicitly changes.
        samples_n = res.samples_n
        nsamps = len(samples_n)
    except:
        # If the number of live points is constant, compute `samples_n`.
        niter = res.niter
        nlive = res.nlive
        nsamps = len(res.logvol)
        if nsamps == niter:
            samples_n = np.ones(niter, dtype='int') * nlive
        elif nsamps == (niter + nlive):
            samples_n = np.append(np.ones(niter, dtype='int') * nlive,
                                  np.arange(1, nlive + 1)[::-1])
        else:
            raise ValueError("Final number of samples differs from number of "
                             "iterations and number of live points.")
    return nsamps, samples_n
Helper function for calculating the number of samples Parameters ---------- res : :class:`~dynesty.results.Results` instance The :class:`~dynesty.results.Results` instance taken from a previous nested sampling run. Returns ------- nsamps: int The total number of samples samples_n: array Number of live points at a given iteration
386,239
def is_gene_list(bed_file):
    with utils.open_gzipsafe(bed_file) as in_handle:
        for line in in_handle:
            if not line.startswith("#"):
                if len(line.split()) == 1:
                    return True
                else:
                    return False
Check if the file is only a list of genes, not a BED
386,240
def is_decorated_with_property(node):
    if not node.decorators:
        return False
    for decorator in node.decorators.nodes:
        if not isinstance(decorator, astroid.Name):
            continue
        try:
            if _is_property_decorator(decorator):
                return True
        except astroid.InferenceError:
            pass
    return False
Check if the function is decorated as a property. :param node: The node to check. :type node: astroid.nodes.FunctionDef :returns: True if the function is a property, False otherwise. :rtype: bool
386,241
def cpustats(): * def linux_cpustats(): ret = {} try: with salt.utils.files.fopen(, ) as fp_: stats = salt.utils.stringutils.to_unicode(fp_.read()) except IOError: pass else: for line in stats.splitlines(): if not line: continue comps = line.split() if comps[0] == : ret[comps[0]] = {: _number(comps[4]), : _number(comps[5]), : _number(comps[6]), : _number(comps[2]), : _number(comps[7]), : _number(comps[8]), : _number(comps[3]), : _number(comps[1])} elif comps[0] == : ret[comps[0]] = {: _number(comps[1]), : [_number(x) for x in comps[2:]]} elif comps[0] == : ret[comps[0]] = {: _number(comps[1]), : [_number(x) for x in comps[2:]]} else: ret[comps[0]] = _number(comps[1]) return ret def freebsd_cpustats(): vmstat = __salt__[]().splitlines() vm0 = vmstat[0].split() cpu0loc = vm0.index() vm1 = vmstat[1].split() usloc = vm1.index() vm2 = vmstat[2].split() cpuctr = 0 ret = {} for cpu in vm0[cpu0loc:]: ret[cpu] = {: _number(vm2[usloc + 3 * cpuctr]), : _number(vm2[usloc + 1 + 3 * cpuctr]), : _number(vm2[usloc + 2 + 3 * cpuctr]), } cpuctr += 1 return ret def sunos_cpustats(): mpstat = __salt__[]().splitlines() fields = mpstat[0].split() ret = {} for cpu in mpstat: if cpu.startswith(): continue cpu = cpu.split() ret[_number(cpu[0])] = {} for i in range(1, len(fields)-1): ret[_number(cpu[0])][fields[i]] = _number(cpu[i]) return ret def aix_cpustats(): ret = {} ret[] = [] procn = None fields = [] for line in __salt__[]().splitlines(): if not line: continue procn = len(ret[]) if line.startswith(): comps = line.split() ret[].append({}) ret[][procn][] = {} cpu_comps = comps[1].split() for i in range(0, len(cpu_comps)): cpu_vals = cpu_comps[i].split() ret[][procn][][cpu_vals[0]] = cpu_vals[1] if line.startswith(): fields = line.split() continue if fields: cpustat = line.split() ret[_number(cpustat[0])] = {} for i in range(1, len(fields)-1): ret[_number(cpustat[0])][fields[i]] = _number(cpustat[i]) return ret def openbsd_cpustats(): systat = __salt__[]().splitlines() fields = systat[3].split() ret = {} for cpu in systat[4:]: cpu_line = cpu.split() cpu_idx = cpu_line[0] ret[cpu_idx] = {} for idx, field in enumerate(fields[1:]): ret[cpu_idx][field] = cpu_line[idx+1] return ret get_version = { : linux_cpustats, : freebsd_cpustats, : openbsd_cpustats, : sunos_cpustats, : aix_cpustats, } errmsg = return get_version.get(__grains__[], lambda: errmsg)()
Return the CPU stats for this minion .. versionchanged:: 2016.11.4 Added support for AIX .. versionchanged:: 2018.3.0 Added support for OpenBSD CLI Example: .. code-block:: bash salt '*' status.cpustats
386,242
def zDDEInit(self):
    self.pyver = _get_python_version()
    if _PyZDDE.liveCh == 0:
        try:
            _PyZDDE.server = _dde.CreateServer()
            _PyZDDE.server.Create("ZCLIENT")
        except Exception as err:
            _sys.stderr.write("{}: DDE server may be in use!".format(str(err)))
            return -1
    self.conversation = _dde.CreateConversation(_PyZDDE.server)
    try:
        self.conversation.ConnectTo(self.appName, " ")
    except Exception as err:
        _sys.stderr.write("{}.\nOpticStudio UI may not be running!\n".format(str(err)))
        self.zDDEClose()
        return -1
    else:
        _PyZDDE.liveCh += 1
        self.connection = True
    return 0
Initiates link with OpticStudio DDE server
386,243
def get(self, key):
    o = self.data[key]()
    if o is None:
        del self.data[key]
        raise CacheFault(
            "FinalizingCache has %r but its value is no more." % (key,))
    log.msg(interface=iaxiom.IStatEvent, stat_cache_hits=1, key=key)
    return o
Get an entry from the cache by key. @raise KeyError: if the given key is not present in the cache. @raise CacheFault: (a L{KeyError} subclass) if the given key is present in the cache, but the value it points to is gone.
386,244
def dict_to_env(d, pathsep=os.pathsep):
    out_env = {}
    for k, v in d.iteritems():
        if isinstance(v, list):
            out_env[k] = pathsep.join(v)
        elif isinstance(v, string_types):
            out_env[k] = v
        else:
            # Error message reconstructed; the original literal is elided.
            raise TypeError('{} not a valid env var type'.format(type(v)))
    return out_env
Convert a python dict to a dict containing valid environment variable values. :param d: Dict to convert to an env dict :param pathsep: Path separator used to join lists(default os.pathsep)
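A quick illustration of the intended behavior, rewritten with Python 3 spellings (items()/str) since the original is Python 2:

import os

def dict_to_env(d, pathsep=os.pathsep):
    out_env = {}
    for k, v in d.items():
        if isinstance(v, list):
            out_env[k] = pathsep.join(v)  # join path lists with ':' or ';'
        elif isinstance(v, str):
            out_env[k] = v
        else:
            raise TypeError('{} not a valid env var type'.format(type(v)))
    return out_env

print(dict_to_env({'PATH': ['/usr/bin', '/bin'], 'LANG': 'C'}))
# {'PATH': '/usr/bin:/bin', 'LANG': 'C'} on POSIX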
386,245
def grid(script, size=1.0, x_segments=1, y_segments=1, center=False, color=None): size = util.make_list(size, 2) filter_xml = .join([ , , .format(size[0]), , , , , .format(size[1]), , , , , .format(x_segments + 1), , , , , .format(y_segments + 1), , , , , , , , , ]) util.write_filter(script, filter_xml) if isinstance(script, FilterScript): script.add_layer(, change_layer=True) transform.vert_function(script, z_func=) if center: transform.translate(script, value=[size[0]/2, -size[1]/2, 0]) else: transform.translate(script, value=[size[0], 0, 0]) if color is not None: vert_color.function(script, color=color) return None
2D square/plane/grid created on XY plane x_segments # Number of segments in the X direction. y_segments # Number of segments in the Y direction. center="false" # If true square will be centered on origin; otherwise it is place in the positive XY quadrant.
386,246
def analyzeSweep(abf,sweep,m1=None,m2=None,plotToo=False): abf.setsweep(sweep) if m1 is None: m1=0 else: m1=m1*abf.pointsPerSec if m2 is None: m2=-1 else: m2=m2*abf.pointsPerSec Yorig=abf.sweepY[int(m1):int(m2)] X=np.arange(len(Yorig))/abf.pointsPerSec Ylpf=linear_gaussian(Yorig,sigmaSize=abf.pointsPerMs*300,forwardOnly=False) Yflat=Yorig-Ylpf EPSCs,IPSCs=[],[] if plotToo: plt.figure(figsize=(15,6)) ax1=plt.subplot(211) plt.title("%s sweep %d"%(abf.ID,sweep)) plt.grid() plt.plot(X,Yorig,alpha=.5) plt.plot(X,Ylpf,,alpha=.5,lw=2) plt.margins(0,.2) plt.subplot(212,sharex=ax1) plt.title("gaussian baseline subtraction") plt.grid() plt.plot(X,Yflat,alpha=.5) plt.axhline(0,color=,lw=2,alpha=.5) plt.tight_layout() plt.show() hist, bin_edges = np.histogram(Yflat, density=True, bins=200) peakPa=bin_edges[np.where(hist==max(hist))[0][0]+1] if plotToo: plt.figure() plt.grid() plt.plot(bin_edges[1:],hist,alpha=.5) plt.axvline(0,color=) plt.axvline(peakPa,color=,ls=,lw=2,alpha=.5) plt.semilogy() plt.title("sweep data distribution") plt.ylabel("power") plt.xlabel("pA deviation") plt.show() return peakPa
m1 and m2, if given, are in seconds. returns [# EPSCs, # IPSCs]
386,247
def record(self):
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError('RE record not yet initialized!')

    return b'RE' + struct.pack('=BB', RRRERecord.length(), SU_ENTRY_VERSION)
Generate a string representing the Rock Ridge Relocated Directory record. Parameters: None. Returns: String containing the Rock Ridge record.
386,248
def FaultFromException(ex, inheader, tb=None, actor=None):
    # The traceback line format string is elided in the source (`...`).
    tracetext = None
    if tb:
        try:
            lines = '\n'.join([... % (name, line, func)
                               for name, line, func, text in traceback.extract_tb(tb)])
        except:
            pass
        else:
            tracetext = lines

    exceptionName = ""
    try:
        exceptionName = ":".join([ex.__module__, ex.__class__.__name__])
    except:
        pass
    elt = ZSIFaultDetail(string=exceptionName + "\n" + str(ex), trace=tracetext)

    if inheader:
        detail, headerdetail = None, elt
    else:
        detail, headerdetail = elt, None
    return Fault(Fault.Server, 'Processing Failure', actor, detail, headerdetail)
Return a Fault object created from a Python exception. <SOAP-ENV:Fault> <faultcode>SOAP-ENV:Server</faultcode> <faultstring>Processing Failure</faultstring> <detail> <ZSI:FaultDetail> <ZSI:string></ZSI:string> <ZSI:trace></ZSI:trace> </ZSI:FaultDetail> </detail> </SOAP-ENV:Fault>
386,249
def getoutputerror(cmd):
    out_err = process_handler(cmd, lambda p: p.communicate())
    if out_err is None:
        return '', ''
    out, err = out_err
    return py3compat.bytes_to_str(out), py3compat.bytes_to_str(err)
Return (standard output, standard error) of executing cmd in a shell. Accepts the same arguments as os.system(). Parameters ---------- cmd : str A command to be executed in the system shell. Returns ------- stdout : str stderr : str
386,250
def setMotorShutdown(self, value, device=DEFAULT_DEVICE_ID, message=True):
    return self._setMotorShutdown(value, device, message)
Set the motor shutdown on error status stored on the hardware device. :Parameters: value : `int` An integer indicating the effect on the motors when an error occurs. A `1` will cause the cause the motors to stop on an error and a `0` will ignore errors keeping the motors running. :Keywords: device : `int` The device is the integer number of the hardware devices ID and is only used with the Pololu Protocol. Defaults to the hardware's default value. message : `bool` If set to `True` a text message will be returned, if set to `False` the integer stored in the Qik will be returned. :Returns: Text message indicating the status of the shutdown error. A text message or an int. See the `message` parameter above. :Exceptions: * `SerialException` IO error indicating there was a problem reading from the serial connection.
386,251
def xpathNextChild(self, cur):
    if cur is None:
        cur__o = None
    else:
        cur__o = cur._o
    ret = libxml2mod.xmlXPathNextChild(self._o, cur__o)
    if ret is None:
        raise xpathError('xmlXPathNextChild() failed')
    __tmp = xmlNode(_obj=ret)
    return __tmp
Traversal function for the "child" direction The child axis contains the children of the context node in document order.
386,252
def to_json(value, pretty=False):
    # The first option key is elided in the source (`...`).
    options = {
        ...: False,
        'cls': BasicJSONEncoder,
    }
    if pretty:
        options['indent'] = 2
        options['separators'] = (',', ': ')
    return json.dumps(value, **options)
Serializes the given value to JSON. :param value: the value to serialize :param pretty: whether or not to format the output in a more human-readable way; if not specified, defaults to ``False`` :type pretty: bool :rtype: str
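For comparison, the same pretty-printing options passed straight to json.dumps:

import json

value = {'b': 1, 'a': [1, 2]}
print(json.dumps(value))                                    # compact: {"b": 1, "a": [1, 2]}
print(json.dumps(value, indent=2, separators=(',', ': ')))  # human-readable, 2-space indent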
386,253
def t_text(self, t):
    r':\s*<text>'
    t.lexer.text_start = t.lexer.lexpos - len('<text>')
    t.lexer.begin('text')
r':\s*<text>'
386,254
def count(self, request, *args, **kwargs):
    self.queryset = self.filter_queryset(self.get_queryset())
    return response.Response({'count': self.queryset.count()},
                             status=status.HTTP_200_OK)
To get a count of events - run **GET** against */api/events/count/* as authenticated user. Endpoint support same filters as events list. Response example: .. code-block:: javascript {"count": 12321}
386,255
def conn_gcp(cred, crid): gcp_auth_type = cred.get(, "S") if gcp_auth_type == "A": gcp_crd_ia = CONFIG_DIR + ".gcp_libcloud_a_auth." + cred[] gcp_crd = {: cred[], : cred[], : cred[], : "IA", : gcp_crd_ia} else: gcp_pem = CONFIG_DIR + cred[] gcp_crd_sa = CONFIG_DIR + ".gcp_libcloud_s_auth." + cred[] gcp_crd = {: cred[], : gcp_pem, : cred[], : gcp_crd_sa} driver = get_driver(Provider.GCE) try: gcp_obj = driver(**gcp_crd) except SSLError as e: abort_err("\r SSL Error with GCP: {}".format(e)) except (InvalidCredsError, ValueError) as e: abort_err("\r Error with GCP Credentials: {}".format(e)) return {crid: gcp_obj}
Establish connection to GCP.
386,256
def create_symlinks(d):
    # The symlink path and format strings are elided in the source (`...`).
    data = loadJson(d)
    outDir = prepare_output(d)
    unseen = data["pages"].keys()
    while len(unseen) > 0:
        latest = work = unseen[0]
        while work in unseen:
            unseen.remove(work)
            if "prev" in data["pages"][work]:
                work = data["pages"][work]["prev"]
        print("Latest page: %s" % (latest))
        order = []
        work = latest
        while work in data["pages"]:
            order.extend(data["pages"][work]["images"].values())
            if "prev" in data["pages"][work]:
                work = data["pages"][work]["prev"]
            else:
                work = None
        order.reverse()
        for i, img in enumerate(order):
            os.symlink(os.path.join(..., img),
                       os.path.join(outDir, ... % (i, img)))
Create new symbolic links in output directory.
386,257
def list_slot_differences_slot( self, resource_group_name, name, slot, target_slot, preserve_vnet, custom_headers=None, raw=False, **operation_config): slot_swap_entity = models.CsmSlotEntity(target_slot=target_slot, preserve_vnet=preserve_vnet) def internal_paging(next_link=None, raw=False): if not next_link: url = self.list_slot_differences_slot.metadata[] path_format_arguments = { : self._serialize.url("resource_group_name", resource_group_name, , max_length=90, min_length=1, pattern=r), : self._serialize.url("name", name, ), : self._serialize.url("slot", slot, ), : self._serialize.url("self.config.subscription_id", self.config.subscription_id, ) } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters[] = self._serialize.query("self.api_version", self.api_version, ) else: url = next_link query_parameters = {} header_parameters = {} header_parameters[] = header_parameters[] = if self.config.generate_client_request_id: header_parameters[] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters[] = self._serialize.header("self.config.accept_language", self.config.accept_language, ) body_content = self._serialize.body(slot_swap_entity, ) request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.DefaultErrorResponseException(self._deserialize, response) return response deserialized = models.SlotDifferencePaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.SlotDifferencePaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized
Get the difference in configuration settings between two web app slots. Get the difference in configuration settings between two web app slots. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str :param name: Name of the app. :type name: str :param slot: Name of the source slot. If a slot is not specified, the production slot is used as the source slot. :type slot: str :param target_slot: Destination deployment slot during swap operation. :type target_slot: str :param preserve_vnet: <code>true</code> to preserve Virtual Network to the slot during swap; otherwise, <code>false</code>. :type preserve_vnet: bool :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: An iterator like instance of SlotDifference :rtype: ~azure.mgmt.web.models.SlotDifferencePaged[~azure.mgmt.web.models.SlotDifference] :raises: :class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
386,258
def _suppress_distutils_logs():
    f = distutils.log.Log._log

    def _log(log, level, msg, args):
        if level >= distutils.log.ERROR:
            f(log, level, msg, args)

    distutils.log.Log._log = _log
    yield
    distutils.log.Log._log = f
Hack to hide noise generated by `setup.py develop`. There isn't a good way to suppress them now, so let's monky-patch. See https://bugs.python.org/issue25392.
386,259
def close(self):
    if self.session is not None:
        self.session.cookies.clear()
        self.session.close()
        self.session = None
Close the current session, if still open.
386,260
def disable_ipython(self):
    from IPython.core.getipython import get_ipython
    self.ipython_enabled = False
    ip = get_ipython()
    formatter = ip.display_formatter.formatters['text/html']
    formatter.type_printers.pop(Visualization, None)
    formatter.type_printers.pop(VisualizationLocal, None)
Disable plotting in the iPython notebook. After disabling, lightning plots will be produced in your lightning server, but will not appear in the notebook.
386,261
def api(self):
    api = getattr(self, '_api', None)
    if api is None:
        self._api = mailjet.Api()
    return self._api
Get or create an Api() instance using django settings.
386,262
def estimate_from_ssr(histograms, readout_povm, channel_ops, settings): nqc = len(channel_ops[0].dims[0]) pauli_basis = grove.tomography.operator_utils.PAULI_BASIS ** nqc pi_basis = readout_povm.pi_basis if not histograms.shape[1] == pi_basis.dim: raise ValueError("Currently tomography is only implemented for two-level systems.") n_kj = np.asarray(histograms) c_jk_m = _prepare_c_jk_m(readout_povm, pauli_basis, channel_ops) rho_m = cvxpy.Variable(pauli_basis.dim) p_jk = c_jk_m * rho_m obj = -n_kj.ravel() * cvxpy.log(p_jk) p_jk_mat = cvxpy.reshape(p_jk, pi_basis.dim, len(channel_ops)) constraints = [ p_jk >= 0, np.matrix(np.ones((1, pi_basis.dim))) * p_jk_mat == 1, ] rho_m_real_imag = sum((rm * o_ut.to_realimag(Pm) for (rm, Pm) in ut.izip(rho_m, pauli_basis.ops)), 0) if POSITIVE in settings.constraints: if tomography._SDP_SOLVER.is_functional(): constraints.append(rho_m_real_imag >> 0) else: _log.warning("No convex solver capable of semi-definite problems installed.\n" "Dropping the positivity constraint on the density matrix.") if UNIT_TRACE in settings.constraints: constraints.append(rho_m[0, 0] == 1. / pauli_basis.ops[0].tr().real) prob = cvxpy.Problem(cvxpy.Minimize(obj), constraints) _log.info("Starting convex solver") prob.solve(solver=tomography.SOLVER, **settings.solver_kwargs) if prob.status != cvxpy.OPTIMAL: _log.warning("Problem did not converge to optimal solution. " "Solver settings: {}".format(settings.solver_kwargs)) return StateTomography(np.array(rho_m.value).ravel(), pauli_basis, settings)
Estimate a density matrix from single shot histograms obtained by measuring bitstrings in the Z-eigenbasis after application of given channel operators. :param numpy.ndarray histograms: The single shot histograms, `shape=(n_channels, dim)`. :param DiagognalPOVM readout_povm: The POVM corresponding to the readout plus classifier. :param list channel_ops: The tomography measurement channels as `qutip.Qobj`'s. :param TomographySettings settings: The solver and estimation settings. :return: The generated StateTomography object. :rtype: StateTomography
386,263
def get_resource_area(self, area_id, enterprise_name=None, organization_name=None):
    # The location GUID and API version strings are elided in the source (`...`).
    route_values = {}
    if area_id is not None:
        route_values['areaId'] = self._serialize.url('area_id', area_id, 'str')
    query_parameters = {}
    if enterprise_name is not None:
        query_parameters['enterpriseName'] = self._serialize.query('enterprise_name', enterprise_name, 'str')
    if organization_name is not None:
        query_parameters['organizationName'] = self._serialize.query('organization_name', organization_name, 'str')
    response = self._send(http_method='GET',
                          location_id=...,
                          version=...,
                          route_values=route_values,
                          query_parameters=query_parameters)
    return self._deserialize('ResourceAreaInfo', response)
GetResourceArea. [Preview API] :param str area_id: :param str enterprise_name: :param str organization_name: :rtype: :class:`<ResourceAreaInfo> <azure.devops.v5_0.location.models.ResourceAreaInfo>`
386,264
def _setBitOn(x, bitNum):
    _checkInt(x, minvalue=0, description='input value')
    _checkInt(bitNum, minvalue=0, description='bitnum')
    return x | (1 << bitNum)
Set bit 'bitNum' to True. Args: * x (int): The value before. * bitNum (int): The bit number that should be set to True. Returns: The value after setting the bit. This is an integer. For example: For x = 4 (dec) = 0100 (bin), setting bit number 0 results in 0101 (bin) = 5 (dec).
386,265
def post(self):
    self.write(resultdict)
    self.finish()
This handles POST requests. Saves the changes made by the user on the frontend back to the current checkplot-list.json file.
386,266
def handle_webhook_event(self, environ, url, params):
    for handler in self.events["webhook"]:
        urlpattern = handler.event.args["urlpattern"]
        if not urlpattern or match(urlpattern, url):
            response = handler(self, environ, url, params)
            if response:
                return response
Webhook handler - each handler for the webhook event takes an initial pattern argument for matching the URL requested. Here we match the URL to the pattern for each webhook handler, and bail out if it returns a response.
386,267
def _prepare_executor(self, data, executor): logger.debug(__("Preparing executor for Data with id {}", data.id))
Copy executor sources into the destination directory. :param data: The :class:`~resolwe.flow.models.Data` object being prepared for. :param executor: The fully qualified name of the executor that is to be used for this data object. :return: Tuple containing the relative fully qualified name of the executor class ('relative' to how the executor will be run) and the path to the directory where the executor will be deployed. :rtype: (str, str)
386,268
def parse_expression(expression: str) -> Tuple[Set[str], List[CompositeAxis]]:
    identifiers = set()
    composite_axes = []
    if '.' in expression:
        if '...' not in expression:
            raise EinopsError('Expression may contain dots only inside ellipsis (...)')
        if str.count(expression, '...') != 1 or str.count(expression, '.') != 3:
            raise EinopsError('Expression may contain dots only inside ellipsis (...); only one ellipsis for tensor')
        expression = expression.replace('...', _ellipsis)

    bracket_group = None

    def add_axis_name(x):
        if x is not None:
            if x in identifiers:
                raise ValueError('Indexing expression contains duplicate dimension "{}"'.format(x))
            identifiers.add(x)
            if bracket_group is None:
                composite_axes.append([x])
            else:
                bracket_group.append(x)

    current_identifier = None
    for char in expression:
        if char in '() ' + _ellipsis:
            add_axis_name(current_identifier)
            current_identifier = None
            if char == _ellipsis:
                if bracket_group is not None:
                    raise EinopsError("Ellipsis can't be used inside the composite axis (inside brackets)")
                composite_axes.append(_ellipsis)
                identifiers.add(_ellipsis)
            elif char == '(':
                if bracket_group is not None:
                    raise EinopsError("Axis composition is one-level (brackets inside brackets not allowed)")
                bracket_group = []
            elif char == ')':
                if bracket_group is None:
                    raise EinopsError('Brackets are not balanced')
                composite_axes.append(bracket_group)
                bracket_group = None
        elif '0' <= char <= '9':
            if current_identifier is None:
                raise EinopsError("Axis name can't start with a digit")
            current_identifier += char
        elif 'a' <= char <= 'z':
            if current_identifier is None:
                current_identifier = char
            else:
                current_identifier += char
        else:
            if 'A' <= char <= 'Z':
                raise EinopsError("Only lower-case latin letters allowed in names, not '{}'".format(char))
            raise EinopsError("Unknown character '{}'".format(char))

    if bracket_group is not None:
        raise EinopsError('Imbalanced parentheses in expression: "{}"'.format(expression))
    add_axis_name(current_identifier)
    return identifiers, composite_axes
Parses an indexing expression (for a single tensor). Checks uniqueness of names, checks usage of '...' (allowed only once) Returns set of all used identifiers and a list of axis groups
386,269
def apply_mutation(module_path, operator, occurrence):
    module_ast = get_ast(module_path, python_version=operator.python_version)
    original_code = module_ast.get_code()
    visitor = MutationVisitor(occurrence, operator)
    mutated_ast = visitor.walk(module_ast)
    mutated_code = None
    if visitor.mutation_applied:
        mutated_code = mutated_ast.get_code()
        with module_path.open(mode='wt', encoding='utf-8') as handle:
            handle.write(mutated_code)
            handle.flush()
    return original_code, mutated_code
Apply a specific mutation to a file on disk. Args: module_path: The path to the module to mutate. operator: The `operator` instance to use. occurrence: The occurrence of the operator to apply. Returns: A `(unmutated-code, mutated-code)` tuple to the with-block. If there was no mutation performed, the `mutated-code` is `None`.
386,270
def parse_rcfile(rcfile): def parse_bool(value): value = value.lower() if value in [, ]: return True elif value in [, ]: return False else: raise ValueError(t parse {}sizecommenttemplatereverseoppositeposition file=sys.stderr) return params
Parses rcfile Invalid lines are ignored with a warning
386,271
def dropped(self, param, event):
    if event.source() == self or isinstance(param, AddLabel):
        index = self.indexAt(event.pos())
        self.model().insertRows(index.row(), 1)
        if event.source() == self:
            self.model().setData(index, param)
        else:
            self.hintRequested.emit()
        row = index.row()
        if row == -1:
            row = self.model().rowCount() - 1
        self.selectRow(row)
        self.parameterChanged.emit(self.model().selection(index))
        self.dragActive.emit(False)
Adds the dropped parameter *param* into the protocol list. Re-implemented from :meth:`AbstractDragView<sparkle.gui.abstract_drag_view.AbstractDragView.dropped>`
386,272
def call(self, command, *args):
    command = self._normalize_command_name(command)
    args = self._normalize_command_args(command, *args)

    redis_function = getattr(self, command)
    value = redis_function(*args)
    return self._normalize_command_response(command, value)
Sends call to the function, whose name is specified by command. Used by Script invocations and normalizes calls using standard Redis arguments to use the expected redis-py arguments.
386,273
def get_assessment_offered_query_session_for_bank(self, bank_id):
    if not self.supports_assessment_offered_query():
        raise errors.Unimplemented()
    return sessions.AssessmentOfferedQuerySession(bank_id, runtime=self._runtime)
Gets the ``OsidSession`` associated with the assessment offered query service for the given bank. arg: bank_id (osid.id.Id): the ``Id`` of the bank return: (osid.assessment.AssessmentOfferedQuerySession) - an ``AssessmentOfferedQuerySession`` raise: NotFound - ``bank_id`` not found raise: NullArgument - ``bank_id`` is ``null`` raise: OperationFailed - ``unable to complete request`` raise: Unimplemented - ``supports_assessment_offered_query()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_assessment_offered_query()`` and ``supports_visible_federation()`` are ``true``.*
386,274
def get_vocab(self, vocab_name, **kwargs):
    # Dictionary keys elided in the source are marked with `...`.
    vocab_dict = self.__get_vocab_dict__(vocab_name, **kwargs)
    filepaths = list(set([os.path.join(self.cache_dir, vocab_dict[...]),
                          os.path.join(self.vocab_dir, vocab_dict[...])]))
    for path in filepaths:
        if os.path.exists(path):
            with open(path, 'rb') as f_obj:
                vocab_dict.update({"name": vocab_name,
                                   "data": f_obj.read(),
                                   "modified": os.path.getmtime(path)})
            return vocab_dict
    download_locs = make_list(vocab_dict.get(..., []))
    for loc in download_locs:
        loc_web = urllib.request.urlopen(loc)
        urllib.request.urlretrieve(loc, filepaths[0])
        with open(filepaths[0], 'rb') as f_obj:
            vocab_dict.update({"name": vocab_name,
                               "data": f_obj.read(),
                               "modified": os.path.getmtime(filepaths[0])})
        return vocab_dict
Returns data stream of an rdf vocabulary args: vocab_name: the name or uri of the vocab to return
386,275
def produce(self, **kwargs):
    produce_args = self._produce_params.copy()
    produce_args.update(kwargs)

    if self._class:
        return getattr(self.instance, self.produce_method)(**produce_args)

    produce_args.update(self._hyperparameters)
    return self.primitive(**produce_args)
Call the primitive function, or the predict method of the primitive. The given keyword arguments will be passed directly to the primitive, if it is a simple function, or to the `produce` method of the primitive instance specified in the JSON annotation, if it is a class. If any of the arguments expected by the fit method had been given during the MLBlock initialization, they will be passed as well. Returns: The output of the call to the primitive function or primitive produce method.
386,276
def parse_sdk_name(name):
    if all(part.isdigit() for part in name.split('.', 2)):
        return DOWNLOAD_URL % name

    url = urlparse.urlparse(name)
    if url.scheme:
        return name

    return os.path.abspath(name)
Returns a filename or URL for the SDK name. The name can be a version string, a remote URL or a local path.
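How the three branches behave, assuming a hypothetical DOWNLOAD_URL template and Python 3 urllib:

import os
from urllib.parse import urlparse

DOWNLOAD_URL = 'https://example.com/sdk-%s.zip'  # hypothetical template

def parse_sdk_name(name):
    if all(part.isdigit() for part in name.split('.', 2)):
        return DOWNLOAD_URL % name   # version string -> download URL
    if urlparse(name).scheme:
        return name                  # already a remote URL
    return os.path.abspath(name)     # otherwise treat as a local path

print(parse_sdk_name('1.9.40'))              # https://example.com/sdk-1.9.40.zip
print(parse_sdk_name('https://a.b/sdk.zip')) # unchanged
print(parse_sdk_name('sdks/local.zip'))      # absolute local path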
386,277
def set_properties(self, properties, recursive=True):
    if not properties:
        return
    return self._accessor.set_properties(self, properties, recursive)
Adds new or modifies existing properties listed in properties properties - is a dict which contains the property names and values to set. Property values can be a list or tuple to set multiple values for a key. recursive - on folders property attachment is recursive by default. It is possible to force recursive behavior.
386,278
def galleries(self):
    # API version strings reconstructed from the module names in the docstring.
    api_version = self._get_api_version('galleries')
    if api_version == '2018-06-01':
        from .v2018_06_01.operations import GalleriesOperations as OperationClass
    elif api_version == '2019-03-01':
        from .v2019_03_01.operations import GalleriesOperations as OperationClass
    else:
        raise NotImplementedError("APIVersion {} is not available".format(api_version))
    return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
Instance depends on the API version: * 2018-06-01: :class:`GalleriesOperations<azure.mgmt.compute.v2018_06_01.operations.GalleriesOperations>` * 2019-03-01: :class:`GalleriesOperations<azure.mgmt.compute.v2019_03_01.operations.GalleriesOperations>`
386,279
def qteBindKeyWidget(self, keysequence, macroName: str, widgetObj: QtGui.QWidget):
    # Message strings elided in the source are marked with `...`.
    keysequence = QtmacsKeysequence(keysequence)
    if not hasattr(widgetObj, '_qteAdmin'):
        msg = ...
        msg += ...
        raise QtmacsOtherError(msg)
    if not self.qteIsMacroRegistered(macroName):
        msg = ((...).format(macroName))
        self.qteLogger.error(msg, stack_info=True)
        return False
    try:
        widgetObj._qteAdmin.keyMap.qteInsertKey(keysequence, macroName)
    except AttributeError:
        msg = ...
        self.qteLogger.error(msg, stack_info=True)
        return False
    return True
Bind ``macroName`` to ``widgetObj`` and associate it with ``keysequence``. This method does not affect the key bindings of other applets and/or widgets and can be used to individualise the key bindings inside every applet instance and every widget inside that instance. Even multiple instances of the same applet type (eg. multiple text buffers) can all have individual key bindings. The ``keysequence`` can be specified either as a string (eg '<ctrl>+x <ctrl>+f'), or a list of tuples containing the constants from the ``QtCore.Qt`` name space (eg. [(ControlModifier, Key_X), (ControlModifier, Key_F)]), or as a ``QtmacsKeysequence`` object. |Args| * ``keysequence`` (**str**, **list** of **tuples**, **QtmacsKeysequence**): key sequence to activate ``macroName`` for specified ``widgetSignature``. * ``macroName`` (**str**): the macro to associated with ``keysequence``. * ``widgetObj`` (**QWidget**): determines which widgets signature to use. |Returns| * **bool**: whether or not at least one widget was successfully bound. |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. * **QtmacsKeysequenceError** if the provided ``keysequence`` could not be parsed. * **QtmacsOtherError** if ``widgetObj`` was not added with ``qteAddWidget``.
386,280
def randomColor(self):
    r = random.randint(120, 180)
    g = random.randint(120, 180)
    b = random.randint(120, 180)
    return QColor(r, g, b)
Generates a random color. :return <QColor>
386,281
def log_likelihood(self, x, K_extra=1):
    x = np.asarray(x)
    ks = self._get_occupied()
    K = len(ks)
    K_total = K + K_extra

    obs_distns = []
    for k in range(K):
        o = copy.deepcopy(self.obs_distn)
        o.resample(data=self._get_data_withlabel(k))
        obs_distns.append(o)
    for k in range(K_extra):
        o = copy.deepcopy(self.obs_distn)
        o.resample()
        obs_distns.append(o)

    weights = Categorical(alpha_0=self.alpha_0, K=K_total, weights=None)
    assert len(self.labels_list) == 1
    weights.resample(data=self.labels_list[0].z)

    vals = np.empty((x.shape[0], K_total))
    for k in range(K_total):
        vals[:, k] = obs_distns[k].log_likelihood(x)
    vals += weights.log_likelihood(np.arange(K_total))
    assert not np.isnan(vals).any()
    return logsumexp(vals, axis=1).sum()
Estimate the log likelihood with samples from the model. Draws K_extra components which were not populated by the current model in order to form a truncated approximation to the full mixture model.
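The final marginalization in the method — adding log mixture weights to per-component log likelihoods, then applying logsumexp over components — can be checked in isolation. A standalone sketch with made-up Gaussian components:

import numpy as np
from scipy.special import logsumexp

# Two made-up Gaussian components with mixture weights 0.7 / 0.3.
x = np.array([0.0, 1.0, 5.0])
mus, sigmas = np.array([0.0, 4.0]), np.array([1.0, 1.0])
log_weights = np.log(np.array([0.7, 0.3]))

# vals[n, k] = log p(x_n | component k)
vals = -0.5 * ((x[:, None] - mus) / sigmas) ** 2 \
       - 0.5 * np.log(2 * np.pi * sigmas ** 2)
vals += log_weights  # add log mixture weights

# log p(x) = sum_n log sum_k exp(vals[n, k])
print(logsumexp(vals, axis=1).sum())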
386,282
def DbUnExportServer(self, argin): self._log.debug("In DbUnExportServer()") self.db.unexport_server(argin)
Mark all devices belonging to a specified device server process as non exported :param argin: Device server name (executable/instance) :type: tango.DevString :return: :rtype: tango.DevVoid
386,283
def convolve_spatial3(im, psfs, mode="constant", grid_dim=None, sub_blocks=None, pad_factor=2, plan=None, return_plan=False, verbose=False):
    ndim = im.ndim
    if ndim != 3:
        raise ValueError("wrong dimensions of input!")

    if grid_dim:
        if psfs.shape != im.shape:
            raise ValueError("if grid_dim is set, then im.shape = hs.shape !")
    else:
        if not psfs.ndim == 2 * ndim:
            raise ValueError("wrong dimensions of psf grid! should be (Gz,Gy,Gx,Nz,Ny,Nx)")

    if grid_dim:
        Gs = grid_dim
    else:
        Gs = psfs.shape[:ndim]

    if not np.all([n % g == 0 for n, g in zip(im.shape, Gs)]):
        raise NotImplementedError(
            "shape of image has to be divisible by the grid dimensions Gz Gy Gx = %s" % (str(Gs)))

    if sub_blocks is None:
        return _convolve_spatial3(im, psfs, mode=mode, pad_factor=pad_factor,
                                  plan=plan, return_plan=return_plan, grid_dim=grid_dim)
    else:
        # the psf grid dimension has to be divisible by the sub block count
        if not np.all([g % s == 0 for s, g in zip(sub_blocks, Gs)]):
            raise ValueError("psf grid dimension has to be divisible by the corresponding sub_blocks")
        N_sub = [n // s for n, s in zip(im.shape, sub_blocks)]
        Nblocks = [n // g for n, g in zip(im.shape, Gs)]
        Npads = [n * (s > 1) for n, s in zip(Nblocks, sub_blocks)]
        grid_dim_sub = [g // s + 2 * (s > 1) for g, s in zip(Gs, sub_blocks)]
        if grid_dim:
            res = np.empty(im.shape, np.float32)
            plan = None
            for i, ((im_tile, im_s_src, im_s_dest), (hs_tile, hs_s_src, hs_s_dest)) \
                    in enumerate(zip(tile_iterator(im, blocksize=N_sub, padsize=Npads, mode=mode, verbose=verbose),
                                     tile_iterator(psfs, blocksize=N_sub, padsize=Npads, mode=mode, verbose=verbose))):
                if verbose:
                    print("convolve_spatial3 ... %s\t/ %s" % (i + 1, np.prod(sub_blocks)))
                res_tile, plan = _convolve_spatial3(im_tile.copy(), hs_tile.copy(),
                                                    mode=mode, pad_factor=pad_factor,
                                                    return_plan=True, plan=plan,
                                                    grid_dim=grid_dim_sub)
                res[im_s_src] = res_tile[im_s_dest]
            return res
        else:
            raise NotImplementedError("sub_blocks only implemented for Flatmode")
GPU accelerated spatially varying convolution of a 3d image with a (Gz, Gy, Gx) grid of psfs assumed to be equally spaced within the image. The input image im is subdivided into (Gz, Gy, Gx) blocks; each block is convolved with the corresponding psf and linearly interpolated to give the final result. The psfs can be given either in A) Stackmode psfs.shape = (Gz, Gy, Gx, Hz, Hy, Hx) then psfs[k,j,i] is the psf at the center of each block (i,j,k) in the image B) Flatmode psfs.shape = im.shape then the psfs are assumed to be defined on the gridpoints of the image itself in this case grid_dim = (Gz,Gy,Gx) has to be given as of now each image dimension has to be divisible by the grid dim, i.e. :: Nx % Gx == 0 Ny % Gy == 0 Nz % Gz == 0 GPU memory consumption is of order 8*Nx*Ny*Nz If not enough GPU memory is available, consider using sub_blocks = (n,m,l) then the operation is carried out in a tiled fashion reducing memory consumption to 8*Nx*Ny*Nz*(1/n+2/Gx)*(1/m+2/Gy)*(1/l+2/Gz) (so there is not much use if n>Gx/2...) Example ------- im = np.zeros((64,64,64)) im[::10,::10,::10] = 1. # Stackmode psfs = np.ones((8,8,8,4,4,4)) res = convolve_spatial3(im, psfs, mode = "wrap") # Flatmode _Xs = np.meshgrid(*(np.arange(64),)*3, indexing="ij") psfs = np.prod([np.clip(np.sin(2*np.pi*_X/8),0,1) for _X in _Xs],axis=0) res = convolve_spatial3(im, psfs, grid_dim = (16,16,16)) Parameters ---------- im: ndarray the image to convolve psfs: ndarray the (Gz,Gy,Gx) psf grid, either of shape (Gz,Gy,Gx,Hz,Hy,Hx) or im.shape mode: string, optional padding mode, either "constant" or "wrap" grid_dim: tuple, optional the (Gz,Gy,Gx) grid dimension, has to be provided if psfs.shape = im.shape sub_blocks: tuple, optional tiling mode, give e.g. (2,2,2) to sequentially operate on octants pad_factor: int the factor by which each block gets tiled; use pad_factor=2 if the psfs are well localized, pad_factor=3 if not (e.g. if you experience blocking) plan: fft_plan, optional when given use this as the fft plan return_plan: bool, optional return (res, plan) with plan being the fft plan for further use Returns ------- res: ndarray the convolved image
386,284
def slice_bounds_by_doubling(x_initial, target_log_prob, log_slice_heights, max_doublings, step_size, seed=None, name=None):
    # The default name-scope string is restored from the docstring; the
    # SeedStream salt and the tf.stack name were lost in extraction and are
    # plausible reconstructions.
    with tf.compat.v1.name_scope(
            name, 'find_slice_bounds',
            [x_initial, log_slice_heights, max_doublings, step_size]):
        seed_gen = distributions.SeedStream(seed, salt='slice_bounds_by_doubling')
        x_initial = tf.convert_to_tensor(value=x_initial)
        batch_shape = tf.shape(input=x_initial)
        dtype = step_size.dtype.base_dtype
        left_endpoints = x_initial + step_size * tf.random.uniform(
            batch_shape, minval=-1.0, maxval=0.0, dtype=dtype, seed=seed_gen())
        left_increments, widths = _left_doubling_increments(
            batch_shape, max_doublings, step_size, seed=seed_gen())
        left_endpoints -= left_increments
        right_endpoints = left_endpoints + widths
        left_ep_values = tf.map_fn(target_log_prob, left_endpoints)
        right_ep_values = tf.map_fn(target_log_prob, right_endpoints)
        # An endpoint lies outside the slice when its log density falls
        # below the slice height.
        left_ok = left_ep_values < log_slice_heights
        right_ok = right_ep_values < log_slice_heights
        both_ok = left_ok & right_ok
        both_ok_f = tf.reshape(both_ok, [max_doublings + 1, -1])
        best_interval_idx = _find_best_interval_idx(
            tf.cast(both_ok_f, dtype=tf.int32))
        point_index_gather = tf.stack(
            [best_interval_idx, tf.range(tf.size(input=best_interval_idx))],
            axis=1, name='point_index_gather')
        left_ep_f = tf.reshape(left_endpoints, [max_doublings + 1, -1])
        right_ep_f = tf.reshape(right_endpoints, [max_doublings + 1, -1])
        lower_bounds = tf.reshape(tf.gather_nd(left_ep_f, point_index_gather), batch_shape)
        upper_bounds = tf.reshape(tf.gather_nd(right_ep_f, point_index_gather), batch_shape)
        both_ok = tf.reduce_any(input_tensor=both_ok, axis=0)
        return upper_bounds, lower_bounds, both_ok
Returns the bounds of the slice at each stage of doubling procedure. Precomputes the x coordinates of the left (L) and right (R) endpoints of the interval `I` produced in the "doubling" algorithm [Neal 2003][1] P713. Note that we simultaneously compute all possible doubling values for each chain, for the reason that at small-medium densities, the gains from parallel evaluation might cause a speed-up, but this will be benchmarked against the while loop implementation. Args: x_initial: `tf.Tensor` of any shape and any real dtype consumable by `target_log_prob`. The initial points. target_log_prob: A callable taking a `tf.Tensor` of shape and dtype as `x_initial` and returning a tensor of the same shape. The log density of the target distribution. log_slice_heights: `tf.Tensor` with the same shape as `x_initial` and the same dtype as returned by `target_log_prob`. The log of the height of the slice for each chain. The values must be bounded above by `target_log_prob(x_initial)`. max_doublings: Scalar positive int32 `tf.Tensor`. The maximum number of doublings to consider. step_size: `tf.Tensor` with same dtype as and shape compatible with `x_initial`. The size of the initial interval. seed: (Optional) positive int. The random seed. If None, no seed is set. name: Python `str` name prefixed to Ops created by this function. Default value: `None` (i.e., 'find_slice_bounds'). Returns: upper_bounds: A tensor of same shape and dtype as `x_initial`. Slice upper bounds for each chain. lower_bounds: A tensor of same shape and dtype as `x_initial`. Slice lower bounds for each chain. both_ok: A tensor of shape `x_initial` and boolean dtype. Indicates if both the chosen upper and lower bound lie outside of the slice. #### References [1]: Radford M. Neal. Slice Sampling. The Annals of Statistics. 2003, Vol 31, No. 3 , 705-767. https://projecteuclid.org/download/pdf_1/euclid.aos/1056562461
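An illustrative pure-NumPy re-implementation of the same doubling scheme for a single chain — not the TensorFlow code above; the helper name, the standard-normal target, and the parameters are made up for the sketch:

import numpy as np

def doubling_bounds(x, log_slice_height, step_size, max_doublings, rng):
    """Candidate intervals after 0..max_doublings doublings (Neal 2003, P713)."""
    left = x - step_size * rng.uniform()   # initial interval of width step_size
    widths = step_size * 2.0 ** np.arange(max_doublings + 1)
    # each doubling extends the interval to the left or right at random,
    # by the interval's current width
    extend_left = rng.integers(0, 2, size=max_doublings) * widths[:-1]
    lefts = left - np.concatenate([[0.0], np.cumsum(extend_left)])
    rights = lefts + widths
    return lefts, rights

log_prob = lambda x: -0.5 * x ** 2  # standard normal, up to a constant
rng = np.random.default_rng(42)
x = 0.3
height = log_prob(x) + np.log(rng.uniform())  # log slice height
lefts, rights = doubling_bounds(x, height, 1.0, 5, rng)
ok = (log_prob(lefts) < height) & (log_prob(rights) < height)
first = np.argmax(ok) if ok.any() else len(ok) - 1
print(lefts[first], rights[first], ok[first])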
386,285
def pretty_repr(instance):
    instance_type = type(instance)
    if not is_registered(
        instance_type,
        check_superclasses=True,
        check_deferred=True,
        register_deferred=True
    ):
        warnings.warn(
            "pretty_repr is assigned as the __repr__ method of "
            "'{}'. However, no pretty printer is registered for that type, "
            "its superclasses or its subclasses. Falling back to the default "
            "repr implementation. To fix this warning, register a pretty "
            "printer using prettyprinter.register_pretty.".format(
                instance_type.__qualname__
            ),
            UserWarning
        )
        return object.__repr__(instance)
    return pformat(instance)
A function assignable to the ``__repr__`` dunder method, so that the ``prettyprinter`` definition for the type is used to provide repr output. Usage: .. code:: python from prettyprinter import pretty_repr class MyClass: __repr__ = pretty_repr
386,286
def _dpi(self, resolution_tag): ifd_entries = self._ifd_entries if resolution_tag not in ifd_entries: return 72 resolution_unit = ( ifd_entries[TIFF_TAG.RESOLUTION_UNIT] if TIFF_TAG.RESOLUTION_UNIT in ifd_entries else 2 ) if resolution_unit == 1: return 72 units_per_inch = 1 if resolution_unit == 2 else 2.54 dots_per_unit = ifd_entries[resolution_tag] return int(round(dots_per_unit * units_per_inch))
Return the dpi value calculated for *resolution_tag*, which can be either TIFF_TAG.X_RESOLUTION or TIFF_TAG.Y_RESOLUTION. The calculation is based on the values of both that tag and the TIFF_TAG.RESOLUTION_UNIT tag in this parser's |_IfdEntries| instance.
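For example, with RESOLUTION_UNIT = 3 (centimeters) and an X_RESOLUTION of 118 dots per cm, the horizontal dpi works out to round(118 * 2.54) = 300. A standalone sketch of the same arithmetic:

def dpi_from_ifd(dots_per_unit, resolution_unit=2):
    # resolution_unit: 1 = no absolute unit, 2 = inch, 3 = centimeter
    if resolution_unit == 1:
        return 72  # no meaningful unit; fall back to the common default
    units_per_inch = 1 if resolution_unit == 2 else 2.54
    return int(round(dots_per_unit * units_per_inch))

print(dpi_from_ifd(300, 2))  # -> 300
print(dpi_from_ifd(118, 3))  # -> 300 (118 dots/cm ~ 300 dpi)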
386,287
def get_purchase(self, purchase_id, purchase_key='sid'):
    # The default key, the dict key names, and the endpoint string were lost
    # in extraction; 'sid' follows the docstring ('sid' or 'extid'), while
    # 'id'/'key'/'get_purchase' are plausible reconstructions.
    data = {'id': purchase_id, 'key': purchase_key}
    return self.api_get('get_purchase', data)
Retrieve information about a purchase using the system's unique ID or a client's ID. @param purchase_id: a string that represents a unique_id or an extid. @param purchase_key: a string that is either 'sid' or 'extid'.
386,288
def setup_logger(debug, color): if debug: log_level = logging.DEBUG else: log_level = logging.INFO logger = logging.getLogger() stream = Handler(log_level, debug, color) logger.addHandler(stream) logger.setLevel(log_level)
Configure the logger.
386,289
def from_graph(cls, graph, linear_energy_ranges, quadratic_energy_ranges):
    get_env().enable_infix_notation = True

    theta = cls.empty(dimod.SPIN)
    # The Symbol-name literals ('offset', 'h_{}', 'J_{},{}') were lost in
    # extraction and are reconstructed here.
    theta.add_offset(Symbol('offset', REAL))

    def Linear(v):
        # SMT variable for the linear bias of v, constrained to its range.
        bias = Symbol('h_{}'.format(v), REAL)
        min_, max_ = linear_energy_ranges[v]
        theta.assertions.add(LE(bias, limitReal(max_)))
        theta.assertions.add(GE(bias, limitReal(min_)))
        return bias

    def Quadratic(u, v):
        # SMT variable for the quadratic bias of (u, v), constrained to its
        # range; the range dict may be keyed by either edge orientation.
        bias = Symbol('J_{},{}'.format(u, v), REAL)
        if (v, u) in quadratic_energy_ranges:
            min_, max_ = quadratic_energy_ranges[(v, u)]
        else:
            min_, max_ = quadratic_energy_ranges[(u, v)]
        theta.assertions.add(LE(bias, limitReal(max_)))
        theta.assertions.add(GE(bias, limitReal(min_)))
        return bias

    for v in graph.nodes:
        theta.add_variable(v, Linear(v))
    for u, v in graph.edges:
        theta.add_interaction(u, v, Quadratic(u, v))

    return theta
Create Theta from a graph and energy ranges. Args: graph (:obj:`networkx.Graph`): Provides the structure for Theta. linear_energy_ranges (dict): A dict of the form {v: (min, max), ...} where min and max are the range of values allowed to v. quadratic_energy_ranges (dict): A dict of the form {(u, v): (min, max), ...} where min and max are the range of values allowed to (u, v). Returns: :obj:`.Theta`
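A minimal usage sketch, assuming the surrounding module's imports (dimod, pysmt) are available and that `Theta` is the class this classmethod belongs to; the energy ranges are made up:

import networkx as nx

graph = nx.complete_graph(3)
linear_ranges = {v: (-2.0, 2.0) for v in graph.nodes}
quadratic_ranges = {(u, v): (-1.0, 1.0) for u, v in graph.edges}

theta = Theta.from_graph(graph, linear_ranges, quadratic_ranges)
# theta now carries one SMT Symbol per bias, with range assertions attached.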
386,290
def save(self):
    # compression has no effect when a file-like object is passed in.
    if self.compression and hasattr(self.path_or_buf, 'write'):
        msg = ("compression has no effect when passing file-like "
               "object as input.")
        warnings.warn(msg, RuntimeWarning, stacklevel=2)

    # A zip target gets the buffer written directly; everything else goes
    # through _get_handle. ('write'/'zip' literals restored from context;
    # the step that builds `buf` from the CSV writer was lost in extraction.)
    is_zip = isinstance(self.path_or_buf, ZipFile) or (
        not hasattr(self.path_or_buf, 'write') and self.compression == 'zip')

    if is_zip:
        self.path_or_buf.write(buf)
    else:
        f, handles = _get_handle(self.path_or_buf, self.mode,
                                 encoding=self.encoding,
                                 compression=self.compression)
        f.write(buf)
        close = True
        if close:
            f.close()
            for _fh in handles:
                _fh.close()
Create the writer & save
386,291
def access_token():
    client = Client.query.filter_by(
        client_id=request.form.get('client_id')
    ).first()

    if not client:
        abort(404)

    # Public (non-confidential) clients may not use this grant type. The
    # form-key and grant-type literals were lost in extraction and are
    # plausible reconstructions.
    if not client.is_confidential and \
            'authorization_code' == request.form.get('grant_type'):
        error = InvalidClientError()
        response = jsonify(dict(error.twotuples))
        response.status_code = error.status_code
        abort(response)

    return None
Token view; handles exchanging and refreshing access tokens.
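A hedged sketch of what a client-side exchange against such an endpoint might look like; the URL, code, and credentials are all hypothetical placeholders:

import requests

resp = requests.post(
    'https://example.org/oauth/token',  # hypothetical endpoint URL
    data={
        'grant_type': 'authorization_code',
        'code': 'AUTH_CODE_FROM_REDIRECT',    # placeholder
        'client_id': 'my-client-id',          # placeholder
        'client_secret': 'my-client-secret',  # placeholder
    },
)
print(resp.json())  # expected to contain the access token on success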
386,292
def camelcase_underscore(name):
    # Regex literals reconstructed from the standard camelCase-to-underscore
    # idiom; the originals were lost in extraction.
    s1 = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
Convert camelcase names to underscore
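Assuming the regex reconstruction above, the conversion behaves like this:

print(camelcase_underscore('CamelCase'))        # -> camel_case
print(camelcase_underscore('getHTTPResponse'))  # -> get_http_response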
386,293
def pull(i): o=i.get(,) xrecache=False pp=[] px=i.get(,) t=i.get(,) url=i.get(,) stable=i.get(,) version=i.get(,) if stable==: version= branch=i.get(,) checkout=i.get(,) ip=i.get(,) cr=i.get(,[]) tt= if i.get(,)==: tt= if px!=: pp.append({:px, :t, :url}) uoa=i.get(,) cids=i.get(,[]) if len(cids)>0 and uoa==: uoa=cids[0] if url!= and uoa== and px==: ix=url.rfind() if ix>0: uoa=url[ix+1:] if uoa.endswith(): uoa=uoa[:-4] i[]=uoa if uoa== and len(pp)==0 and url==: uoa= if uoa!=: if uoa.find()>=0 or uoa.find()>=0: r=ck.list_data({:work[], :uoa}) if r[]>0: return r lst=r[] for q in lst: r=ck.access({:, :work[], :q[], :}) if r[]>0: return r d=r[] t=d.get(,) duoa=r[] if d.get(,)==: xrecache=True if t!=: p=d.get(,) url=d.get(,) checkouts=d.get(,{}) pp.append({:p, :t, :url, :duoa, :checkouts}) else: r=ck.access({:, :work[], :uoa, :}) if r[]>0: if r[]==16: i[]= i[]= x=i.get(,) if x==: x= i[]=x i[]=cr return add(i) else: return r d=r[] duoa=r[] if d.get(,)==: xrecache=True p=d[] t=d.get(,) url=d.get(,) checkouts=d.get(,{}) pp.append({:p, :t, :url, :duoa, :checkouts}) for q in pp: p=q.get(,) duoa=q.get(,) t=q.get(,) url=q.get(,) if i.get(,)==: url=url.replace(,) j=url.find() if j>0: url=url[:j]++url[j+1:] url+= if o== and tt!=: ck.out() ck.out(+duoa+) ck.out() ck.out(+p) ck.out(+url) if t==: rq=ck.gen_tmp_file({}) if rq[]>0: return rq xfn=rq[] os.system(+xfn) rq=ck.load_text_file({:xfn, :}) xs= if rq[]==0: xs=rq[].strip() if xs.find()<0: return{:1, :} try: px=os.getcwd() except OSError: from os.path import expanduser px=expanduser("~") if not os.path.isdir(p): os.makedirs(p) if o==: ck.out() ck.out(+p) os.chdir(p) r=0 if ip!=: s=ck.cfg[][t][tt].replace(, url).replace(, p) if o==: ck.out(+s) ck.out() r=os.system(s) if o==: ck.out() os.chdir(px) if r>0: if o==: ck.out() ck.out(+str(r)+) ck.out() rx=ck.inp({: }) x=rx[].lower() if x== or x==: return {:1, :+str(r)} else: return {:1, :+str(r)} else: if o==: ck.out() if tt!=: if o==: ck.out() ck.out() ck.out() r=deps({:p, :cr, :, :version, :branch, :checkout, :o}) if r[]>0: return r if xrecache: if o==: ck.out() ck.out() ck.out() r=recache({:o}) if r[]>0: return r return {:0}
Input: { (path) - repo UOA (where to create entry) (type) - type (url) - URL or (data_uoa) - repo UOA (clone) - if 'yes', clone repo instead of update (current_repos) - if resolving dependencies on other repos, list of repos being updated (to avoid infinite recursion) (git) - if 'yes', use git protocol instead of https (ignore_pull) - useful just for switching to another branch (stable) - take stable version (highly experimental) (version) - checkout version (default - stable) (branch) - git branch (checkout) - git checkout } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 }
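A hedged usage sketch for this CK-style action interface, following the documented input spec; the repo UOA and URL are hypothetical, and 'con' follows the CK convention for console output:

r = pull({
    'data_uoa': 'ck-env',                            # hypothetical repo UOA
    'url': 'https://github.com/example/ck-env.git',  # hypothetical URL
    'out': 'con',
})
if r['return'] > 0:
    print('error:', r.get('error', ''))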
386,294
def register_action(self, name, **kwargs):
    settings = foundations.data_structures.Structure(**{"parent": None,
                                                        "text": None,
                                                        "icon": None,
                                                        "icon_text": None,
                                                        "checkable": None,
                                                        "checked": None,
                                                        "status_tip": None,
                                                        "whats_this": None,
                                                        "tool_tip": None,
                                                        "shortcut": None,
                                                        "shortcut_context": None,
                                                        "slot": None})
    settings.update(kwargs)

    name = self.__normalize_name(name)
    category = foundations.namespace.get_namespace(name)
    name = foundations.namespace.remove_namespace(name)

    action = QAction(name, settings.parent or self)
    self.add_to_category(category, name, action)

    settings.text and action.setText(settings.text)
    settings.icon and action.setIcon(settings.icon)
    settings.icon_text and action.setIconText(settings.icon_text)
    settings.checkable and action.setCheckable(settings.checkable)
    settings.checked and action.setChecked(settings.checked)  # was set_checked, which QAction does not provide
    settings.status_tip and action.setStatusTip(settings.status_tip)
    settings.whats_this and action.setWhatsThis(settings.whats_this)
    settings.tool_tip and action.setToolTip(settings.tool_tip)
    settings.shortcut and action.setShortcut(QKeySequence(settings.shortcut))
    settings.shortcut_context and action.setShortcutContext(settings.shortcut_context)

    if settings.slot:
        self.__actions_signals_slots[action] = settings.slot
        action.triggered.connect(settings.slot)

    return action
Registers the given action name; optional arguments such as a parent, icon, slot, etc. can be given. :param name: Action to register. :type name: unicode :param \*\*kwargs: Keyword arguments. :type \*\*kwargs: \*\* :return: Action. :rtype: QAction
386,295
def _retrieveRegions(self): self.sensors = [] self.coarseSensors = [] self.locationInputs = [] self.L4Columns = [] self.L2Columns = [] self.L5Columns = [] self.L6Columns = [] for i in xrange(self.numColumns): self.sensors.append( self.network.regions["sensorInput_" + str(i)].getSelf() ) self.coarseSensors.append( self.network.regions["coarseSensorInput_" + str(i)].getSelf() ) self.locationInputs.append( self.network.regions["locationInput_" + str(i)].getSelf() ) self.L4Columns.append( self.network.regions["L4Column_" + str(i)].getSelf() ) self.L2Columns.append( self.network.regions["L2Column_" + str(i)].getSelf() ) self.L5Columns.append( self.network.regions["L5Column_" + str(i)].getSelf() ) self.L6Columns.append( self.network.regions["L6Column_" + str(i)].getSelf() )
Retrieve and store Python region instances for each column
386,296
def load_all(self, group): for ep in iter_entry_points(group=group): plugin = ep.load() plugin(self.__config)
Loads all plugins advertising entry points with the given group name. The specified plugin needs to be a callable that accepts the everest configurator as its single argument.
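A minimal sketch of the plugin side; the package name, group name, and callable are hypothetical:

# setup.py of a hypothetical plugin package
from setuptools import setup

setup(
    name='myapp-plugin',
    entry_points={
        'myapp.plugins': [  # hypothetical group name
            'my_plugin = myapp_plugin:configure',
        ],
    },
)

# myapp_plugin.py — the advertised callable takes the configurator
def configure(config):
    # register components, views, etc. on the passed-in configurator
    pass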
386,297
def salvar(self, destino=None, prefix='tmp', suffix='-sat.log'):
    # The default prefix/suffix are restored from the docstring; the error
    # string in the IOError was lost in extraction and is reconstructed via
    # os.strerror.
    if destino:
        if os.path.exists(destino):
            raise IOError((errno.EEXIST, os.strerror(errno.EEXIST), destino,))
        destino = os.path.abspath(destino)
        fd = os.open(destino, os.O_EXCL | os.O_CREAT | os.O_WRONLY)
    else:
        fd, destino = tempfile.mkstemp(prefix=prefix, suffix=suffix)

    os.write(fd, self.conteudo())
    os.fsync(fd)
    os.close(fd)

    return os.path.abspath(destino)
Saves the decoded log file. :param str destino: (Optional) Full path to the file where the log data should be saved. If not given, a temporary file is created via :func:`tempfile.mkstemp`. :param str prefix: (Optional) Prefix for the file name. Defaults to ``"tmp"``. :param str suffix: (Optional) Suffix for the file name. Defaults to ``"-sat.log"``. :return: Returns the full path to the saved file. :rtype: str :raises IOError: If a destination is given and the file already exists.
386,298
def port_str_arrange(ports): b_tcp = ports.find("T") b_udp = ports.find("U") if (b_udp != -1 and b_tcp != -1) and b_udp < b_tcp: return ports[b_tcp:] + ports[b_udp:b_tcp] return ports
Gives a str in the format (TCP always listed first): T:<tcp ports/port ranges, comma separated>U:<udp ports, comma separated>
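For example, a standalone check of the reordering:

ports = 'U:53,123T:22,80-90'
print(port_str_arrange(ports))  # -> 'T:22,80-90U:53,123'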
386,299
def to_phase(self, time, component=None, t0='t0_supconj', **kwargs):
    # The default t0 qualifier string was lost in extraction; 't0_supconj'
    # is a plausible reconstruction. The remaining key strings ('phshift',
    # 't0', 'period', 'dpdt') follow from the variable names and docstring.
    if kwargs.get('phshift', False):
        raise ValueError("support for phshift was removed as of 2.1. Please pass t0 instead.")

    ephem = self.get_ephemeris(component=component, t0=t0, **kwargs)

    if isinstance(time, list):
        time = np.array(time)
    elif isinstance(time, Parameter):
        time = time.get_value(u.d)
    elif isinstance(time, str):
        time = self.get_value(time, u.d)

    t0 = ephem.get('t0', 0.0)
    period = ephem.get('period', 1.0)
    dpdt = ephem.get('dpdt', 0.0)

    if dpdt != 0:
        phase = np.mod(1. / dpdt * np.log(period + dpdt * (time - t0)), 1.0)
    else:
        phase = np.mod((time - t0) / period, 1.0)

    # wrap phases into (-0.5, 0.5]
    if isinstance(phase, float):
        if phase > 0.5:
            phase -= 1
    else:
        phase[phase > 0.5] -= 1

    return phase
Get the phase(s) of a time(s) for a given ephemeris :parameter time: time to convert to phases (should be in same system as t0s) :type time: float, list, or array :parameter t0: qualifier of the parameter to be used for t0 :type t0: str :parameter str component: component for which to get the ephemeris. If not given, component will default to the top-most level of the current hierarchy :parameter **kwargs: any value passed through kwargs will override the ephemeris retrieved by component (ie period, t0, dpdt). Note: be careful about units - input values will not be converted. :return: phase (float) or phases (array)
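A worked example of the constant-period branch: with t0 = 1.0 and period = 2.0, time = 4.0 maps to mod((4-1)/2, 1) = 0.5, while time = 4.2 maps to mod(1.6, 1) = 0.6, which is then wrapped to -0.4 so that phases lie in (-0.5, 0.5]:

import numpy as np

t0, period = 1.0, 2.0
time = np.array([4.0, 4.2])
phase = np.mod((time - t0) / period, 1.0)
phase[phase > 0.5] -= 1
print(phase)  # -> [ 0.5 -0.4]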