Dataset columns: Unnamed: 0 (int64, values 0 to 389k), code (string, lengths 26 to 79.6k), docstring (string, lengths 1 to 46.9k).
380,700
def receive_offer(self, pkt):
    logger.debug("C2. Received OFFER?, in SELECTING state.")
    if isoffer(pkt):
        logger.debug("C2: T, OFFER received")
        self.offers.append(pkt)
        if len(self.offers) >= MAX_OFFERS_COLLECTED:
            logger.debug("C2.5: T, raise REQUESTING.")
            self.select_offer()
            raise self.REQUESTING()
    logger.debug("C2.5: F, raise SELECTING.")
    raise self.SELECTING()
Receive an OFFER while in the SELECTING state.
380,701
def get_changes(self, factory_name, global_factory=False, resources=None,
                task_handle=taskhandle.NullTaskHandle()):
    if resources is None:
        resources = self.project.get_python_files()
    # The two message literals below were elided in the source; reconstructed plausibly.
    changes = ChangeSet('Introduce factory method <%s>' % factory_name)
    job_set = task_handle.create_jobset('Introduce factory method',
                                        len(resources))
    self._change_module(resources, changes, factory_name,
                        global_factory, job_set)
    return changes
Get the changes this refactoring makes. `factory_name` indicates the name of the factory function to be added. If `global_factory` is `True` the factory will be global; otherwise a static method is added to the class. `resources` can be a list of `rope.base.resource.File`\s that this refactoring should be applied on; if `None`, all Python files in the project are searched.
380,702
def list_checks(ruleset, ruleset_file, debug, json, skip, tag, verbose, checks_paths):
    if ruleset and ruleset_file:
        # Option names in the message were elided in the source; reconstructed plausibly.
        raise click.BadOptionUsage(
            "Options '--ruleset' and '--ruleset-file' cannot be used together.")
    try:
        if not debug:
            logging.basicConfig(stream=six.StringIO())
        log_level = _get_log_level(debug=debug, verbose=verbose)
        checks = get_checks(ruleset_name=ruleset, ruleset_file=ruleset_file,
                            logging_level=log_level, tags=tag,
                            checks_paths=checks_paths, skips=skip)
        _print_checks(checks=checks)
        if json:
            AbstractCheck.save_checks_to_json(file=json, checks=checks)
    except ColinException as ex:
        logger.error("An error occurred: %r", ex)
        if debug:
            raise
        else:
            raise click.ClickException(str(ex))
    except Exception as ex:
        logger.error("An error occurred: %r", ex)
        if debug:
            raise
        else:
            raise click.ClickException(str(ex))
Print the checks.
380,703
def _build_conflict_target(self):
    conflict_target = []
    # The error-message strings and SQL fragments below were elided in the
    # source; they are reconstructed plausibly.
    if not isinstance(self.query.conflict_target, list):
        raise SuspiciousOperation((
            '%s is not a valid conflict target, specify a list of column '
            'names, or tuples with column names and hstore key.'
        ) % str(self.query.conflict_target))

    def _assert_valid_field(field_name):
        field_name = self._normalize_field_name(field_name)
        if self._get_model_field(field_name):
            return
        raise SuspiciousOperation((
            '%s is not a valid conflict target, specify a list of column '
            'names, or tuples with column names and hstore key.'
        ) % str(field_name))

    for field_name in self.query.conflict_target:
        _assert_valid_field(field_name)
        if isinstance(field_name, tuple):
            conflict_target.append(
                '(%s->\'%s\')' % (
                    self._format_field_name(field_name),
                    field_name[1]
                )
            )
        else:
            conflict_target.append(
                self._format_field_name(field_name))

    return '(%s)' % ','.join(conflict_target)
Builds the `conflict_target` for the ON CONFLICT clause.
380,704
def utc2local(date):
    date_offset = (datetime.now() - datetime.utcnow())
    date_offset = (date_offset.microseconds +
                   (date_offset.seconds + date_offset.days * 24 * 3600) * 1e6) / 1e6
    date_offset = int(round(date_offset / 60 / 60))
    return date + timedelta(hours=date_offset)
DokuWiki returns dates with a +0000 timezone. This function converts *date* to the local time.
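A minimal usage sketch (the timestamp is made up; only the host's UTC offset matters):

from datetime import datetime

# Suppose the DokuWiki API returned a UTC timestamp already parsed into a datetime:
utc_date = datetime(2019, 1, 1, 12, 0, 0)
local_date = utc2local(utc_date)  # shifted by the host's UTC offset, rounded to whole hours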
380,705
def render_surface_function(surfimg, funcimg=None, alphasurf=0.2, alphafunc=1.0, isosurf=0.5, isofunc=0.5, smoothsurf=None, smoothfunc=None, cmapsurf=, cmapfunc=, filename=None, notebook=False, auto_open=False): cmap_dict = { : , : , : , : , : } if surfimg.dimension != 3: raise ValueError() if notebook: init_notebook_mode(connected=True) fig_list = [] fig_data_list = [] surfimg = resample_image(surfimg, (3,3,3)) surfimg_arr = surfimg.numpy() surfverts, surffaces,_,_ = skimage.measure.marching_cubes_lewiner(surfimg_arr, isosurf, spacing=(1,1,1)) surffig = FF.create_trisurf(x=surfverts[:,0], y=surfverts[:,1], z=surfverts[:,2], colormap=cmap_dict.get(cmapsurf, cmapsurf), plot_edges=False, simplices=surffaces) surffig[][0].update(opacity=alphasurf) fig_list.append(surffig) fig_data_list.append(surffig.data[0]) if funcimg is not None: if not isinstance(funcimg, (tuple,list)): funcimg = [funcimg] if not isinstance(alphafunc, (tuple,list)): alphafunc = [alphafunc]*len(funcimg) if not isinstance(isofunc, (tuple,list)): isofunc = [isofunc]*len(funcimg) if not isinstance(cmapfunc, (tuple,list)): cmapfunc = [cmapfunc]*len(funcimg) for i in range(len(cmapfunc)): cmapfunc[i] = % str(wc.name_to_rgb(cmapfunc[i])) cmapfunc[i] = [cmapfunc[i]]*2 for func_idx, fimg in enumerate(funcimg): if fimg.dimension != 3: raise ValueError() fimg = resample_image(fimg, (3,3,3)) funcimg_arr = fimg.numpy() funcverts, funcfaces,_,_ = skimage.measure.marching_cubes_lewiner(funcimg_arr, isofunc[func_idx], spacing=(1,1,1)) funcfig = FF.create_trisurf(x=funcverts[:,0], y=funcverts[:,1], z=funcverts[:,2], plot_edges=False, simplices=funcfaces, colormap=cmapfunc[func_idx]) funcfig[][0].update(opacity=alphafunc[func_idx]) fig_list.append(funcfig) fig_data_list.append(funcfig.data[0]) if filename is not None: save_file = image_filename = filename filename = image_filename.split()[0] + else: image_filename = filename = save_file = None try: plot(fig_data_list, image=save_file, filename=filename, image_filename=image_filename, auto_open=auto_open) except PermissionError: print()
Render an image as a base surface and an optional collection of other image. ANTsR function: `renderSurfaceFunction` NOTE: The ANTsPy version of this function is actually completely different than the ANTsR version, although they should produce similar results. Arguments --------- surfimg : ANTsImage Input image to use as rendering substrate. funcimg : ANTsImage Input list of images to use as functional overlays. alphasurf : scalar alpha for the surface contour alphafunc : scalar alpha value for functional blobs isosurf : scalar intensity level that defines lower threshold for surface image isofunc : scalar intensity level that defines lower threshold for functional image smoothsurf : scalar (optional) smoothing for the surface image smoothfunc : scalar (optional) smoothing for the functional image cmapsurf : string color map for surface image cmapfunc : string color map for functional image filename : string where to save rendering. if None, will plot interactively notebook : boolean whether you're in a jupyter notebook. Returns ------- N/A Example ------- >>> import ants >>> mni = ants.image_read(ants.get_ants_data('mni')) >>> mnia = ants.image_read(ants.get_ants_data('mnia')) >>> ants.render_surface_function(mni, mnia, alphasurf=0.1, filename='/users/ncullen/desktop/surffnc.png')
380,706
def read_file(file_name, encoding='utf-8'):
    # The default encoding and the open mode were elided in the source;
    # 'utf-8' and binary mode are plausible given the decode below.
    with open(file_name, 'rb') as f:
        data = f.read()
    if encoding is not None:
        data = data.decode(encoding)
    return data
Read a text file. :param encoding: encoding used to decode the bytes (skipped if None) :param file_name: path of the file to read :return: the file contents
380,707
def _filter_result(result, filter_functions=None):
    if filter_functions is not None:
        for filter_func in filter_functions:
            result = filter(filter_func, result)
    return result
Filter result with given filter functions. :param result: an iterable object :param filter_functions: some filter functions :return: a filter object (filtered result)
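A short sketch of how the helper chains filters (the lambdas are illustrative):

evens_over_four = _filter_result(
    range(10),
    filter_functions=[lambda x: x % 2 == 0, lambda x: x > 4],
)
print(list(evens_over_four))  # [6, 8] -- each function further narrows the iterable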
380,708
def exception(self, timeout=None):
    if self._state == self.RUNNING:
        self._context.wait_all_futures([self], timeout)
    return self._exception
Return the exception raised by the call that the future represents. Args: timeout: The number of seconds to wait for the exception if the future isn't done. If None, then there is no limit on the wait time. Returns: The exception raised by the call that the future represents or None if the call completed without raising. Raises: TimeoutError: If the future didn't finish executing before the given timeout.
380,709
def preconstrain_flag_page(self, magic_content):
    for m, v in zip(magic_content, self.state.cgc.flag_bytes):
        self.preconstrain(m, v)
Preconstrain the data in the flag page. :param magic_content: The content of the magic page as a bytestring.
380,710
def skycoord_to_healpix(self, skycoord, return_offsets=False):
    if self.frame is None:
        raise NoFrameError("skycoord_to_healpix")
    skycoord = skycoord.transform_to(self.frame)
    representation = skycoord.represent_as(UnitSphericalRepresentation)
    lon, lat = representation.lon, representation.lat
    return self.lonlat_to_healpix(lon, lat, return_offsets=return_offsets)
Convert celestial coordinates to HEALPix indices (optionally with offsets). Note that this method requires that a celestial frame was specified when initializing HEALPix. If you don't know or need the celestial frame, you can instead use :meth:`~astropy_healpix.HEALPix.lonlat_to_healpix`. Parameters ---------- skycoord : :class:`~astropy.coordinates.SkyCoord` The celestial coordinates to convert return_offsets : bool If `True`, the returned values are the HEALPix pixel as well as ``dx`` and ``dy``, the fractional positions inside the pixel. If `False` (the default), only the HEALPix pixel is returned. Returns ------- healpix_index : `~numpy.ndarray` 1-D array of HEALPix indices dx, dy : `~numpy.ndarray` 1-D arrays of offsets inside the HEALPix pixel in the range [0:1] (0.5 is the center of the HEALPix pixels). This is returned if ``return_offsets`` is `True`.
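A hedged usage sketch with astropy_healpix (the nside and the coordinates are arbitrary):

from astropy.coordinates import SkyCoord, Galactic
from astropy_healpix import HEALPix

hp = HEALPix(nside=16, order='nested', frame=Galactic())
coord = SkyCoord(l=10.0, b=20.0, unit='deg', frame='galactic')
index = hp.skycoord_to_healpix(coord)                         # pixel index only
index, dx, dy = hp.skycoord_to_healpix(coord, return_offsets=True)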
380,711
def get_parameter(self, path, default=None, return_group=False):
    # The two dict keys were elided in the source; 'config' and 'parameters'
    # are plausible reconstructions given the docstring.
    value = read_parameter_by_path(self.job['config']['parameters'],
                                   path, return_group)
    if value is None:
        return default
    return value
Reads hyperparameter from job configuration. If nothing found use given default. :param path: str :param default: * :param return_group: If true and path is a choice_group, we return the dict instead of the group name. :return: *
380,712
def add(name, beacon_data, **kwargs):
    # Message strings, event tags, and dict keys were elided in the source;
    # they are reconstructed plausibly below. A stray fragment of the CLI
    # example that was fused into the code has been removed.
    ret = {'comment': 'Failed to add beacon {0}.'.format(name),
           'result': False}
    if name in list_(return_yaml=False, **kwargs):
        ret['comment'] = 'Beacon {0} is already configured.'.format(name)
        return ret
    if any('beacon_module' in key for key in beacon_data):
        res = next(value for value in beacon_data if 'beacon_module' in value)
        beacon_name = res['beacon_module']
    else:
        beacon_name = name
    if beacon_name not in list_available(return_yaml=False, **kwargs):
        ret['comment'] = 'Beacon "{0}" is not available.'.format(beacon_name)
        return ret
    if 'test' in kwargs and kwargs['test']:
        ret['result'] = True
        ret['comment'] = 'Beacon: {0} would be added.'.format(name)
    else:
        try:
            eventer = salt.utils.event.get_event('minion', opts=__opts__)
            res = __salt__['event.fire']({'name': name,
                                          'beacon_data': beacon_data,
                                          'func': 'add'},
                                         'manage_beacons')
            if res:
                event_ret = eventer.get_event(
                    tag='/salt/minion/minion_beacon_add_complete',
                    wait=kwargs.get('timeout', 30))
                valid = event_ret['valid']
                vcomment = event_ret['vcomment']
                if not valid:
                    ret['result'] = False
                    ret['comment'] = ('Unable to add beacon: {0} {1}'
                                      .format(name, vcomment))
                    return ret
        except KeyError:
            ret['result'] = False
            ret['comment'] = 'Event module not available. Beacon add failed.'
    return ret
Add a beacon on the minion Args: name (str): Name of the beacon to configure beacon_data (dict): Dictionary or list containing configuration for beacon. Returns: dict: Boolean and status message on success or failure of add. CLI Example: .. code-block:: bash salt '*' beacons.add ps "[{'processes': {'salt-master': 'stopped', 'apache2': 'stopped'}}]"
380,713
def parse_schema(schema_file):
    e = xml.etree.ElementTree.parse(schema_file)
    root = e.getroot()
    cols = []
    for elem in root.findall(".//{http://genomic.elet.polimi.it/entities}field"):
        cols.append(elem.text)
    return cols
parses the schema file and returns the columns that are later going to represent the columns of the genometric space dataframe :param schema_file: the path to the schema file :return: the columns of the schema file
380,714
def read(self, lenient=False):
    self.preamble(lenient=lenient)
    raw = self.idatdecomp(lenient)
    if self.interlace:
        raw = bytearray(itertools.chain(*raw))
        arraycode = 'BH'[self.bitdepth > 8]
        pixels = map(lambda *row: array(arraycode, row),
                     *[iter(self.deinterlace(raw))] * self.width * self.planes)
    else:
        pixels = self.iterboxed(self.iterstraight(raw))
    meta = dict()
    # The attribute-name literals below were elided in the source; they are
    # reconstructed plausibly from pypng's metadata conventions.
    for attr in 'greyscale alpha planes bitdepth interlace'.split():
        meta[attr] = getattr(self, attr)
    meta['size'] = (self.width, self.height)
    for attr in ('gamma', 'transparent', 'background', 'last_mod_time',
                 'icc_profile', 'resolution', 'text', 'rendering_intent',
                 'white_point', 'rgb_points'):
        a = getattr(self, attr, None)
        if a is not None:
            meta[attr] = a
    if self.plte:
        meta['palette'] = self.palette()
    return self.width, self.height, pixels, meta
Read the PNG file and decode it. Returns (`width`, `height`, `pixels`, `metadata`). May use excessive memory. `pixels` are returned in boxed row flat pixel format. If the optional `lenient` argument evaluates to True, checksum failures will raise warnings rather than exceptions.
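A minimal sketch of calling this reader, assuming the surrounding class is pypng's png.Reader (the file path is hypothetical):

import png

reader = png.Reader(filename='example.png')
width, height, pixels, meta = reader.read()
for row in pixels:                            # boxed row flat pixel format
    first_pixel = row[:meta['planes']]        # one value per plane
    break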
380,715
def sub_menu(self): submenu = gtk.Menu() self.start = gtk.ImageMenuItem("Start") self.stop = gtk.ImageMenuItem("Stop") self.restart = gtk.ImageMenuItem("Restart") self.status = gtk.ImageMenuItem("Status") self.start.show() self.stop.show() self.restart.show() self.status.show() img_Start = gtk.image_new_from_stock(gtk.STOCK_MEDIA_PLAY, gtk.ICON_SIZE_MENU) img_Start.show() self.start.set_image(img_Start) img_Stop = gtk.image_new_from_stock(gtk.STOCK_STOP, gtk.ICON_SIZE_MENU) img_Stop.show() self.stop.set_image(img_Stop) img_Restart = gtk.image_new_from_stock(gtk.STOCK_REFRESH, gtk.ICON_SIZE_MENU) img_Restart.show() self.restart.set_image(img_Restart) img_Status = gtk.image_new_from_stock(gtk.STOCK_DIALOG_QUESTION, gtk.ICON_SIZE_MENU) img_Status.show() self.status.set_image(img_Status) submenu.append(self.start) submenu.append(self.stop) submenu.append(self.restart) submenu.append(self.status) self.daemon = gtk.ImageMenuItem("Daemon") self.img_daemon = gtk.image_new_from_stock(self.daemon_STOCK, gtk.ICON_SIZE_MENU) self.img_daemon.show() self.daemon.set_submenu(submenu)
Create daemon submenu
380,716
def putParamset(self, remote, address, paramset, value):
    if self._server is not None:
        return self._server.putParamset(remote, address, paramset, value)
Set paramsets manually
380,717
def on_before_trading(self, date_time): if self.cta_call[] > 0: self.cta_call[] += 1 if self.cta_put[] > 0: self.cta_put[] += 1 self.cta_call[] = False self.cta_put[] = False
Check at the market open: if there are open positions, increment the days-held counter by 1.
380,718
def print_task_output(batch_client, job_id, task_ids, encoding=None): for task_id in task_ids: file_text = read_task_file_as_string( batch_client, job_id, task_id, _STANDARD_OUT_FILE_NAME, encoding) print("{} content for task {}: ".format( _STANDARD_OUT_FILE_NAME, task_id)) print(file_text) file_text = read_task_file_as_string( batch_client, job_id, task_id, _STANDARD_ERROR_FILE_NAME, encoding) print("{} content for task {}: ".format( _STANDARD_ERROR_FILE_NAME, task_id)) print(file_text)
Prints the stdout and stderr for each task specified. Originally in azure-batch-samples.Python.Batch.common.helpers :param batch_client: The batch client to use. :type batch_client: `batchserviceclient.BatchServiceClient` :param str job_id: The id of the job to monitor. :param task_ids: The collection of tasks to print the output for. :type task_ids: `list` :param str encoding: The encoding to use when downloading the file.
380,719
def render_tag(self, context, kwargs, nodelist):
    self.load_configuration(**kwargs)
    # The context key was elided in the source; 'request' is the obvious fit.
    request = context['request']
    self.full_path = request.get_full_path()
    context.push()
    content = nodelist.render(context)
    context.pop()
    content = render_content(
        content,
        full_path=self.full_path,
        parent_tag=self.parent_tag,
        css_class=self.css_class,
        menu=self.menu,
        ignore_params=self.ignore_params,
    )
    return content
Render content with "active" URLs logic.
380,720
def find_in_registry(category=None, namespace=None, name=None):
    selected_registry = registry
    if category is not None:
        selected_registry = [re for re in selected_registry if re.category == category]
    if namespace is not None:
        selected_registry = [re for re in selected_registry if re.namespace == namespace]
    if name is not None:
        selected_registry = [re for re in selected_registry if re.name == name]
    if len(selected_registry) > 0:
        return [sr.cls for sr in selected_registry]
    return None
Find a given category/namespace/name combination in the registry category - string, see utils.inputs.registrycategories namespace - module namespace, see settings.NAMESPACE name - lowercase name of module
380,721
def get_interpolated_gap(self, tol=0.001, abs_tol=False, spin=None): tdos = self.y if len(self.ydim) == 1 else np.sum(self.y, axis=1) if not abs_tol: tol = tol * tdos.sum() / tdos.shape[0] energies = self.x below_fermi = [i for i in range(len(energies)) if energies[i] < self.efermi and tdos[i] > tol] above_fermi = [i for i in range(len(energies)) if energies[i] > self.efermi and tdos[i] > tol] vbm_start = max(below_fermi) cbm_start = min(above_fermi) if vbm_start == cbm_start: return 0.0, self.efermi, self.efermi else: terminal_dens = tdos[vbm_start:vbm_start + 2][::-1] terminal_energies = energies[vbm_start:vbm_start + 2][::-1] start = get_linear_interpolated_value(terminal_dens, terminal_energies, tol) terminal_dens = tdos[cbm_start - 1:cbm_start + 1] terminal_energies = energies[cbm_start - 1:cbm_start + 1] end = get_linear_interpolated_value(terminal_dens, terminal_energies, tol) return end - start, end, start
Expects a DOS object and finds the gap Args: tol: tolerance in occupations for determining the gap abs_tol: Set to True for an absolute tolerance and False for a relative one. spin: Possible values are None - finds the gap in the summed densities, Up - finds the gap in the up spin channel, Down - finds the gap in the down spin channel. Returns: (gap, cbm, vbm): Tuple of floats in eV corresponding to the gap, cbm and vbm.
380,722
def commit(self):
    assert self.batch is not None, "No active batch, call start() first"
    logger.debug("Committing batch from %d sources...", len(self.batch))
    by_priority = []
    for name in self.batch.keys():
        priority = self.priorities.get(name, self.default_priority)
        by_priority.append((priority, name))
    for priority, name in sorted(by_priority, key=lambda key: key[0]):
        # The format string had lost its name placeholder; restored here.
        logger.debug("Processing items from %r (priority=%d)...", name, priority)
        items = self.batch[name]
        for handlers in items.values():
            for agg, handler in handlers:
                try:
                    if agg is None:
                        handler()
                    else:
                        handler(agg)
                except Exception as error:
                    logger.exception("Error while invoking handler.")
    self.batch = None
    logger.debug("Batch committed.")
Commit a batch.
380,723
def list(self, all_pages=False, **kwargs):
    self._separate(kwargs)
    return super(Resource, self).list(all_pages=all_pages, **kwargs)
Return a list of notification templates. Note here configuration-related fields like 'notification_configuration' and 'channels' will not be used even provided. If one or more filters are provided through keyword arguments, filter the results accordingly. If no filters are provided, return all results. =====API DOCS===== Retrieve a list of objects. :param all_pages: Flag that if set, collect all pages of content from the API when returning results. :type all_pages: bool :param page: The page to show. Ignored if all_pages is set. :type page: int :param query: Contains 2-tuples used as query parameters to filter resulting resource objects. :type query: list :param `**kwargs`: Keyword arguments list of available fields used for searching resource objects. :returns: A JSON object containing details of all resource objects returned by Tower backend. :rtype: dict =====API DOCS=====
380,724
def inheritance_patch(attrs):
    # The attribute-name literals were elided in the source; 'access' and
    # 'fset' are plausible reconstructions for a Tango attribute object.
    for key, obj in attrs.items():
        if isinstance(obj, attribute):
            if getattr(obj, 'access', None) == AttrWriteType.READ_WRITE:
                if not getattr(obj, 'fset', None):
                    method_name = obj.write_method_name or "write_" + key
                    obj.fset = attrs.get(method_name)
Patch tango objects before they are processed by the metaclass.
380,725
def inherit_kwargs(inherit_func):
    import utool as ut
    keys, is_arbitrary = ut.get_kwargs(inherit_func)
    if is_arbitrary:
        # Elided literal; '**kwargs' is a plausible reconstruction.
        keys += ['**kwargs']
    kwargs_append = '\n'.join(keys)

    def _wrp(func):
        if func.__doc__ is None:
            func.__doc__ = ''
        # The heading literal was elided; 'Kwargs:' is a plausible guess.
        kwargs_block = 'Kwargs:\n' + ut.indent(kwargs_append)
        func.__doc__ += kwargs_block
        return func
    return _wrp
TODO: move to util_decor. Example inputs: inherit_func = inspect_pdfs; func = encoder.visualize.im_func.
380,726
def get_vcs_details_output_vcs_details_node_vcs_mode(self, **kwargs):
    config = ET.Element("config")
    get_vcs_details = ET.Element("get_vcs_details")
    config = get_vcs_details
    output = ET.SubElement(get_vcs_details, "output")
    vcs_details = ET.SubElement(output, "vcs-details")
    node_vcs_mode = ET.SubElement(vcs_details, "node-vcs-mode")
    # The kwargs keys were elided in the source; reconstructed from the
    # usual pattern of this generated code.
    node_vcs_mode.text = kwargs.pop('node_vcs_mode')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
380,727
def bug(self, container: Container) -> Bug:
    name = container.bug
    return self.__installation.bugs[name]
Returns a description of the bug inside a given container.
380,728
def debug(sequence):
    points = []
    for i, p in enumerate(sequence):
        copy = Point(p)
        # The key literal was elided in the source; the docstring says an
        # index property is added, so 'index' is used here.
        copy['index'] = i
        points.append(copy)
    return sequence.__class__(points)
Adds information to the sequence for better debugging; currently only an index property on each point in the sequence.
380,729
def get_device_info(self, bigip):
    coll = bigip.tm.cm.devices.get_collection()
    # The comparison literal was elided; the F5 REST API reports selfDevice
    # as the string 'true'.
    device = [device for device in coll if device.selfDevice == 'true']
    assert len(device) == 1
    return device[0]
Get device information about a specific BigIP device. :param bigip: bigip object --- device to inspect :returns: the device object representing the BigIP itself
380,730
def actions(connection):
    session = _make_session(connection=connection)
    for action in Action.ls(session=session):
        # The f-string body was elided in the source; echoing the action
        # itself is a plausible reconstruction.
        click.echo(f"{action}")
List all actions.
380,731
def asini(b, orbit, solve_for=None):
    # Qualifier names, descriptions, and dict keys below were elided in the
    # source; they are reconstructed plausibly from the docstring.
    orbit_ps = _get_system_ps(b, orbit)
    metawargs = orbit_ps.meta
    metawargs.pop('qualifier')
    sma_def = FloatParameter(qualifier='sma', value=8.0,
                             default_unit=u.solRad,
                             description='Semi-major axis of the orbit')
    incl_def = FloatParameter(qualifier='incl', value=90.0,
                              default_unit=u.deg,
                              description='Orbital inclination angle')
    asini_def = FloatParameter(qualifier='asini', value=8.0,
                               default_unit=u.solRad,
                               description='Projected semi-major axis of the orbit')
    sma, created = b.get_or_create('sma', sma_def, **metawargs)
    incl, created = b.get_or_create('incl', incl_def, **metawargs)
    asini, created = b.get_or_create('asini', asini_def, **metawargs)
    if solve_for in [None, asini]:
        lhs = asini
        rhs = sma * sin(incl)
    elif solve_for == sma:
        lhs = sma
        rhs = asini / sin(incl)
    elif solve_for == incl:
        lhs = incl
        rhs = arcsin(asini / sma)
    else:
        raise NotImplementedError
    return lhs, rhs, {'orbit': orbit}
Create a constraint for asini in an orbit. If any of the required parameters ('asini', 'sma', 'incl') do not exist in the orbit, they will be created. :parameter b: the :class:`phoebe.frontend.bundle.Bundle` :parameter str orbit: the label of the orbit in which this constraint should be built :parameter str solve_for: if 'asini' should not be the derived/constrained parameter, provide which other parameter should be derived (ie 'sma' or 'incl') :returns: lhs (Parameter), rhs (ConstraintParameter), args (list of arguments that were passed to this function)
380,732
def ControlFromHandle(handle: int) -> Control:
    return Control.CreateControlFromElement(
        _AutomationClient.instance().IUIAutomation.ElementFromHandle(handle))
Call IUIAutomation.ElementFromHandle with a native handle. handle: int, a native window handle. Return `Control` subclass.
380,733
def get(self, request, **resources):
    instance = resources.get(self._meta.name)
    if instance is not None:
        return instance
    return self.paginate(
        request, self.get_collection(request, **resources))
Default GET method. Return instance (collection) by model. :return object: instance or collection from self model
380,734
def imslic(img, n_segments=100, aspect=None):
    from skimage.segmentation import slic, mark_boundaries
    from skimage.morphology import dilation
    if img.ndim == 2 or (img.ndim == 3 and img.shape[-1] == 1):
        imz = np.stack([img, img, img], 2)
    else:
        imz = img
    slics = slic(imz, n_segments=n_segments)
    boundaries = mark_boundaries(imz, slics)
    return plt.imshow(boundaries, aspect=aspect)
slic args : n_segments=100, compactness=10., max_iter=10, sigma=0, spacing=None, multichannel=True, convert2lab=None, enforce_connectivity=True, min_size_factor=0.5, max_size_factor=3, slic_zero=False mark_boundaries args: label_img, color=(1, 1, 0), outline_color=None, mode='outer', background_label=0 imshow args: cmap=None, norm=None, aspect=None, interpolation=None, alpha=None, vmin=None, vmax=None, origin=None, extent=None, shape=None, filternorm=1, filterrad=4.0, imlim=None, resample=None, url=None, hold=None, data=None, :param img: :param slicarg: :param slickw: :return:
380,735
def set_to_cache(self):
    queryset = self.get_queryset()
    # The cache-dict keys were elided in the source; reconstructed plausibly
    # from the attributes being stored.
    cache.set(self._get_cache_key(), {
        'queryset': [
            queryset.none(),
            queryset.query,
        ],
        'cls': self.__class__,
        'search_fields': tuple(self.search_fields),
        'max_results': int(self.max_results),
        'url': str(self.get_url()),
        'dependent_fields': dict(self.dependent_fields),
    })
Add widget's attributes to Django's cache. Split the QuerySet, to not pickle the result set.
380,736
def parse_file(path, format=None, encoding='utf-8', force_types=True):
    # The default encoding is 'utf-8' per the docstring; binary open mode is
    # a plausible reconstruction of the elided literal.
    try:
        with open(path, 'rb') as f:
            return parse(f, format, encoding, force_types)
    except EnvironmentError as e:
        raise AnyMarkupError(e, traceback.format_exc())
A convenience wrapper of parse, which accepts path of file to parse. Args: path: path to file to parse format: explicitly override the guessed `inp` markup format encoding: file encoding, defaults to utf-8 force_types: if `True`, integers, floats, booleans and none/null are recognized and returned as proper types instead of strings; if `False`, everything is converted to strings if `None`, backend return value is used Returns: parsed `inp` (dict or list) containing unicode values Raises: AnyMarkupError if a problem occurs while parsing
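A usage sketch, assuming this is anymarkup's parse_file (the file names are hypothetical):

data = parse_file('settings.yaml')                 # format guessed from extension/content
data = parse_file('settings.conf', format='ini')   # or forced explicitly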
380,737
def experiment_completed(self):
    heroku_app = HerokuApp(self.app_id)
    status_url = "{}/summary".format(heroku_app.url)
    data = {}
    try:
        resp = requests.get(status_url)
        data = resp.json()
    except (ValueError, requests.exceptions.RequestException):
        logger.exception("Error fetching experiment status.")
    logger.debug("Current application state: {}".format(data))
    return data.get("completed", False)
Checks the current state of the experiment to see whether it has completed. This makes use of the experiment server `/summary` route, which in turn uses :meth:`~Experiment.is_complete`.
380,738
def is_disabled_action(view):
    if not isinstance(view, core_views.ActionsViewSet):
        return False
    action = getattr(view, 'action', None)
    return action in view.disabled_actions if action is not None else False
Checks whether Link action is disabled.
380,739
def recv(self):
    try:
        items = self.poller.poll(self.timeout)
    except KeyboardInterrupt:
        return
    if items:
        msg = self.client.recv_multipart()
        self.close()
        if self.verbose:
            logging.info("I: received reply:")
            dump(msg)
        assert len(msg) >= 4
        header = msg.pop(0)
        header = msg.pop(0)
        assert MDP.C_CLIENT == header
        header = msg.pop(0)
        return msg
    else:
        logging.warn("W: permanent error, abandoning request")
Returns the reply message or None if there was no reply.
380,740
def _parse_results(self, raw_results, includes_qualifiers):
    results = []
    for res in raw_results:
        item = CaseInsensitiveDict()
        for prop_name in self.property_names:
            item[prop_name] = None
        for wmi_property in res.Properties_:
            # The source lost the block that builds `qualifiers` (guarded by
            # `includes_qualifiers`); a plausible reconstruction:
            qualifiers = {}
            if includes_qualifiers:
                qualifiers = dict((q.Name, q.Value)
                                  for q in wmi_property.Qualifiers_)
            if "CounterType" in qualifiers:
                counter_type = qualifiers["CounterType"]
                self._property_counter_types[wmi_property.Name] = counter_type
                self.logger.debug(
                    u"Caching property qualifier CounterType: "
                    "{class_name}.{property_names} = {counter_type}".format(
                        class_name=self.class_name,
                        property_names=wmi_property.Name,
                        counter_type=counter_type
                    )
                )
            else:
                self.logger.debug(
                    u"CounterType qualifier not found for {class_name}.{property_names}".format(
                        class_name=self.class_name,
                        property_names=wmi_property.Name
                    )
                )
            try:
                item[wmi_property.Name] = float(wmi_property.Value)
            except (TypeError, ValueError):
                item[wmi_property.Name] = wmi_property.Value
        results.append(item)
    return results
Parse WMI query results in a more comprehensive form. Returns: List of WMI objects ``` [ { 'freemegabytes': 19742.0, 'name': 'C:', 'avgdiskbytesperwrite': 1536.0 }, { 'freemegabytes': 19742.0, 'name': 'D:', 'avgdiskbytesperwrite': 1536.0 } ] ```
380,741
def select_rows(self, rows):
    self.values = self.values.iloc[rows]
    self.index = self.index.iloc[rows, :]
    for prop in self._property_columns:
        vals = getattr(self, prop)[rows]
        setattr(self, prop, vals)
Truncate internal arrays to keep only the specified rows. Args: rows (array): An integer or boolean array identifying the indices of rows to keep.
380,742
def atomic_to_cim_xml(obj):
    # Several literals below (boolean strings, float format specs, and the
    # special-case values) were elided in the source; reconstructed plausibly.
    if obj is None:
        return obj
    elif isinstance(obj, six.text_type):
        return obj
    elif isinstance(obj, six.binary_type):
        return _to_unicode(obj)
    elif isinstance(obj, bool):
        return u'true' if obj else u'false'
    elif isinstance(obj, (CIMInt, six.integer_types, CIMDateTime)):
        return six.text_type(obj)
    elif isinstance(obj, datetime):
        return six.text_type(CIMDateTime(obj))
    elif isinstance(obj, Real32):
        s = u'{0:.8E}'.format(obj)
        if s == u'0.00000000E+00':
            s = u'0.0'
        elif s in (u'inf', u'-inf'):
            pass
        elif u'.' not in s:
            parts = s.split(u'E')
            parts[0] = parts[0] + u'.0'
            s = u'E'.join(parts)
        return s
    elif isinstance(obj, (Real64, float)):
        s = u'{0:.16E}'.format(obj)
        if s == u'0.0000000000000000E+00':
            s = u'0.0'
        elif s in (u'inf', u'-inf'):
            pass
        elif u'.' not in s:
            parts = s.split(u'E')
            parts[0] = parts[0] + u'.0'
            s = u'E'.join(parts)
        return s
    else:
        raise TypeError(
            _format("Value {0!A} has invalid type {1} for conversion to a "
                    "CIM-XML string", obj, type(obj)))
Convert an "atomic" scalar value to a CIM-XML string and return that string. The returned CIM-XML string is ready for use as the text of a CIM-XML 'VALUE' element. Parameters: obj (:term:`CIM data type`, :term:`number`, :class:`py:datetime`): The "atomic" input value. May be `None`. Must not be an array/list/tuple. Must not be a :ref:`CIM object`. Returns: A :term:`unicode string` object in CIM-XML value format representing the input value. `None`, if the input value is `None`. Raises: TypeError
380,743
def get_season_player_stats(self, season_key, player_key):
    season_player_stats_url = (self.api_path + "season/" + season_key +
                               "/player/" + player_key + "/stats/")
    response = self.get_response(season_player_stats_url)
    return response
Calling Season Player Stats API. Arg: season_key: key of the season player_key: key of the player Return: json data
380,744
def _drop_schema(self, force_drop=False):
    connection = connections[get_tenant_database_alias()]
    has_schema = hasattr(connection, 'schema_name')
    if has_schema and connection.schema_name not in (self.schema_name,
                                                     get_public_schema_name()):
        # The message was garbled in the source; reconstructed from the
        # surviving fragment.
        raise Exception("Can't delete tenant outside its own schema or "
                        "the public schema. Current schema is %s."
                        % connection.schema_name)
    if has_schema and schema_exists(self.schema_name) and (self.auto_drop_schema or force_drop):
        self.pre_drop()
        cursor = connection.cursor()
        cursor.execute('DROP SCHEMA "%s" CASCADE' % self.schema_name)
Drops the schema
380,745
def predict_moments(self, X):
    # The fitted-attribute names were elided in the source; reconstructed
    # plausibly from the attributes used below.
    check_is_fitted(self, ['var_', 'regularizer_', 'weights_',
                           'covariance_', 'hypers_'])
    X = check_array(X)
    Phi = self.basis.transform(X, *atleast_list(self.hypers_))
    Ey = Phi.dot(self.weights_)
    Vf = (Phi.dot(self.covariance_) * Phi).sum(axis=1)
    return Ey, Vf + self.var_
Full predictive distribution from Bayesian linear regression. Parameters ---------- X : ndarray (N*,d) array query input dataset (N* samples, d dimensions). Returns ------- Ey : ndarray The expected value of y* for the query inputs, X* of shape (N*,). Vy : ndarray The expected variance of y* for the query inputs, X* of shape (N*,).
380,746
def add_ecc_cgw(psr, gwtheta, gwphi, mc, dist, F, inc, psi, gamma0, e0, l0, q, nmax=100, nset=None, pd=None, periEv=True, psrTerm=True, tref=0, check=True, useFile=True): cosgwtheta, cosgwphi = N.cos(gwtheta), N.cos(gwphi) singwtheta, singwphi = N.sin(gwtheta), N.sin(gwphi) sin2psi, cos2psi = N.sin(2*psi), N.cos(2*psi) m = N.array([singwphi, -cosgwphi, 0.0]) n = N.array([-cosgwtheta*cosgwphi, -cosgwtheta*singwphi, singwtheta]) omhat = N.array([-singwtheta*cosgwphi, -singwtheta*singwphi, -cosgwtheta]) if and in psr.pars(): ptheta = N.pi/2 - psr[].val pphi = psr[].val elif and in psr.pars(): fac = 180./N.pi coords = ephem.Equatorial(ephem.Ecliptic(str(psr[].val*fac), str(psr[].val*fac))) ptheta = N.pi/2 - float(repr(coords.dec)) pphi = float(repr(coords.ra)) phat = N.array([N.sin(ptheta)*N.cos(pphi), N.sin(ptheta)*N.sin(pphi),\ N.cos(ptheta)]) fplus = 0.5 * (N.dot(m, phat)**2 - N.dot(n, phat)**2) / (1+N.dot(omhat, phat)) fcross = (N.dot(m, phat)*N.dot(n, phat)) / (1 + N.dot(omhat, phat)) cosMu = -N.dot(omhat, phat) toas = N.double(psr.toas())*86400 - tref if check: y = eu.solve_coupled_ecc_solution(F, e0, gamma0, l0, mc, q, N.array([0.0,toas.max()])) Fc0, ec0, gc0, phic0 = y[0,:] Fc1, ec1, gc1, phic1 = y[-1,:] Tobs = 1/(toas.max()-toas.min()) if N.abs(Fc0-Fc1) > 1/Tobs: print() print(.format(Fc0, Fc1, 1/Tobs)) if periEv==False: gammadot = 0.0 else: gammadot = eu.get_gammadot(F, mc, q, e0) if nset is not None: nharm = nset elif useFile: if e0 > 0.001 and e0 < 0.999: nharm = min(int(ecc_interp(e0)), nmax) + 1 elif e0 < 0.001: nharm = 3 else: nharm = nmax else: nharm = nmax splus, scross = eu.calculate_splus_scross(nharm, mc, dist, F, e0, toas, l0, gamma0, gammadot, inc) if psrTerm: pd *= eu.KPC2S tp = toas - pd * (1-cosMu) y = eu.solve_coupled_ecc_solution(F, e0, gamma0, l0, mc, q, N.array([0.0, tp.min()])) if N.any(y): Fp, ep, gp, lp = y[-1,:] gammadotp = eu.get_gammadot(Fp, mc, q, ep) if useFile: if ep > 0.001 and ep < 0.999: nharm = min(int(ecc_interp(ep)), nmax) elif ep < 0.001: nharm = 3 else: nharm = nmax else: nharm = nmax splusp, scrossp = eu.calculate_splus_scross(nharm, mc, dist, Fp, ep, toas, lp, gp, gammadotp, inc) rr = (fplus*cos2psi - fcross*sin2psi) * (splusp - splus) + \ (fplus*sin2psi + fcross*cos2psi) * (scrossp - scross) else: rr = N.zeros(len(p.toas)) else: rr = - (fplus*cos2psi - fcross*sin2psi) * splus - \ (fplus*sin2psi + fcross*cos2psi) * scross psr.stoas[:] += rr/86400
Simulate GW from eccentric SMBHB. Waveform models from Taylor et al. (2015) and Barack and Cutler (2004). WARNING: This residual waveform is only accurate if the GW frequency is not significantly evolving over the observation time of the pulsar. :param psr: pulsar object :param gwtheta: Polar angle of GW source in celestial coords [radians] :param gwphi: Azimuthal angle of GW source in celestial coords [radians] :param mc: Chirp mass of SMBMB [solar masses] :param dist: Luminosity distance to SMBMB [Mpc] :param F: Orbital frequency of SMBHB [Hz] :param inc: Inclination of GW source [radians] :param psi: Polarization of GW source [radians] :param gamma0: Initial angle of periastron [radians] :param e0: Initial eccentricity of SMBHB :param l0: Initial mean anomaly [radians] :param q: Mass ratio of SMBHB :param nmax: Number of harmonics to use in waveform decomposition :param nset: Fix the number of harmonics to be injected :param pd: Pulsar distance [kpc] :param periEv: Evolve the position of periapsis [boolean] :param psrTerm: Option to include pulsar term [boolean] :param tref: Fiducial time at which initial parameters are referenced [s] :param check: Check if frequency evolves significantly over obs. time :param useFile: Use pre-computed table of number of harmonics vs eccentricity :returns: Vector of induced residuals
380,747
def jsonify_payload(self):
    if isinstance(self.payload, string_types):
        return self.payload
    return json.dumps(self.payload, cls=StandardJSONEncoder)
Dump the payload to JSON
380,748
def _get_client(self):
    # The key tested in _storage_parameters was elided in the source;
    # 'security_token' is a plausible guess for selecting StsAuth.
    return (_oss.StsAuth if 'security_token' in self._storage_parameters
            else _oss.Auth if self._storage_parameters
            else _oss.AnonymousAuth)(**self._storage_parameters)
OSS2 Auth client Returns: oss2.Auth or oss2.StsAuth: client
380,749
def pool_info(name=None, **kwargs):
    result = {}
    conn = __get_conn(**kwargs)

    def _pool_extract_infos(pool):
        # State names, XPath, and dict keys were elided in the source;
        # reconstructed plausibly from the libvirt storage-pool API.
        states = ['inactive', 'building', 'running', 'degraded', 'inaccessible']
        infos = pool.info()
        state = states[infos[0]] if infos[0] < len(states) else 'unknown'
        desc = ElementTree.fromstring(pool.XMLDesc())
        path_node = desc.find('target/path')
        return {
            'uuid': pool.UUIDString(),
            'state': state,
            'capacity': infos[1],
            'allocation': infos[2],
            'free': infos[3],
            'autostart': pool.autostart(),
            'persistent': pool.isPersistent(),
            'target_path': path_node.text if path_node is not None else None,
            'type': desc.get('type')
        }

    try:
        pools = [pool for pool in conn.listAllStoragePools()
                 if name is None or pool.name() == name]
        result = {pool.name(): _pool_extract_infos(pool) for pool in pools}
    except libvirt.libvirtError as err:
        log.debug('Silenced libvirt error: %s', str(err))
    finally:
        conn.close()
    return result
Return information on a storage pool provided its name. :param name: libvirt storage pool name :param connection: libvirt connection URI, overriding defaults :param username: username to connect with, overriding defaults :param password: password to connect with, overriding defaults If no name is provided, return the infos for all defined storage pools. .. versionadded:: 2019.2.0 CLI Example: .. code-block:: bash salt '*' virt.pool_info default
380,750
def return_buffer_contents(self, frame, force_unescaped=False):
    # The generated source-code strings were elided in the source; they are
    # reconstructed plausibly in the Jinja2 style.
    if not force_unescaped:
        if frame.eval_ctx.volatile:
            self.writeline('if context.eval_ctx.autoescape:')
            self.indent()
            self.writeline('return Markup(concat(%s))' % frame.buffer)
            self.outdent()
            self.writeline('else:')
            self.indent()
            self.writeline('return concat(%s)' % frame.buffer)
            self.outdent()
            return
        elif frame.eval_ctx.autoescape:
            self.writeline('return Markup(concat(%s))' % frame.buffer)
            return
    self.writeline('return concat(%s)' % frame.buffer)
Return the buffer contents of the frame.
380,751
def _encode_secret_part_v2_v3(version, condition, root_key, ns):
    data = bytearray()
    data.append(version)
    encode_uvarint(len(root_key), data)
    data.extend(root_key)
    if version >= VERSION_3:
        encode_uvarint(len(ns), data)
        data.extend(ns)
    data.extend(condition.encode())
    return bytes(data)
Creates a version 2 or version 3 secret part of the third party caveat. The returned data is not encrypted. The format has the following packed binary fields: version 2 or 3 [1 byte] root key length [n: uvarint] root key [n bytes] namespace length [n: uvarint] (v3 only) namespace [n bytes] (v3 only) predicate [rest of message]
380,752
def ciphertext(self, be_secure=True):
    if be_secure and not self.__is_obfuscated:
        self.obfuscate()
    return self.__ciphertext
Return the ciphertext of the EncryptedNumber. Choosing a random number is slow. Therefore, methods like :meth:`__add__` and :meth:`__mul__` take a shortcut and do not follow Paillier encryption fully - every encrypted sum or product should be multiplied by r ** :attr:`~PaillierPublicKey.n` for random r < n (i.e., the result is obfuscated). Not obfuscating provides a big speed up in, e.g., an encrypted dot product: each of the product terms need not be obfuscated, since only the final sum is shared with others - only this final sum needs to be obfuscated. Not obfuscating is OK for internal use, where you are happy for your own computer to know the scalars you've been adding and multiplying to the original ciphertext. But this is *not* OK if you're going to be sharing the new ciphertext with anyone else. So, by default, this method returns an obfuscated ciphertext - obfuscating it if necessary. If instead you set `be_secure=False` then the ciphertext will be returned, regardless of whether it has already been obfuscated. We thought that this approach, while a little awkward, yields a safe default while preserving the option for high performance. Args: be_secure (bool): If any untrusted parties will see the returned ciphertext, then this should be True. Returns: an int, the ciphertext. If `be_secure=False` then it might be possible for attackers to deduce numbers involved in calculating this ciphertext.
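A hedged sketch with the python-paillier (phe) package, which this method appears to belong to:

from phe import paillier

public_key, private_key = paillier.generate_paillier_keypair()
enc = public_key.encrypt(42)
safe_c = enc.ciphertext()                 # obfuscated; safe to share
fast_c = enc.ciphertext(be_secure=False)  # may skip obfuscation; internal use only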
380,753
def make_site_obj(argdict):
    d = os.getcwd()
    # The argdict key was elided in the source; 'path' is a hypothetical name.
    if 'path' in argdict:
        d = argdict['path']
    try:
        s = s2site.Site(d)
    except:
        print "Could not instantiate site object."
        sys.exit()
    return s
Instantiate and return the site. This will be used for all commands
380,754
def fromstring(cls, dis_string):
    temp = tempfile.NamedTemporaryFile(delete=False)
    temp.write(dis_string)
    temp.close()
    dis_tree = cls(dis_filepath=temp.name)
    os.unlink(temp.name)
    return dis_tree
Create a DisRSTTree instance from a string containing a *.dis parse.
380,755
def RACCU_calc(TOP, P, POP):
    try:
        result = ((TOP + P) / (2 * POP))**2
        return result
    except Exception:
        return "None"
Calculate RACCU (Random accuracy unbiased). :param TOP: test outcome positive :type TOP : int :param P: condition positive :type P : int :param POP: population :type POP : int :return: RACCU as float
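A worked example of the formula ((TOP + P) / (2 * POP))**2 with made-up counts:

# TOP=30 test-outcome positives, P=25 condition positives, POP=100 samples:
# ((30 + 25) / 200)**2 = 0.275**2 = 0.075625
print(RACCU_calc(30, 25, 100))  # 0.075625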
380,756
def remove_fetcher(self, fetcher):
    self._lock.acquire()
    try:
        for t, f in list(self._active_fetchers):
            if f is fetcher:
                self._active_fetchers.remove((t, f))
                f._deactivated()
                return
    finally:
        self._lock.release()
Remove a running fetcher from the list of active fetchers. :Parameters: - `fetcher`: fetcher instance. :Types: - `fetcher`: `CacheFetcher`
380,757
def close(self, suppress_logging=False):
    for publisher in self.publishers:
        try:
            publisher.close()
        except Exception as e:
            # The message literal was elided in the source; reconstructed plausibly.
            self.logger.error('Exception on closing the publisher {0}: {1}'
                              .format(self.name, e),
                              exc_info=not suppress_logging)
    self.publishers.clear()
Purges all connections. The method closes the AMQP connection (disconnects).
380,758
def handle_key_rotate(self, now): to_rotate = False dfn = os.path.join(self.opts[], ) try: stats = os.stat(dfn) salt.utils.master.ping_all_connected_minions(self.opts)
Rotate the AES key.
380,759
def clone(cls, repo_location, repo_dir=None, branch_or_tag=None, temp=False):
    if temp:
        # The separator literals were elided in the source; '/' and '-' are
        # plausible reconstructions.
        reponame = repo_location.rsplit('/', 1)[-1]
        suffix = '-%s' % '-'.join(
            [str(x) for x in (reponame, branch_or_tag) if x])
        repo_dir = create_tempdir(suffix=suffix, delete=True)
    else:
        repo_dir = repo_dir or os.getcwd()
    git_clone(repo_dir, repo_location, branch_or_tag=branch_or_tag)
    return cls(repo_dir)
Clone repo at repo_location into repo_dir and checkout branch_or_tag. Defaults into current working directory if repo_dir is not supplied. If 'temp' is True, a temporary directory will be created for you and the repository will be cloned into it. The tempdir is scheduled for deletion (when the process exits) through an exit function registered with the atexit module. If 'temp' is True, repo_dir is ignored. If branch_or_tag is not specified, the HEAD of the primary branch of the cloned repo is checked out.
380,760
def trimquants(self, col: str, inf: float, sup: float):
    try:
        self.df = self._trimquants(col, inf, sup)
    except Exception as e:
        self.err(e, self.trimquants, "Can not trim quantiles")
Remove superior and inferior quantiles from the dataframe :param col: column name :type col: str :param inf: inferior quantile :type inf: float :param sup: superior quantile :type sup: float :example: ``ds.trimquants("Col 1", 0.01, 0.99)``
380,761
def mine_get(tgt, fun, tgt_type=, opts=None): mine, pass in the target, function to look up and the target type minions,minions/{0}mine') if not isinstance(mdata, dict): continue if not _ret_dict and functions and functions[0] in mdata: ret[minion] = mdata.get(functions) elif _ret_dict: for fun in functions: if fun in mdata: ret.setdefault(fun, {})[minion] = mdata.get(fun) return ret
Gathers the data from the specified minions' mine. Pass in the target, the function to look up, and the target type.
380,762
def GetElapsedMs(self):
    counter = c_uint64()
    ret = vmGuestLib.VMGuestLib_GetElapsedMs(self.handle.value, byref(counter))
    if ret != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(ret)
    return counter.value
Retrieves the number of milliseconds that have passed in the virtual machine since it last started running on the server. The count of elapsed time restarts each time the virtual machine is powered on, resumed, or migrated using VMotion. This value counts milliseconds, regardless of whether the virtual machine is using processing power during that time. You can combine this value with the CPU time used by the virtual machine (VMGuestLib_GetCpuUsedMs) to estimate the effective virtual machine CPU speed. cpuUsedMs is a subset of this value.
380,763
def moments_XX(X, remove_mean=False, modify_data=False, weights=None, sparse_mode=, sparse_tol=0.0, column_selection=None, diag_only=False): r if weights is not None: assert X.shape[0] == weights.shape[0], if diag_only and sparse_mode is not : if sparse_mode is : import warnings warnings.warn() sparse_mode = X0, mask_X, xconst = _sparsify(X, remove_mean=remove_mean, modify_data=modify_data, sparse_mode=sparse_mode, sparse_tol=sparse_tol) is_sparse = mask_X is not None X0, xconst = _copy_convert(X0, const=xconst, remove_mean=remove_mean, copy=is_sparse or (remove_mean and not modify_data)) w, sx, sx0_centered = _sum(X0, xmask=mask_X, xconst=xconst, symmetric=False, remove_mean=remove_mean, weights=weights) if remove_mean: _center(X0, w, sx, mask=mask_X, const=xconst, inplace=True) if column_selection is not None: if is_sparse: Xk = X[:, column_selection] mask_Xk = mask_X[column_selection] X0k = Xk[:, mask_Xk] xksum = sx0_centered[column_selection] xkconst = Xk[0, ~mask_Xk] X0k, xkconst = _copy_convert(X0k, const=xkconst, remove_mean=remove_mean, copy=True) C = _M2(X0, X0k, mask_X=mask_X, mask_Y=mask_Xk, xsum=sx0_centered, xconst=xconst, ysum=xksum, yconst=xkconst, weights=weights) else: X0k = X0[:, column_selection] C = _M2(X0, X0k, mask_X=mask_X, mask_Y=mask_X, xsum=sx0_centered, xconst=xconst, ysum=sx0_centered[column_selection], yconst=xconst, weights=weights) else: C = _M2(X0, X0, mask_X=mask_X, mask_Y=mask_X, xsum=sx0_centered, xconst=xconst, ysum=sx0_centered, yconst=xconst, weights=weights, diag_only=diag_only) return w, sx, C
r""" Computes the first two unnormalized moments of X Computes :math:`s = \sum_t x_t` and :math:`C = X^\top X` while exploiting zero or constant columns in the data matrix. Parameters ---------- X : ndarray (T, M) Data matrix remove_mean : bool True: remove column mean from the data, False: don't remove mean. modify_data : bool If remove_mean=True, the mean will be removed in the data matrix X, without creating an independent copy. This option is faster but might lead to surprises because your input array is changed. weights: None or ndarray(T, ) weights assigned to each trajectory point. If None, all data points have weight one. If ndarray, each data point is assigned a separate weight. sparse_mode : str one of: * 'dense' : always use dense mode * 'sparse' : always use sparse mode if possible * 'auto' : automatic sparse_tol: float Threshold for considering column to be zero in order to save computing effort when the data is sparse or almost sparse. If max(abs(X[:, i])) < sparse_tol, then row i (and also column i if Y is not given) of the covariance matrix will be set to zero. If Y is given and max(abs(Y[:, i])) < sparse_tol, then column i of the covariance matrix will be set to zero. column_selection: ndarray(k, dtype=int) or None Indices of those columns that are to be computed. If None, all columns are computed. diag_only: bool If True, the computation is restricted to the diagonal entries (autocorrelations) only. Returns ------- w : float statistical weight s : ndarray (M) sum C : ndarray (M, M) unnormalized covariance matrix
380,764
def _set_shape(self, shape): try: shape = (int(shape),) except TypeError: pass shp = list(shape) shp[0] = timetools.Period()/self.simulationstep shp[0] = int(numpy.ceil(round(shp[0], 10))) getattr(self.fastaccess, self.name).ratios = numpy.zeros( shp, dtype=float)
Private on purpose.
380,765
def get_me(self):
    # The config key and response key were elided in the source; 'me' and
    # 'name' are plausible reconstructions for the reddit API.
    response = self.request_json(self.config['me'])
    user = objects.Redditor(self, response['name'], response)
    user.__class__ = objects.LoggedInRedditor
    return user
Return a LoggedInRedditor object. Note: This function is only intended to be used with an 'identity' providing OAuth2 grant.
380,766
def colorize_text(self, text):
    result = text
    result = self.colorize_heading(result)
    result = self.colorize_block_indent(result)
    result = self.colorize_backticks(result)
    return result
Colorize the text.
380,767
def _values(self):
    if self.interpolate:
        return [
            val[0]
            for serie in self.series
            for val in serie.interpolated
        ]
    else:
        return super(Line, self)._values
Getter for series values (flattened)
380,768
def process_event(self, event, ipmicmd, seldata):
    # The dict keys were elided in the source; reconstructed plausibly from
    # IPMI event-data conventions.
    event['oem_handler'] = None
    evdata = event['event_data_bytes']
    if evdata[0] & 0b11000000 == 0b10000000:
        event['oem_byte2'] = evdata[1]
    if evdata[0] & 0b110000 == 0b100000:
        event['oem_byte3'] = evdata[2]
Modify an event according with OEM understanding. Given an event, allow an OEM module to augment it. For example, event data fields can have OEM bytes. Other times an OEM may wish to apply some transform to some field to suit their conventions.
380,769
def publish_topology_closed(self, topology_id):
    event = TopologyClosedEvent(topology_id)
    for subscriber in self.__topology_listeners:
        try:
            subscriber.closed(event)
        except Exception:
            _handle_exception()
Publish a TopologyClosedEvent to all topology listeners. :Parameters: - `topology_id`: A unique identifier for the topology this server is a part of.
380,770
def select_pane(self, target_pane):
    # The flag literals were elided in the source; reconstructed from the
    # docstring ('-U', '-D', '-L', '-R', '-l').
    if target_pane in ['-l', '-U', '-D', '-L', '-R']:
        proc = self.cmd('select-pane', '-t%s' % self.id, target_pane)
    else:
        proc = self.cmd('select-pane', '-t%s' % target_pane)
    if proc.stderr:
        raise exc.LibTmuxException(proc.stderr)
    return self.attached_pane
Return selected :class:`Pane` through ``$ tmux select-pane``. Parameters ---------- target_pane : str 'target_pane', '-U' ,'-D', '-L', '-R', or '-l'. Return ------ :class:`Pane`
380,771
def company(random=random, *args, **kwargs): return random.choice([ "faculty of applied {noun}", "{noun}{second_noun} studios", "{noun}{noun}{noun} studios", "{noun}shop", "{noun} studies department", "the law offices of {lastname}, {noun}, and {other_lastname}", "{country} ministry of {plural}", "{city} municipal {noun} department", "{city} plumbing", "department of {noun} studies", "{noun} management systems", "{plural} r us", "inter{verb}", "the {noun} warehouse", "integrated {noun} and {second_noun}", "the {noun} and {second_noun} pub", "e-cyber{verb}", "{adjective}soft", "{domain} Inc.", "{thing} incorporated", "{noun}co", ]).format(noun=noun(random=random), plural=plural(random=random), country=country(random=random), city=city(random=random), adjective=adjective(random=random), lastname=lastname(random=random), other_lastname=lastname(random=random), domain=domain(random=random), second_noun=noun(random=random), verb=verb(random=random), thing=thing(random=random))
Produce a company name >>> mock_random.seed(0) >>> company(random=mock_random) 'faculty of applied chimp' >>> mock_random.seed(1) >>> company(random=mock_random) 'blistersecret studios' >>> mock_random.seed(2) >>> company(random=mock_random) 'pooppooppoop studios' >>> mock_random.seed(3) >>> company(random=mock_random) 'britchesshop' >>> mock_random.seed(4) >>> company(random=mock_random, capitalize=True) 'Mystery Studies Department' >>> mock_random.seed(5) >>> company(random=mock_random, slugify=True) 'the-law-offices-of-magnificentslap-boatbench-and-smellmouse'
380,772
def _create_sot_file(self): try: self._delete_file(filename="sot_file") except Exception: pass commands = [ "terminal dont-ask", "checkpoint file sot_file", "no terminal dont-ask", ] self._send_command_list(commands)
Create Source of Truth file to compare.
380,773
def discover_modules(self):
    # A fragment of the doctest from the docstring that was fused into the
    # code has been removed; the suffix/prefix and separator literals were
    # elided in the source and are reconstructed plausibly.
    modules = [self.package_name]
    for dirpath, dirnames, filenames in os.walk(self.root_path):
        root_uri = self._path2uri(os.path.join(self.root_path, dirpath))
        filenames = [f[:-3] for f in filenames
                     if f.endswith('.py') and not f.startswith('_')]
        for filename in filenames:
            package_uri = '.'.join((dirpath, filename))
        for subpkg_name in dirnames + filenames:
            package_uri = '.'.join((root_uri, subpkg_name))
            package_path = self._uri2path(package_uri)
            if (package_path and
                    self._survives_exclude(package_uri, 'module')):
                modules.append(package_uri)
    return sorted(modules)
Return module sequence discovered from ``self.package_name`` Parameters ---------- None Returns ------- mods : sequence Sequence of module names within ``self.package_name`` Examples -------- >>> dw = ApiDocWriter('sphinx') >>> mods = dw.discover_modules() >>> 'sphinx.util' in mods True >>> dw.package_skip_patterns.append('\.util$') >>> 'sphinx.util' in dw.discover_modules() False >>>
380,774
def add_page_if_missing(request):
    try:
        page = Page.objects.for_request(request, best_match=True)
        # The context keys were elided in the source; 'feincms_page' is the
        # conventional key, the second is a plausible alias.
        return {
            'feincms_page': page,
            'page': page,
        }
    except Page.DoesNotExist:
        return {}
Returns ``feincms_page`` for request.
380,775
def update(self, reseed):
    if self._clear:
        for i in range(0, 3):
            self._screen.print_at(" ",
                                  self._x,
                                  self._screen.start_line + self._y + i)
        self._maybe_reseed(reseed)
    else:
        for i in range(0, 3):
            self._screen.print_at(chr(randint(32, 126)),
                                  self._x,
                                  self._screen.start_line + self._y + i,
                                  Screen.COLOUR_GREEN)
        for i in range(4, 6):
            self._screen.print_at(chr(randint(32, 126)),
                                  self._x,
                                  self._screen.start_line + self._y + i,
                                  Screen.COLOUR_GREEN,
                                  Screen.A_BOLD)
        self._maybe_reseed(reseed)
Update that trail! :param reseed: Whether we are in the normal reseed cycle or not.
380,776
def evaluate_world_model( real_env, hparams, world_model_dir, debug_video_path, split=tf.estimator.ModeKeys.EVAL, ): frame_stack_size = hparams.frame_stack_size rollout_subsequences = [] def initial_frame_chooser(batch_size): assert batch_size == len(rollout_subsequences) return np.stack([ [frame.observation.decode() for frame in subsequence[:frame_stack_size]] for subsequence in rollout_subsequences ]) env_fn = rl.make_simulated_env_fn_from_hparams( real_env, hparams, batch_size=hparams.wm_eval_batch_size, initial_frame_chooser=initial_frame_chooser, model_dir=world_model_dir ) sim_env = env_fn(in_graph=False) subsequence_length = int( max(hparams.wm_eval_rollout_ratios) * hparams.simulated_rollout_length ) rollouts = real_env.current_epoch_rollouts( split=split, minimal_rollout_frames=(subsequence_length + frame_stack_size) ) video_writer = common_video.WholeVideoWriter( fps=10, output_path=debug_video_path, file_format="avi" ) reward_accuracies_by_length = { int(ratio * hparams.simulated_rollout_length): [] for ratio in hparams.wm_eval_rollout_ratios } for _ in range(hparams.wm_eval_num_batches): rollout_subsequences[:] = random_rollout_subsequences( rollouts, hparams.wm_eval_batch_size, subsequence_length + frame_stack_size ) eval_subsequences = [ subsequence[(frame_stack_size - 1):] for subsequence in rollout_subsequences ] sim_init_obs = sim_env.reset() def decode_real_obs(index): return np.stack([ subsequence[index].observation.decode() for subsequence in eval_subsequences ]) real_init_obs = decode_real_obs(0) assert np.all(sim_init_obs == real_init_obs) debug_frame_batches = [] def append_debug_frame_batch(sim_obs, real_obs, sim_cum_rews, real_cum_rews, sim_rews, real_rews): rews = [[sim_cum_rews, sim_rews], [real_cum_rews, real_rews]] headers = [] for j in range(len(sim_obs)): local_nps = [] for i in range(2): img = PIL_Image().new("RGB", (sim_obs.shape[-2], 11),) draw = PIL_ImageDraw().Draw(img) draw.text((0, 0), "c:{:3}, r:{:3}".format(int(rews[i][0][j]), int(rews[i][1][j])), fill=(255, 0, 0)) local_nps.append(np.asarray(img)) local_nps.append(np.zeros_like(local_nps[0])) headers.append(np.concatenate(local_nps, axis=1)) errs = absolute_hinge_difference(sim_obs, real_obs) headers = np.stack(headers) debug_frame_batches.append( np.concatenate([headers, np.concatenate([sim_obs, real_obs, errs], axis=2)], axis=1) ) append_debug_frame_batch(sim_init_obs, real_init_obs, np.zeros(hparams.wm_eval_batch_size), np.zeros(hparams.wm_eval_batch_size), np.zeros(hparams.wm_eval_batch_size), np.zeros(hparams.wm_eval_batch_size)) (sim_cum_rewards, real_cum_rewards) = ( np.zeros(hparams.wm_eval_batch_size) for _ in range(2) ) for i in range(subsequence_length): actions = [subsequence[i].action for subsequence in eval_subsequences] (sim_obs, sim_rewards, _) = sim_env.step(actions) sim_cum_rewards += sim_rewards real_rewards = np.array([ subsequence[i + 1].reward for subsequence in eval_subsequences ]) real_cum_rewards += real_rewards for (length, reward_accuracies) in six.iteritems( reward_accuracies_by_length ): if i + 1 == length: reward_accuracies.append( np.sum(sim_cum_rewards == real_cum_rewards) / len(real_cum_rewards) ) real_obs = decode_real_obs(i + 1) append_debug_frame_batch(sim_obs, real_obs, sim_cum_rewards, real_cum_rewards, sim_rewards, real_rewards) for debug_frames in np.stack(debug_frame_batches, axis=1): debug_frame = None for debug_frame in debug_frames: video_writer.write(debug_frame) if debug_frame is not None: for _ in range(2): video_writer.write(np.zeros_like(debug_frame)) 
video_writer.finish_to_disk() return { "reward_accuracy/at_{}".format(length): np.mean(reward_accuracies) for (length, reward_accuracies) in six.iteritems( reward_accuracies_by_length ) }
Evaluate the world model (reward accuracy).
380,777
def get_too_few_non_zero_degree_day_warning( model_type, balance_point, degree_day_type, degree_days, minimum_non_zero ): warnings = [] n_non_zero = int((degree_days > 0).sum()) if n_non_zero < minimum_non_zero: warnings.append( EEMeterWarning( qualified_name=( "eemeter.caltrack_daily.{model_type}.too_few_non_zero_{degree_day_type}".format( model_type=model_type, degree_day_type=degree_day_type ) ), description=( "Number of non-zero daily {degree_day_type} values below accepted minimum." " Candidate fit not attempted.".format( degree_day_type=degree_day_type.upper() ) ), data={ "n_non_zero_{degree_day_type}".format( degree_day_type=degree_day_type ): n_non_zero, "minimum_non_zero_{degree_day_type}".format( degree_day_type=degree_day_type ): minimum_non_zero, "{degree_day_type}_balance_point".format( degree_day_type=degree_day_type ): balance_point, }, ) ) return warnings
Return an empty list or a single warning wrapped in a list regarding non-zero degree days for a set of degree days. Parameters ---------- model_type : :any:`str` Model type (e.g., ``'cdd_hdd'``). balance_point : :any:`float` The balance point in question. degree_day_type : :any:`str` The type of degree days (``'cdd'`` or ``'hdd'``). degree_days : :any:`pandas.Series` A series of degree day values. minimum_non_zero : :any:`int` Minimum allowable number of non-zero degree day values. Returns ------- warnings : :any:`list` of :any:`eemeter.EEMeterWarning` Empty list or list of single warning.
380,778
def setWidth(self, vehID, width):
    self._connection._sendDoubleCmd(
        tc.CMD_SET_VEHICLE_VARIABLE, tc.VAR_WIDTH, vehID, width)
setWidth(string, double) -> None

Sets the width in m for this vehicle.
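In TraCI this setter is normally reached through the vehicle domain. A minimal hedged sketch, assuming SUMO is installed, "scenario.sumocfg" is a placeholder config, and a vehicle with ID "veh0" departs within the first few steps:

import traci

traci.start(["sumo", "-c", "scenario.sumocfg"])  # placeholder scenario
for _ in range(10):
    traci.simulationStep()                       # let vehicles depart
traci.vehicle.setWidth("veh0", 2.5)              # assumes "veh0" exists
print(traci.vehicle.getWidth("veh0"))            # 2.5
traci.close()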
380,779
def get_preview_name(self):
    # Note: the separator and extension literals below were reconstructed;
    # the original string constants were stripped from this dump.
    if self.safe_type == EsaSafeType.OLD_TYPE:
        name = _edit_name(self.tile_id, AwsConstants.PVI, delete_end=True)
    else:
        name = '_'.join([self.tile_id.split('_')[1], self.get_datatake_time(),
                         AwsConstants.PVI])
    return '{}.jp2'.format(name)
Returns .SAFE name of full resolution L1C preview

:return: name of preview file
:rtype: str
380,780
def set_listener_policy(name, port, policies=None, region=None, key=None,
                        keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)

    if not exists(name, region, key, keyid, profile):
        return True
    if policies is None:
        policies = []
    try:
        conn.set_lb_policies_of_listener(name, port, policies)
        # Log message literals reconstructed; the originals were stripped.
        log.info('Set policies %s on ELB %s listener %s', policies, name, port)
    except boto.exception.BotoServerError as e:
        log.info('Failed to set policies %s on ELB %s listener %s: %s',
                 policies, name, port, e.message,
                 exc_info_on_loglevel=logging.DEBUG)
        return False
    return True
Set the policies of an ELB listener.

.. versionadded:: 2016.3.0

CLI example:

.. code-block:: bash

    salt myminion boto_elb.set_listener_policy myelb 443 "[policy1,policy2]"
380,781
def create(self, vals, check=True):
    # Note: dictionary keys, model names, and sequence codes below were
    # reconstructed; the original string literals were stripped from this
    # dump. The first condition is kept as upstream has it, although
    # `not 'service_lines'` is always False and likely intended as a
    # membership test.
    if not 'service_lines' and 'folio_id' in vals:
        tmp_room_lines = vals.get('room_lines', [])
        vals['order_policy'] = vals.get('hotel_policy', 'manual')
        vals.update({'room_lines': []})
        folio_id = super(HotelFolio, self).create(vals)
        for line in tmp_room_lines:
            line[2].update({'folio_id': folio_id})
        vals.update({'room_lines': tmp_room_lines})
        folio_id.write(vals)
    else:
        if not vals:
            vals = {}
        vals['name'] = self.env['ir.sequence'].next_by_code('hotel.folio')
        vals['duration'] = (vals.get('duration', 0.0) or
                            vals.get('duration_dummy', 0.0))
        folio_id = super(HotelFolio, self).create(vals)
        folio_room_line_obj = self.env['folio.room.line']
        h_room_obj = self.env['hotel.room']
        try:
            for rec in folio_id:
                if not rec.reservation_id:
                    for room_rec in rec.room_lines:
                        prod = room_rec.product_id.name
                        room_obj = h_room_obj.search([('name', '=', prod)])
                        room_obj.write({'isroom': False})
                        vals = {'room_id': room_obj.id,
                                'check_in': rec.checkin_date,
                                'check_out': rec.checkout_date,
                                'folio_id': rec.id,
                                }
                        folio_room_line_obj.create(vals)
        except Exception:
            for rec in folio_id:
                for room_rec in rec.room_lines:
                    prod = room_rec.product_id.name
                    room_obj = h_room_obj.search([('name', '=', prod)])
                    room_obj.write({'isroom': False})
                    vals = {'room_id': room_obj.id,
                            'check_in': rec.checkin_date,
                            'check_out': rec.checkout_date,
                            'folio_id': rec.id,
                            }
                    folio_room_line_obj.create(vals)
    return folio_id
Overrides orm create method.

@param self: The object pointer
@param vals: dictionary of fields value.
@return: new record set for hotel folio.
380,782
def create_index(self):
    es = self._init_connection()
    if not es.indices.exists(index=self.index):
        es.indices.create(index=self.index, body=self.settings)
Override to provide code for creating the target index. By default it will be created without any special settings or mappings.
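A subclass can override this hook to create the index with explicit settings and mappings. A hedged sketch; `BaseIndexer` stands in for the class above and the mapping body is purely illustrative:

class ArticleIndexer(BaseIndexer):
    index = "articles"
    settings = {
        "settings": {"number_of_shards": 1, "number_of_replicas": 0},
        "mappings": {
            "properties": {
                "title": {"type": "text"},
                "published": {"type": "date"},
            }
        },
    }

    def create_index(self):
        # Same logic as the default, but the body now carries explicit
        # settings and mappings instead of an empty configuration.
        es = self._init_connection()
        if not es.indices.exists(index=self.index):
            es.indices.create(index=self.index, body=self.settings)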
380,783
def get_account_info(self):
    headers = self._manager.get_account_headers()
    return (headers.get("x-account-container-count"),
            headers.get("x-account-bytes-used"))
Returns a tuple for the number of containers and total bytes in the account.
380,784
def get_package_path(name):
    name = name.lower()
    pkg = importlib.import_module(name)
    return Path(pkg.__file__).parent
Get the path to an installed package.

name (unicode): Package name.
RETURNS (Path): Path to installed package.
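For example, assuming the helper above is in scope and numpy is installed:

print(get_package_path("numpy"))
# e.g. /usr/lib/python3/dist-packages/numpy

# Equivalent one-off without the helper:
import importlib
from pathlib import Path
print(Path(importlib.import_module("numpy").__file__).parent)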
380,785
def write_incron_file_verbose(user, path):
    # The docstring leaked into this code column in the dump; the body is a
    # single call through the salt loader dunder.
    return __salt__['cmd.run_all'](_get_incron_cmdstr(path), runas=user,
                                   python_shell=False)
Writes the contents of a file to a user's incrontab and returns an error
message on error.

CLI Example:

.. code-block:: bash

    salt '*' incron.write_incron_file_verbose root /tmp/new_incron
380,786
def I(r, limbdark):
    if limbdark.ldmodel == QUADRATIC:
        u1 = limbdark.u1
        u2 = limbdark.u2
        return (1 - u1 * (1 - np.sqrt(1 - r**2))
                - u2 * (1 - np.sqrt(1 - r**2))**2) / (1 - u1/3 - u2/6) / np.pi
    elif limbdark.ldmodel == KIPPING:
        a = np.sqrt(limbdark.q1)
        b = 2 * limbdark.q2
        u1 = a * b
        u2 = a * (1 - b)
        return (1 - u1 * (1 - np.sqrt(1 - r**2))
                - u2 * (1 - np.sqrt(1 - r**2))**2) / (1 - u1/3 - u2/6) / np.pi
    elif limbdark.ldmodel == NONLINEAR:
        # Exception messages reconstructed; the original literals were stripped.
        raise Exception('Nonlinear limb darkening is not yet supported.')
    else:
        raise Exception('Unknown limb darkening model.')
The standard quadratic limb darkening law.

:param ndarray r: The radius vector
:param limbdark: A :py:class:`pysyzygy.transit.LIMBDARK` instance \
containing the limb darkening law information

:returns: The stellar intensity as a function of `r`
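A useful sanity check is that the quadratic law integrates to unit flux over the stellar disk: the integral of I(r)*2*pi*r dr from 0 to 1 equals 1, which is exactly what the (1 - u1/3 - u2/6) factor normalizes. A minimal numerical sketch, assuming the function above is in scope and mocking the LIMBDARK container and the pysyzygy model constants:

import numpy as np

QUADRATIC, KIPPING, NONLINEAR = 0, 1, 2  # stand-ins for pysyzygy constants

class FakeLimbDark:
    ldmodel = QUADRATIC
    u1, u2 = 0.4, 0.26

r = np.linspace(0.0, 1.0, 100001)
flux = np.trapz(I(r, FakeLimbDark()) * 2 * np.pi * r, r)
print(flux)  # ~1.0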
380,787
def _output_function_label(self):
    if self.asm_code:
        return True
    if not self.blocks:
        return True
    the_block = next((b for b in self.blocks if b.addr == self.addr), None)
    if the_block is None:
        return True
    if not the_block.instructions:
        return True
    if not the_block.instructions[0].labels:
        return True
    return False
Determines if we want to output the function label in assembly. We output
the function label only when the original instruction does not output the
function label.

:return: True if we should output the function label, False otherwise.
:rtype: bool
380,788
def create(self, acl=None):
    parent, name = getParentAndBase(self.path)
    # Payload keys reconstructed; the original string literals were stripped.
    json = {'name': name}
    if acl is not None:
        json['acl'] = acl.to_api_param()
    response = self.client.postJsonHelper(DataDirectory._getUrl(parent), json, False)
    if response.status_code != 200:
        raise DataApiError("Directory creation failed: " + str(response.content))
Creates a directory, optionally include Acl argument to set permissions
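Typical client-side usage looks like this hedged sketch (the API key and data path are placeholders):

import Algorithmia

client = Algorithmia.client("YOUR_API_KEY")
folder = client.dir("data://.my/sample")
if not folder.exists():
    folder.create()  # optionally pass an Acl, e.g. folder.create(acl=...)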
380,789
def extract_files_from_dict(d):
    files = {}
    for key, value in six.iteritems(d):
        if isinstance(value, dict):
            files[key] = extract_files_from_dict(value)
        elif is_file_like(value):
            files[key] = value
    return files
Return any file objects from the provided dict.

>>> extract_files_from_dict({
...     'oauth_token': 'foo',
...     'track': {
...         'title': 'bar',
...         'asset_data': open('setup.py', 'rb')
...     }})  # doctest:+ELLIPSIS
{'track': {'asset_data': <...}}
380,790
def from_raw_script(cls, raw_script):
    script = format_raw_script(raw_script)
    if not script:
        raise EmptyCommand

    expanded = shell.from_shell(script)
    output = get_output(script, expanded)
    return cls(expanded, output)
Creates instance of `Command` from a list of script parts.

:type raw_script: [basestring]
:rtype: Command
:raises: EmptyCommand
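In thefuck the raw script arrives as argv-style parts. A hedged usage sketch; it assumes a configured shell environment, since `from_shell` expands aliases and `get_output` re-runs the command:

command = Command.from_raw_script(["git", "brnch"])
print(command.script)  # "git brnch" after shell alias expansion
print(command.output)  # captured output, e.g. "git: 'brnch' is not a git command..."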
380,791
def are_equal(self, sp1, sp2):
    for s1 in sp1.keys():
        spin1 = getattr(s1, "spin", 0)
        oxi1 = getattr(s1, "oxi_state", 0)
        for s2 in sp2.keys():
            spin2 = getattr(s2, "spin", 0)
            oxi2 = getattr(s2, "oxi_state", 0)
            if (s1.symbol == s2.symbol and oxi1 == oxi2
                    and spin2 == -spin1):
                break
        else:
            return False
    return True
True if species are exactly the same (i.e., Fe2+ == Fe2+ but not Fe3+)
and the spins are reversed, i.e., spin up maps to spin down, and vice
versa.

Args:
    sp1: First species. A dict of {specie/element: amt} as per the
        definition in Site and PeriodicSite.
    sp2: Second species. A dict of {specie/element: amt} as per the
        definition in Site and PeriodicSite.

Returns:
    Boolean indicating whether species are equal.
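Because the comparator reads only `symbol`, `oxi_state`, and `spin` via `getattr`, its behavior can be illustrated with lightweight stand-in species. Real usage passes pymatgen Species dicts, and the comparator class name (`SpinComparator`) is assumed here:

from collections import namedtuple

Sp = namedtuple("Sp", ["symbol", "oxi_state", "spin"])

up = {Sp("Fe", 2, 5): 1}
down = {Sp("Fe", 2, -5): 1}

comparator = SpinComparator()
print(comparator.are_equal(up, down))  # True: same species, reversed spins
print(comparator.are_equal(up, up))    # False: spins are not reversed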
380,792
def output(self, _filename):
    # Table headers reconstructed; the original string literals were stripped.
    txt = ''
    for c in self.contracts:
        txt += "\nContract %s\n" % c.name
        table = PrettyTable(['Variable', 'Dependencies'])
        for v in c.state_variables:
            table.add_row([v.name, _get(v, c)])
        txt += str(table)
        txt += "\n"
        for f in c.functions_and_modifiers_not_inherited:
            txt += "\nFunction %s\n" % f.full_name
            table = PrettyTable(['Variable', 'Dependencies'])
            for v in f.variables:
                table.add_row([v.name, _get(v, f)])
            for v in c.state_variables:
                table.add_row([v.canonical_name, _get(v, f)])
            txt += str(table)
    self.info(txt)
_filename is not used

Args:
    _filename(string)
380,793
def radviz(X, y=None, ax=None, features=None, classes=None,
           color=None, colormap=None, alpha=1.0, **kwargs):
    # Instantiate the visualizer
    visualizer = RadialVisualizer(
        ax, features, classes, color, colormap, alpha, **kwargs
    )

    # Fit and transform the visualizer (calls draw)
    visualizer.fit(X, y, **kwargs)
    visualizer.transform(X)

    # Return the axes object on the visualizer
    return visualizer.ax
Displays each feature as an axis around a circle surrounding a scatter
plot whose points are each individual instance.

This helper function is a quick wrapper to utilize the RadialVisualizer
(Transformer) for one-off analysis.

Parameters
----------
X : ndarray or DataFrame of shape n x m
    A matrix of n instances with m features

y : ndarray or Series of length n
    An array or series of target or class values

ax : matplotlib Axes, default: None
    The axes to plot the figure on.

features : list of strings, default: None
    The names of the features or columns

classes : list of strings, default: None
    The names of the classes in the target

color : list or tuple of colors, default: None
    Specify the colors for each individual class

colormap : string or matplotlib cmap, default: None
    Sequential colormap for continuous target

alpha : float, default: 1.0
    Specify a transparency where 1 is completely opaque and 0 is completely
    transparent. This property makes densely clustered points more visible.

Returns
-------
ax : matplotlib axes
    Returns the axes that the parallel coordinates were drawn on.
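A hedged quick-method example on the iris dataset; it assumes yellowbrick and scikit-learn are installed and that the quick method is exported from yellowbrick.features:

from sklearn.datasets import load_iris
from yellowbrick.features import radviz

data = load_iris()
ax = radviz(
    data.data, data.target,
    features=list(data.feature_names),
    classes=list(data.target_names),
)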
380,794
def _load_assembly_mapping_data(filename):
    try:
        assembly_mapping_data = {}

        with tarfile.open(filename, "r") as tar:
            for member in tar.getmembers():
                if ".json" in member.name:
                    with tar.extractfile(member) as tar_file:
                        tar_bytes = tar_file.read()
                    assembly_mapping_data[member.name.split(".")[0]] = json.loads(
                        tar_bytes.decode("utf-8")
                    )

        return assembly_mapping_data
    except Exception as err:
        print(err)
        return None
Load assembly mapping data.

Parameters
----------
filename : str
    path to compressed archive with assembly mapping data

Returns
-------
assembly_mapping_data : dict
    dict of assembly maps if loading was successful, else None

Notes
-----
Keys of returned dict are chromosomes and values are the corresponding
assembly map.
380,795
def _validate_data(data):
    data_keys = set(data.keys())
    extra_keys = data_keys - set(ALLOWED_KEYS)
    missing_keys = set(REQUIRED_KEYS) - data_keys

    # Error message literals reconstructed; the originals were stripped.
    if extra_keys:
        raise ValueError(
            'Invalid keys provided: {}'.format(', '.join(extra_keys))
        )

    if missing_keys:
        raise ValueError(
            'Missing required keys: {}'.format(', '.join(missing_keys))
        )
Validates the given data and raises an error if any non-allowed keys are
provided or any required keys are missing.

:param data: Data to send to API
:type data: dict
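A hedged sketch of the behavior; ALLOWED_KEYS and REQUIRED_KEYS are module-level constants in the original, so the values below are made up:

ALLOWED_KEYS = ("title", "body", "tags")
REQUIRED_KEYS = ("title", "body")

_validate_data({"title": "hi", "body": "..."})  # passes silently

try:
    _validate_data({"title": "hi", "extra": 1})
except ValueError as err:
    print(err)  # reports the non-allowed key 'extra' (checked first)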
380,796
def _analyze_variable_attributes(self, attributes):
    # The 'indexed' key literal was reconstructed; the original was stripped.
    if 'indexed' in attributes:
        self._indexed = attributes['indexed']
    super(EventVariableSolc, self)._analyze_variable_attributes(attributes)
Analyze event variable attributes

:param attributes: The event variable attributes to parse.
:return: None
380,797
def _EvaluateElementsDataSize(self, context):
    elements_data_size = None
    if self._data_type_definition.elements_data_size:
        elements_data_size = self._data_type_definition.elements_data_size

    elif self._data_type_definition.elements_data_size_expression:
        expression = self._data_type_definition.elements_data_size_expression
        namespace = {}
        if context and context.values:
            namespace.update(context.values)
        # Block access to built-in functions from within the expression; the
        # '__builtins__' key literal was reconstructed from this stripped dump.
        namespace['__builtins__'] = {}

        try:
            elements_data_size = eval(expression, namespace)  # pylint: disable=eval-used
        except Exception as exception:
            # Message literals reconstructed; the originals were stripped.
            raise errors.MappingError(
                'Unable to determine elements data size with error: {0!s}'.format(
                    exception))

    if elements_data_size is None or elements_data_size < 0:
        raise errors.MappingError(
            'Invalid elements data size: {0!s}'.format(elements_data_size))

    return elements_data_size
Evaluates elements data size.

Args:
    context (DataTypeMapContext): data type map context.

Returns:
    int: elements data size.

Raises:
    MappingError: if the elements data size cannot be determined.
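The expression path relies on eval with a namespace whose __builtins__ entry is emptied, so the expression can only see values supplied by the data type map context. A standalone sketch of that restricted-eval pattern:

namespace = {"file_size": 4096, "header_size": 64}
namespace["__builtins__"] = {}  # block access to built-in functions

print(eval("file_size - header_size", namespace))  # 4032

try:
    eval("open('/etc/passwd')", namespace)
except NameError as err:
    print(err)  # 'open' is not defined: builtins are blocked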
380,798
def p0f_impersonate(pkt, osgenre=None, osdetails=None, signature=None,
                    extrahops=0, mtu=1500, uptime=None):
    # Note: many string literals (TCP option names, quirk letters, window-size
    # prefixes) were stripped from this dump; they have been reconstructed to
    # match the upstream scapy p0f module, and the collapsed M/W/T option
    # branches have been restored accordingly.
    pkt = pkt.copy()
    while pkt.haslayer(IP) and pkt.haslayer(TCP):
        pkt = pkt.getlayer(IP)
        if isinstance(pkt.payload, TCP):
            break
        pkt = pkt.payload

    if not isinstance(pkt, IP) or not isinstance(pkt.payload, TCP):
        raise TypeError("Not a TCP/IP packet")

    db = p0f_selectdb(pkt.payload.flags)
    if osgenre:
        pb = db.get_base()
        if pb is None:
            pb = []
        pb = [x for x in pb if x[6] == osgenre]
        if osdetails:
            pb = [x for x in pb if x[7] == osdetails]
    elif signature:
        pb = [signature]
    else:
        pb = p0f_getlocalsigs()[db]
    if db == p0fr_kdb:
        # 'K' quirk <=> RST+ACK
        if pkt.payload.flags & 0x4 == 0x4:
            pb = [x for x in pb if 'K' in x[5]]
        else:
            pb = [x for x in pb if 'K' not in x[5]]
    if not pb:
        raise Scapy_Exception("No match in the p0f database")
    pers = pb[random.randint(0, len(pb) - 1)]

    # Take the options already set as "hints" to use in the new packet if
    # they are valid integers.
    orig_opts = dict(pkt.payload.options)
    int_only = lambda val: val if isinstance(val, six.integer_types) else None
    mss_hint = int_only(orig_opts.get('MSS'))
    wscale_hint = int_only(orig_opts.get('WScale'))
    ts_hint = [int_only(o) for o in orig_opts.get('Timestamp', (None, None))]

    # TCP options
    options = []
    if pers[4] != '.':
        for opt in pers[4].split(','):
            if opt[0] == 'M':
                # MSS might have a maximum size because of the window size
                # specification
                if pers[0][0] == 'S':
                    maxmss = (2**16 - 1) // int(pers[0][1:])
                else:
                    maxmss = (2**16 - 1)
                # disregard hint if out of range
                if mss_hint and not 0 <= mss_hint <= maxmss:
                    mss_hint = None
                if mss_hint is not None:
                    options.append(('MSS', mss_hint))
                else:
                    options.append(('MSS', random.randint(1, maxmss)))
            elif opt[0] == 'W':
                if wscale_hint and not 0 <= wscale_hint < 2**8:
                    wscale_hint = None
                if wscale_hint is not None:
                    options.append(('WScale', wscale_hint))
                else:
                    options.append(('WScale', RandByte()))
            elif opt == 'T0':
                options.append(('Timestamp', (0, 0)))
            elif opt == 'T':
                # determine the first timestamp
                if uptime is not None:
                    ts_a = uptime
                elif ts_hint[0] and 0 < ts_hint[0] < 2**32:
                    ts_a = ts_hint[0]
                else:
                    ts_a = random.randint(120, 100 * 60 * 60 * 24 * 365)
                # determine the second timestamp
                if 'T' not in pers[5]:
                    ts_b = 0
                elif ts_hint[1] and 0 < ts_hint[1] < 2**32:
                    ts_b = ts_hint[1]
                else:
                    ts_b = random.randint(1, 2**32 - 1)
                options.append(('Timestamp', (ts_a, ts_b)))
            elif opt == 'S':
                options.append(('SAckOK', ''))
            elif opt == 'N':
                options.append(('NOP', None))
            elif opt == 'E':
                options.append(('EOL', None))
            elif opt[0] == '?':
                if int(opt[1:]) in TCPOptions[0]:
                    optname = TCPOptions[0][int(opt[1:])][0]
                    optstruct = TCPOptions[0][int(opt[1:])][1]
                    options.append((optname,
                                    struct.unpack(optstruct,
                                                  RandString(struct.calcsize(optstruct))._fix())))
                else:
                    options.append((int(opt[1:]), ''))
            else:
                warning("unhandled TCP option " + opt)
        pkt.payload.options = options

    # window size
    if pers[0] == '*':
        pkt.payload.window = RandShort()
    elif pers[0].isdigit():
        pkt.payload.window = int(pers[0])
    elif pers[0][0] == '%':
        coef = int(pers[0][1:])
        pkt.payload.window = coef * RandNum(min=1, max=(2**16 - 1) // coef)
    elif pers[0][0] == 'T':
        pkt.payload.window = mtu * int(pers[0][1:])
    elif pers[0][0] == 'S':
        # needs MSS set
        mss = [x for x in options if x[0] == 'MSS']
        if not mss:
            raise Scapy_Exception("TCP window value requires MSS, "
                                  "and MSS option not set")
        pkt.payload.window = mss[0][1] * int(pers[0][1:])
    else:
        raise Scapy_Exception('Unhandled window size specification')

    # TTL and DF flag
    pkt.ttl = pers[1] - extrahops
    pkt.flags |= (2 * pers[2])

    # quirks
    if pers[5] != '.':
        for qq in pers[5]:
            if qq == 'Z':
                pkt.id = 0
            elif qq == 'U':
                pkt.payload.urgptr = RandShort()
            elif qq == 'A':
                pkt.payload.ack = RandInt()
            elif qq == 'F':
                if db == p0fo_kdb:
                    pkt.payload.flags |= 0x20
                else:
                    pkt.payload.flags |= random.choice([8, 32, 40])
            elif qq == 'D' and db != p0fo_kdb:
                pkt /= conf.raw_layer(load=RandString(random.randint(1, 10)))
            elif qq == 'Q':
                pkt.payload.seq = pkt.payload.ack
    if '0' in pers[5]:
        pkt.payload.seq = 0
    elif pkt.payload.seq == 0:
        pkt.payload.seq = RandInt()

    while pkt.underlayer:
        pkt = pkt.underlayer
    return pkt
Modifies pkt so that p0f will think it has been sent by a specific OS. If osdetails is None, then we randomly pick up a personality matching osgenre. If osgenre and signature are also None, we use a local signature (using p0f_getlocalsigs). If signature is specified (as a tuple), we use the signature. For now, only TCP Syn packets are supported. Some specifications of the p0f.fp file are not (yet) implemented.
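A hedged usage sketch with scapy; it assumes the p0f module ships with your scapy install and that a p0f fingerprint database is available on the system:

from scapy.all import IP, TCP
from scapy.modules.p0f import p0f_impersonate

syn = IP(dst="192.0.2.1") / TCP(dport=80, flags="S")
spoofed = p0f_impersonate(syn, osgenre="Linux")
spoofed.show()
# send(spoofed)  # uncomment to emit the packet (requires privileges)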
380,799
def GetLocations():
    # API endpoint, result keys, and message literals reconstructed; the
    # originals were stripped from this dump.
    r = clc.v1.API.Call('post', 'Account/GetLocations', {})
    if r['Success'] != True:
        if clc.args:
            clc.v1.output.Status('ERROR', 3, 'Error calling %s: %s (%s)' %
                                 ('Account/GetLocations', r['Message'], r['StatusCode']))
        raise Exception('Error calling %s: %s (%s)' %
                        ('Account/GetLocations', r['Message'], r['StatusCode']))
    elif int(r['StatusCode']) == 0:
        clc.LOCATIONS = [x['Alias'] for x in r['Locations']]
        return(r['Locations'])
Return all cloud locations available to the calling alias.