Dataset columns: Unnamed: 0 (int64, values 0 to 389k), code (string, lengths 26 to 79.6k), docstring (string, lengths 1 to 46.9k)
380,500
def killCells(i, options, tm):
    if options.simulation == "killer":
        if i == options.switchover:
            print "i=", i, "Killing cells for the first time!"
            tm.killCells(percent=options.noise)
        if i == options.secondKill:
            print "i=", i, "Killing cells again up to", options.secondNoise
            tm.killCells(percent=options.secondNoise)
    elif options.simulation == "killingMeSoftly" and (i % 100 == 0):
        steps = (options.secondKill - options.switchover) / 100
        nsteps = (options.secondNoise - options.noise) / steps
        noise = options.noise + nsteps * (i - options.switchover) / 100
        if i in xrange(options.switchover, options.secondKill + 1):
            print "i=", i, "Killing cells!"
            tm.killCells(percent=noise)
Kill cells as appropriate
380,501
def query_by_account(self, account_id, end_time=None, start_time=None):
    path = {}
    data = {}
    params = {}
    path["account_id"] = account_id
    if start_time is not None:
        params["start_time"] = start_time
    if end_time is not None:
        params["end_time"] = end_time
    self.logger.debug("GET /api/v1/audit/authentication/accounts/{account_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/audit/authentication/accounts/{account_id}".format(**path), data=data, params=params, no_data=True)
Query by account. List authentication events for a given account.
380,502
def psetex(self, key, milliseconds, value):
    # NOTE: the byte-string literals were lost in extraction; b'PSETEX' and
    # the b'OK' success reply are reconstructed from the Redis protocol.
    return self._execute(
        [b'PSETEX', key, ascii(milliseconds), value], b'OK')
:meth:`~tredis.RedisClient.psetex` works exactly like :meth:`~tredis.RedisClient.setex` with the sole difference that the expire time is specified in milliseconds instead of seconds. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to set :type key: :class:`str`, :class:`bytes` :param int milliseconds: Number of milliseconds for TTL :param value: The value to set :type value: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError`
380,503
def bdecode(text):
    # NOTE: the marker-character literals were lost in extraction; they are
    # fully determined by the bencode format ('i', 'l', 'd', 'e', ':').
    text = text.decode()

    def bdecode_next(start):
        if text[start] == 'i':
            end = text.find('e', start)
            return int(text[start+1:end], 10), end + 1
        if text[start] == 'l':
            res = []
            start += 1
            while text[start] != 'e':
                elem, start = bdecode_next(start)
                res.append(elem)
            return res, start + 1
        if text[start] == 'd':
            res = {}
            start += 1
            while text[start] != 'e':
                key, start = bdecode_next(start)
                value, start = bdecode_next(start)
                res[key] = value
            return res, start + 1
        lenend = text.find(':', start)
        length = int(text[start:lenend], 10)
        end = lenend + length + 1
        return text[lenend+1:end], end

    return bdecode_next(0)[0]
Decodes a bencoded bytearray and returns it as a python object
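A quick usage sketch of the decoder above; the byte strings are hand-built bencodings ('i...e' integers, 'l...e' lists, 'd...e' dicts, length-prefixed strings):
print(bdecode(b'd3:bar4:spam3:fooi42ee'))  # {'bar': 'spam', 'foo': 42}
print(bdecode(b'l4:spami-7ee'))            # ['spam', -7]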
380,504
def published_tracks(self):
    if self._published_tracks is None:
        self._published_tracks = PublishedTrackList(
            self._version,
            room_sid=self._solution['room_sid'],
            participant_sid=self._solution['participant_sid'],
        )
    return self._published_tracks
Access the published_tracks :returns: twilio.rest.video.v1.room.room_participant.room_participant_published_track.PublishedTrackList :rtype: twilio.rest.video.v1.room.room_participant.room_participant_published_track.PublishedTrackList
380,505
def values(self, *fields):
    if not fields:
        fields = self._get_simple_fields()
    fields = self._coerce_fields_parameters(fields)
    self._instances = False
    # NOTE: the dict-key and mode literals were lost in extraction;
    # 'fields'/'mode'/'values' below are reconstructed guesses.
    self._values = {'fields': fields, 'mode': 'values'}
    return self
Ask the collection to return a list of dict of given fields for each instance found in the collection. If no fields are given, all "simple value" fields are used.
380,506
def track(self, event_key, user_id, attributes=None, event_tags=None):
    # NOTE: the message literals below were lost in extraction and are
    # reconstructed to match the Optimizely SDK's logging conventions.
    if not self.is_valid:
        self.logger.error(enums.Errors.INVALID_DATAFILE.format('track'))
        return
    if not validator.is_non_empty_string(event_key):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('event_key'))
        return
    if not isinstance(user_id, string_types):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id'))
        return
    if not self._validate_user_inputs(attributes, event_tags):
        return
    event = self.config.get_event(event_key)
    if not event:
        self.logger.info('Not tracking user "%s" for event "%s".' % (user_id, event_key))
        return
    conversion_event = self.event_builder.create_conversion_event(
        event_key, user_id, attributes, event_tags)
    self.logger.info('Tracking event "%s" for user "%s".' % (event_key, user_id))
    self.logger.debug('Dispatching conversion event to URL %s with params %s.' % (
        conversion_event.url, conversion_event.params))
    try:
        self.event_dispatcher.dispatch_event(conversion_event)
    except:
        self.logger.exception('Unable to dispatch conversion event!')
    self.notification_center.send_notifications(
        enums.NotificationTypes.TRACK, event_key, user_id,
        attributes, event_tags, conversion_event)
Send conversion event to Optimizely. Args: event_key: Event key representing the event which needs to be recorded. user_id: ID for user. attributes: Dict representing visitor attributes and values which need to be recorded. event_tags: Dict representing metadata associated with the event.
380,507
def constraint_from_choices(cls, value_type: type, choices: collections.Sequence):
    # NOTE: the separator, error-message and name/doc format literals were
    # lost in extraction; the reconstructions below follow the docstring.
    choices_str = ', '.join(map(str, choices))

    def constraint(value):
        value = value_type(value)
        if value not in choices:
            raise ParameterError('value must be one of: %s' % choices_str)
        return value

    constraint.__name__ = '%s_choices' % value_type.__name__
    constraint.__doc__ = 'choices: %s' % choices_str
    return constraint
Returns a constraint callable based on choices of a given type
380,508
def validate(self):
    # NOTE: the method-prefix and join literals were lost in extraction;
    # 'is_valid' is reconstructed from the docstring.
    valids = [getattr(self, valid)
              for valid in sorted(dir(self.__class__))
              if valid.startswith('is_valid')]
    for is_valid in valids:
        if not is_valid():
            docstring = ' '.join(
                line.strip() for line in is_valid.__doc__.splitlines())
            doc = docstring.format(**vars(self))
            raise ValueError(doc)
Apply the `is_valid` methods to self and possibly raise a ValueError.
380,509
def _set_random_detect(self, v, load=False):
    # NOTE: most string literals (extension-dict keys, namespace/module
    # names, error strings) were lost in extraction; the placeholders below
    # only restore the structure of this generated pyangbind setter.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v, base=random_detect.random_detect, is_container='container',
            presence=False, yang_name="random-detect",
            rest_name="random-detect", parent=self,
            path_helper=self._path_helper, extmethods=self._extmethods,
            register_paths=True,
            extensions=None,        # original extension dict lost in extraction
            namespace=None,         # original namespace string lost
            defining_module=None,   # original module name lost
            yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': 'random_detect must be of a type compatible with container',
            'defined-type': "container",
            'generated-type': 'YANGDynClass(...)',  # original repr lost
        })
    self.__random_detect = t
    if hasattr(self, '_set'):
        self._set()
Setter method for random_detect, mapped from YANG variable /interface/ethernet/qos/random_detect (container) If this variable is read-only (config: false) in the source YANG file, then _set_random_detect is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_random_detect() directly.
380,510
def _IncrementNestLevel():
    if getattr(_import_local, 'nest_level', None) is None:
        _import_local.nest_level = 0
    if _import_local.nest_level == 0:
        _import_local.names = set()
    _import_local.nest_level += 1
Increments the per thread nest level of imports.
380,511
def get_points(self, measurement=None, tags=None):
    # NOTE: the string literals and one branch of this function were lost in
    # extraction; the reconstruction below follows the influxdb-python
    # ResultSet.get_points logic.
    if not isinstance(measurement, (bytes, type(b''.decode()), type(None))):
        raise TypeError('measurement must be an str or None')
    for series in self._get_series():
        series_name = series.get('measurement', series.get('name', 'results'))
        if series_name is None:
            # a "system" query which doesn't return a name attribute
            if tags is None:
                for item in self._get_points_for_series(series):
                    yield item
        elif measurement in (None, series_name):
            series_tags = series.get('tags', {})
            for item in self._get_points_for_series(series):
                if tags is None or \
                        self._tag_matches(item, tags) or \
                        self._tag_matches(series_tags, tags):
                    yield item
Return a generator for all the points that match the given filters. :param measurement: The measurement name :type measurement: str :param tags: Tags to look for :type tags: dict :return: Points generator
380,512
def check_partition_column(partition_column, cols):
    for k, v in cols.items():
        if k == partition_column:
            if v == "int":
                return
            else:
                raise InvalidPartitionColumn(
                    "partition_column must be int, and not {0}".format(v)
                )
    raise InvalidPartitionColumn(
        "partition_column {0} not found in the query".format(partition_column)
    )
Check partition_column existence and type Args: partition_column: partition_column name cols: dict with columns names and python types Returns: None
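A hypothetical illustration of the contract above (the column names are made up):
cols = {"id": "int", "name": "str"}
check_partition_column("id", cols)    # passes, returns None
check_partition_column("name", cols)  # raises InvalidPartitionColumn: not int
check_partition_column("age", cols)   # raises InvalidPartitionColumn: not found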
380,513
def add_domain_name(list_name, item_name):
    payload = {"jsonrpc": "2.0",
               "id": "ID0",
               "method": "add_policy_domain_names",
               "params": [list_name, {"item_name": item_name}]}
    # NOTE: the proxy function key was lost in extraction;
    # 'bluecoat_sslv.call' is a reconstructed guess.
    response = __proxy__['bluecoat_sslv.call'](payload, True)
    return _validate_change_result(response)
Adds a domain name to a domain name list. list_name(str): The name of the specific policy domain name list to append to. item_name(str): The domain name to append. CLI Example: .. code-block:: bash salt '*' bluecoat_sslv.add_domain_name MyDomainName foo.bar.com
380,514
def list_sebool():
    # NOTE: the command string and dict-key literals were lost in
    # extraction; the values below are reconstructed guesses based on the
    # salt selinux module's layout.
    bdata = __salt__['cmd.run']('semanage boolean -l').splitlines()
    ret = {}
    for line in bdata[1:]:
        if not line.strip():
            continue
        comps = line.split()
        ret[comps[0]] = {'State': comps[1][1:],
                         'Default': comps[3][:-1],
                         'Description': ' '.join(comps[4:])}
    return ret
Return a structure listing all of the selinux booleans on the system and what state they are in CLI Example: .. code-block:: bash salt '*' selinux.list_sebool
380,515
def set_ortho_choice(self, small_asset_data, large_asset_data, name=''):
    # NOTE: the default for `name` was lost in extraction; '' is assumed.
    o3d_asset_id = self.create_o3d_asset(manip=None,
                                         small_ov_set=small_asset_data,
                                         large_ov_set=large_asset_data,
                                         display_name=name)
    self.add_choice(o3d_asset_id, name=name)
stub
380,516
def load(self, label):
    # NOTE: the path-format and file-mode literals were lost in extraction
    # and are reconstructed.
    objloc = '{0}/{1}'.format(self.statedir, label)
    try:
        obj = pickle.load(open(objloc, 'rb'))
    except (KeyError, IndexError, EOFError):
        obj = open(objloc, 'r').read()
        try:
            obj = float(obj)
        except ValueError:
            pass
    except IOError:
        obj = None
    return obj
Load obj with given label from hidden state directory
380,517
def get_unit_hostnames(self, units):
    # NOTE: the key, filename and log-format literals were lost in
    # extraction; 'unit_name' and '/etc/hostname' are reconstructed guesses.
    host_names = {}
    for unit in units:
        host_names[unit.info['unit_name']] = \
            str(unit.file_contents('/etc/hostname').strip())
    self.log.debug('Unit host names: {}'.format(host_names))
    return host_names
Return a dict of juju unit names to hostnames.
380,518
def direction(theta, phi):
    return np.array([np.cos(phi) * np.sin(theta),
                     np.sin(phi) * np.sin(theta),
                     np.cos(theta)])
Return the direction vector of a cylinder defined by the spherical coordinates theta and phi.
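A quick sanity check of the function above: spherical coordinates always yield a unit vector.
import numpy as np
v = direction(np.pi / 2, 0.0)  # theta = 90 deg, phi = 0 deg -> x axis
print(np.round(v, 6))          # [1. 0. 0.]
print(np.linalg.norm(v))       # 1.0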
380,519
def iterDiffs(self):
    nodes = self.nodes.values()  # Python 2: values() returns a list
    nodes.sort(key=lambda node: self._height(node))
    for node in nodes:
        yield node.diff
Return all diffs used in optimal network.
380,520
def list_group_categories_for_context_courses(self, course_id):
    path = {}
    data = {}
    params = {}
    path["course_id"] = course_id
    self.logger.debug("GET /api/v1/courses/{course_id}/group_categories with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/courses/{course_id}/group_categories".format(**path), data=data, params=params, all_pages=True)
List group categories for a context. Returns a list of group categories in a context
380,521
def handle_request(self):
    try:
        request, client_address = self.get_request()
    except socket.error:
        return
    if self.verify_request(request, client_address):
        self.requests.put((request, client_address))
simply collect requests and put them on the queue for the workers.
380,522
def parent(self, parent):
    self._name = self.unique_name(self._name)
    self._full_name = None
    self._logger = None
    self._parent = parent
Sets the parent of the actor. :param parent: the parent :type parent: Actor
380,523
def euclidean_distance_square(point1, point2):
    distance = 0.0
    for i in range(len(point1)):
        distance += (point1[i] - point2[i]) ** 2.0
    return distance
! @brief Calculate square Euclidean distance between two vectors. \f[ dist(a, b) = \sum_{i=0}^{N}(a_{i} - b_{i})^{2}; \f] @param[in] point1 (array_like): The first vector. @param[in] point2 (array_like): The second vector. @return (double) Square Euclidean distance between two vectors. @see euclidean_distance, manhattan_distance, chebyshev_distance
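For example, with the classic 3-4-5 right triangle the squared distance is 3^2 + 4^2:
print(euclidean_distance_square([0.0, 0.0], [3.0, 4.0]))  # 25.0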
380,524
def writeRaw8(self, value):
    value = value & 0xFF
    self._bus.write_byte(self._address, value)
    self._logger.debug("Wrote 0x%02X", value)
Write an 8-bit value on the bus (without register).
380,525
def movies_directed_by(self, director):
    return [movie for movie in self._movie_finder.find_all()
            if movie.director == director]
Return list of movies that were directed by certain person. :param director: Director's name :type director: str :rtype: list[movies.models.Movie] :return: List of movie instances.
380,526
def cert_info(cert, digest='sha256'):
    # NOTE: many string literals (date format, dict keys, log message, alt-name
    # entries) were lost in extraction; the reconstruction below follows the
    # salt tls module's layout and this function's docstring.
    date_fmt = '%Y%m%d%H%M%SZ'
    if '-----BEGIN' not in cert:
        with salt.utils.files.fopen(cert) as cert_file:
            cert = cert_file.read()
    cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert)
    issuer = {}
    for key, value in cert.get_issuer().get_components():
        if isinstance(key, bytes):
            key = salt.utils.stringutils.to_unicode(key)
        if isinstance(value, bytes):
            value = salt.utils.stringutils.to_unicode(value)
        issuer[key] = value
    subject = {}
    for key, value in cert.get_subject().get_components():
        if isinstance(key, bytes):
            key = salt.utils.stringutils.to_unicode(key)
        if isinstance(value, bytes):
            value = salt.utils.stringutils.to_unicode(value)
        subject[key] = value
    ret = {
        'fingerprint': salt.utils.stringutils.to_unicode(
            cert.digest(salt.utils.stringutils.to_str(digest))),
        'subject': subject,
        'issuer': issuer,
        'serial_number': cert.get_serial_number(),
        'not_before': calendar.timegm(time.strptime(
            str(cert.get_notBefore().decode(__salt_system_encoding__)),
            date_fmt)),
        'not_after': calendar.timegm(time.strptime(
            cert.get_notAfter().decode(__salt_system_encoding__),
            date_fmt)),
    }
    if hasattr(cert, 'get_extension_count'):
        ret['extensions'] = {}
        for i in _range(cert.get_extension_count()):
            try:
                ext = cert.get_extension(i)
                key = salt.utils.stringutils.to_unicode(ext.get_short_name())
                ret['extensions'][key] = str(ext).strip()
            except AttributeError:
                continue
    if 'subjectAltName' in ret.get('extensions', {}):
        valid_entries = ('DNS', 'IP Address')
        valid_names = set()
        for name in str(ret['extensions']['subjectAltName']).split(', '):
            entry, name = name.split(':', 1)
            if entry not in valid_entries:
                log.error('Cert %s has an entry (%s) which does not start with %s',
                          ret['subject'], name, '/'.join(valid_entries))
            else:
                valid_names.add(name)
        ret['subject_alt_names'] = list(valid_names)
    if hasattr(cert, 'get_signature_algorithm'):
        try:
            value = cert.get_signature_algorithm()
            if isinstance(value, bytes):
                value = salt.utils.stringutils.to_unicode(value)
            ret['signature_algorithm'] = value
        except AttributeError:
            pass
    return ret
Return information for a particular certificate cert path to the certificate PEM file or string .. versionchanged:: 2018.3.4 digest what digest to use for fingerprinting CLI Example: .. code-block:: bash salt '*' tls.cert_info /dir/for/certs/cert.pem
380,527
def _move_here(self):
    cu = self.scraper.current_item
    if self is cu:
        return
    if cu.items and self in cu.items:
        self.scraper.move_to(self)
        return
    if self is cu.parent:
        self.scraper.move_up()
    if self.parent and self in self.parent.items:
        self.scraper.move_up()
        self.scraper.move_to(self)
        return
    self.scraper.move_to_top()
    for step in self.path:
        self.scraper.move_to(step)
Move the cursor to this item.
380,528
def get_ip_info(ip: str, exceptions: bool=False, timeout: int=10) -> tuple:
    # NOTE: the empty-string literals, 'country_code' key and log-format
    # string were lost in extraction and are reconstructed.
    import traceback
    import socket
    if not ip:
        return None, '', ''
    host = ''
    country_code = get_geo_ip(ip, exceptions=exceptions,
                              timeout=timeout).get('country_code', '')
    try:
        res = socket.gethostbyaddr(ip)
        host = res[0][:255] if ip else ''
    except Exception as e:
        msg = 'get_ip_info({}): {}'.format(ip, traceback.format_exc())
        logger.error(msg)
        if exceptions:
            raise e
    return ip, country_code, host
Returns (ip, country_code, host) tuple of the IP address. :param ip: IP address :param exceptions: Raise Exception or not :param timeout: Timeout in seconds. Note that timeout only affects geo IP part, not getting host name. :return: (ip, country_code, host)
380,529
def moist_lapse(pressure, temperature, ref_pressure=None):
    # NOTE: the unit strings and the 'left'/'right' searchsorted sides were
    # lost in extraction; they are reconstructed from the metpy
    # implementation this function comes from.
    def dt(t, p):
        t = units.Quantity(t, temperature.units)
        p = units.Quantity(p, pressure.units)
        rs = saturation_mixing_ratio(p, t)
        frac = ((mpconsts.Rd * t + mpconsts.Lv * rs)
                / (mpconsts.Cp_d + (mpconsts.Lv * mpconsts.Lv * rs * mpconsts.epsilon
                                    / (mpconsts.Rd * t * t)))).to('kelvin')
        return frac / p

    if ref_pressure is None:
        ref_pressure = pressure[0]

    pressure = pressure.to('mbar')
    ref_pressure = ref_pressure.to('mbar')
    temperature = atleast_1d(temperature)

    side = 'left'
    pres_decreasing = (pressure[0] > pressure[-1])
    if pres_decreasing:
        pressure = pressure[::-1]
        side = 'right'

    ref_pres_idx = np.searchsorted(pressure.m, ref_pressure.m, side=side)

    ret_temperatures = np.empty((0, temperature.shape[0]))

    if ref_pressure > pressure.min():
        pres_down = np.append(ref_pressure, pressure[(ref_pres_idx - 1)::-1])
        trace_down = si.odeint(dt, temperature.squeeze(), pres_down.squeeze())
        ret_temperatures = np.concatenate((ret_temperatures, trace_down[:0:-1]))

    if ref_pressure < pressure.max():
        pres_up = np.append(ref_pressure, pressure[ref_pres_idx:])
        trace_up = si.odeint(dt, temperature.squeeze(), pres_up.squeeze())
        ret_temperatures = np.concatenate((ret_temperatures, trace_up[1:]))

    if pres_decreasing:
        ret_temperatures = ret_temperatures[::-1]

    return units.Quantity(ret_temperatures.T.squeeze(), temperature.units)
r"""Calculate the temperature at a level assuming liquid saturation processes. This function lifts a parcel starting at `temperature`. The starting pressure can be given by `ref_pressure`. Essentially, this function is calculating moist pseudo-adiabats. Parameters ---------- pressure : `pint.Quantity` The atmospheric pressure level(s) of interest temperature : `pint.Quantity` The starting temperature ref_pressure : `pint.Quantity`, optional The reference pressure. If not given, it defaults to the first element of the pressure array. Returns ------- `pint.Quantity` The temperature corresponding to the starting temperature and pressure levels. See Also -------- dry_lapse : Calculate parcel temperature assuming dry adiabatic processes parcel_profile : Calculate complete parcel profile Notes ----- This function is implemented by integrating the following differential equation: .. math:: \frac{dT}{dP} = \frac{1}{P} \frac{R_d T + L_v r_s} {C_{pd} + \frac{L_v^2 r_s \epsilon}{R_d T^2}} This equation comes from [Bakhshaii2013]_.
380,530
def text_search(self, text, sort=None, offset=100, page=1):
    # NOTE: the f-string literals were lost in extraction; the assertion
    # message, '/query' URL suffix and error message are reconstructed
    # guesses.
    assert page >= 1, f'page value {page} is invalid, must be >= 1'
    payload = {"text": text, "sort": sort, "offset": offset, "page": page}
    response = self.requests_session.get(
        f'{self.url}/query',
        params=payload,
        headers=self._headers
    )
    if response.status_code == 200:
        return self._parse_search_response(response.content)
    else:
        raise Exception(f'Unable to search for DDOs: {response.content}')
Search in aquarius using text query. Given the string, aquarius will do a full-text query to search in all documents. Currently implemented are the MongoDB and Elastic Search drivers. For a detailed guide on how to search, see the MongoDB driver documentation: https://docs.mongodb.com/manual/reference/operator/query/text/ and the Elastic Search documentation: https://www.elastic.co/guide/en/elasticsearch/guide/current/full-text-search.html Other drivers are possible according to each implementation. :param text: String to be searched. :param sort: 1/-1 to sort ascending or descending. :param offset: Integer with the number of elements displayed per page. :param page: Integer with the page number. :return: List of DDO instances
380,531
def on_line(client, line):
    if line.startswith("PING"):
        client.send("PONG" + line[4:])
        return True
    if line.startswith(":"):
        actor, _, line = line[1:].partition(" ")
    else:
        actor = None
    command, _, args = line.partition(" ")
    command = NUMERIC_EVENTS.get(command, command)
    parser = PARSERS.get(command, False)
    if parser:
        parser(client, command, actor, args)
        return True
    elif parser is False:
        return True
Default handling for incoming lines. This handler will automatically manage the following IRC messages: PING: Responds with a PONG. PRIVMSG: Dispatches the PRIVMSG event. NOTICE: Dispatches the NOTICE event. MOTDSTART: Initializes MOTD receive buffer. MOTD: Appends a line to the MOTD receive buffer. ENDOFMOTD: Joins the contents of the MOTD receive buffer, assigns the result to the .motd of the server, and dispatches the MOTD event.
380,532
def disconnect_channel(self, destination_id):
    if destination_id in self._open_channels:
        try:
            # NOTE: the key literal was lost in extraction; 'origin' is
            # reconstructed from the Cast protocol's CLOSE message shape.
            self.send_message(
                destination_id, NS_CONNECTION,
                {MESSAGE_TYPE: TYPE_CLOSE, 'origin': {}},
                no_add_request_id=True, force=True)
        except NotConnected:
            pass
        except Exception:
            self.logger.exception("[%s:%s] Exception",
                                  self.fn or self.host, self.port)
        self._open_channels.remove(destination_id)
        self.handle_channel_disconnected()
Disconnect a channel with destination_id.
380,533
def GetRealPath(filename):
    # NOTE: the path-prefix and separator literals were lost in extraction;
    # './', '../', 'PATH' and ':' are reconstructed.
    if os.path.isabs(filename):
        return filename
    if filename.startswith('./') or filename.startswith('../'):
        return os.path.abspath(filename)
    path = os.getenv('PATH', '')
    for directory in path.split(':'):
        tryname = os.path.join(directory, filename)
        if os.path.exists(tryname):
            if not os.path.isabs(directory):
                return os.path.abspath(tryname)
            return tryname
    if os.path.exists(filename):
        return os.path.abspath(filename)
    return None
Given an executable filename, find in the PATH or find absolute path. Args: filename An executable filename (string) Returns: Absolute version of filename. None if filename could not be found locally, absolutely, or in PATH
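For instance, on a typical Linux box (exact results depend on the machine):
print(GetRealPath('ls'))           # e.g. '/bin/ls', found via PATH
print(GetRealPath('/etc/hosts'))   # '/etc/hosts', already absolute
print(GetRealPath('no-such-bin'))  # None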
380,534
def forget_canvas(canvas):
    cc = [c() for c in canvasses if c() is not None]
    while canvas in cc:
        cc.remove(canvas)
    canvasses[:] = [weakref.ref(c) for c in cc]
Forget about the given canvas. Used by the canvas when closed.
380,535
def save(self, filename, fformat=None, fill_value=None, compute=True,
         keep_palette=False, cmap=None, **format_kwargs):
    # NOTE: the format literals were lost in extraction; 'tif'/'jp2' are
    # reconstructed as the rasterio-backed formats.
    fformat = fformat or os.path.splitext(filename)[1][1:4]
    if fformat in ('tif', 'jp2') and rasterio:
        return self.rio_save(filename, fformat=fformat,
                             fill_value=fill_value, compute=compute,
                             keep_palette=keep_palette, cmap=cmap,
                             **format_kwargs)
    else:
        return self.pil_save(filename, fformat, fill_value,
                             compute=compute, **format_kwargs)
Save the image to the given *filename*. Args: filename (str): Output filename fformat (str): File format of output file (optional). Can be one of many image formats supported by the `rasterio` or `PIL` libraries ('jpg', 'png', 'tif'). By default this is determined by the extension of the provided filename. If the format allows, geographical information will be saved to the output file, in the form of grid mapping or ground control points. fill_value (float): Replace invalid data values with this value and do not produce an Alpha band. Default behavior is to create an alpha band. compute (bool): If True (default) write the data to the file immediately. If False the return value is either a `dask.Delayed` object or a tuple of ``(source, target)`` to be passed to `dask.array.store`. keep_palette (bool): Saves the palettized version of the image if set to True. False by default. cmap (Colormap or dict): Colormap to be applied to the image when saving with rasterio, used with keep_palette=True. Should be uint8. format_kwargs: Additional format options to pass to `rasterio` or `PIL` saving methods. Returns: Either `None` if `compute` is True or a `dask.Delayed` object or ``(source, target)`` pair to be passed to `dask.array.store`. If compute is False the return value depends on format and how the image backend is used. If ``(source, target)`` is provided then target is an open file-like object that must be closed by the caller.
380,536
def _get_satisfiability_smt_script(self, constraints=(), variables=()):
    # NOTE: the literal SMT-LIB header/footer strings were lost in
    # extraction; a minimal reconstruction is used here.
    smt_script = '(set-logic ALL)\n'
    smt_script += self._smtlib_exprs(variables)
    smt_script += self._smtlib_exprs(constraints)
    smt_script += '(check-sat)\n'
    return smt_script
Returns a SMT script that declare all the symbols and constraint and checks their satisfiability (check-sat) :param extra-constraints: list of extra constraints that we want to evaluate only in the scope of this call :return string: smt-lib representation of the script that checks the satisfiability
380,537
def expand_path(path):
    return os.path.abspath(os.path.expandvars(os.path.expanduser(path)))
Returns ``path`` as an absolute path with ~user and env var expansion applied. :API: public
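For example (the DATA_DIR variable is set up just for the illustration):
import os
os.environ['DATA_DIR'] = '/tmp/data'
print(expand_path('$DATA_DIR/../cache'))  # '/tmp/cache'
print(expand_path('~/projects'))          # home-relative path made absolute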
380,538
def get_file(self, secure_data_path, version=None):
    query = self._get_file(secure_data_path, version)
    resp = query.headers.copy()
    resp = self._parse_metadata_filename(resp)
    resp['data'] = query.content  # key name per the docstring
    return resp
Return a requests.structures.CaseInsensitiveDict object containing a file and the metadata/header information around it. The binary data of the file is under the key 'data'
380,539
def force_delete(self):
    self.__force_deleting__ = True
    self.delete()
    self.__force_deleting__ = False
Force a hard delete on a soft deleted model.
380,540
def add_caveats(self, cavs, key, loc):
    if cavs is None:
        return
    for cav in cavs:
        self.add_caveat(cav, key, loc)
Add an array of caveats to the macaroon. This method does not mutate the current object. @param cavs array of caveats. @param key the PublicKey to encrypt third party caveat. @param loc locator to find the location object that has a method third_party_info.
380,541
def run(self, inputs, **kwargs):
    # NOTE: the dtype, device, data-name and namedtuple literals were lost
    # in extraction; the values below follow the mxnet ONNX backend
    # conventions.
    input_data = np.asarray(inputs[0], dtype='f')
    if self.device == 'CPU':
        ctx = mx.cpu()
    else:
        raise NotImplementedError("Only CPU context is supported for now")
    mod = mx.mod.Module(symbol=self.symbol, data_names=['input_0'],
                        context=ctx, label_names=None)
    mod.bind(for_training=False,
             data_shapes=[('input_0', input_data.shape)],
             label_shapes=None)
    mod.set_params(arg_params=self.params, aux_params=None)
    batch = namedtuple('Batch', ['data'])
    mod.forward(batch([mx.nd.array(input_data)]))
    result = mod.get_outputs()[0].asnumpy()
    return [result]
Run model inference and return the result Parameters ---------- inputs : numpy array input to run a layer on Returns ------- params : numpy array result obtained after running the inference on mxnet
380,542
def get_method_name(method):
    name = get_object_name(method)
    if name.startswith("__") and not name.endswith("__"):
        name = "_{0}{1}".format(get_object_name(method.im_class), name)
    return name
Returns given method name. :param method: Method to retrieve the name. :type method: object :return: Method name. :rtype: unicode
380,543
def setup_resource(self):
    # NOTE: the variable-name literals, module prefix and resource title
    # were lost in extraction; 'Class', 'Properties', 'OutputName',
    # 'troposphere.' and 'ResourceRefName' are reconstructed guesses.
    template = self.template
    variables = self.get_variables()
    tclass = variables['Class']
    tprops = variables['Properties']
    output = variables['OutputName']
    klass = load_object_from_string('troposphere.' + tclass)
    instance = klass.from_dict('ResourceRefName', tprops)
    template.add_resource(instance)
    template.add_output(Output(
        output,
        Description="A reference to the object created in this blueprint",
        Value=Ref(instance)
    ))
Setting Up Resource
380,544
def is_readable(path):
    # NOTE: the log-format literals were lost in extraction and are
    # reconstructed.
    if os.access(path, os.R_OK):
        LOGGER.debug("> '{0}' path is readable.".format(path))
        return True
    else:
        LOGGER.debug("> '{0}' path is not readable.".format(path))
        return False
Returns if given path is readable. :param path: Path to check access. :type path: unicode :return: Is path readable. :rtype: bool
380,545
def list(self):
    import IPython
    # NOTE: the dict-key/column literals were lost in extraction; 'name',
    # 'deploymentUri' and 'createTime' are reconstructed guesses based on
    # the Cloud ML versions API fields.
    data = [{'name': version['name'].split('/')[-1],
             'deploymentUri': version['deploymentUri'],
             'createTime': version['createTime']}
            for version in self.get_iterator()]
    IPython.display.display(
        datalab.utils.commands.render_dictionary(
            data, ['name', 'deploymentUri', 'createTime']))
List versions under the current model in a table view. Raises: Exception if it is called in a non-IPython environment.
380,546
def stat(self, follow_symlinks=True):
    return self._system.stat(
        path=self._path,
        client_kwargs=self._client_kwargs,
        header=self._header)
Return a stat_result object for this entry. The result is cached on the os.DirEntry object. Args: follow_symlinks (bool): Follow symlinks. Not supported on cloud storage objects. Returns: os.stat_result: Stat result object
380,547
def xrefs_from(self):
    for line in self.lines:
        for xref in line.xrefs_from:
            if xref.type.is_flow:
                continue
            if xref.to in self and xref.iscode:
                continue
            yield xref
Xrefs from the function. This includes the xrefs from every line in the function, as `Xref` objects. Xrefs are filtered to exclude code references that are internal to the function. This means that xrefs to the function's code will NOT be returned (references to the function's data, however, will be returned). To get those extra xrefs, you need to iterate the function's lines yourself.
380,548
def _computeStatus(self, dfile, service):
    # NOTE: the dict-key literals were lost in extraction; 'services' and
    # 'status' are reconstructed from the docstring. Python 2 code
    # (has_key, list-returning keys()).
    if service:
        if not dfile['services'].has_key(service):
            return self.ST_UNTRACKED
        else:
            return dfile['services'][service]['status']
    first_service_key = dfile['services'].keys()[0]
    first_status = dfile['services'][first_service_key]['status']
    for service in dfile['services']:
        if dfile['services'][service]['status'] != first_status:
            return self.ST_COMPLICATED
    return first_status
Computes status for a file. If more than one service handles the file and their statuses differ, it returns 'C' (for complicated); otherwise, if the status matches across all services, it returns that status.
380,549
def on_finished(self):
    self.controller.is_running = False
    error = self.controller.current_error
    if error is not None:
        self.info(self.tr("Stopped due to error(s), see Terminal."))
    else:
        self.info(self.tr("Finished successfully!"))
Finished signal handler
380,550
def info(name):
    # NOTE: the salt function key and all dict-key literals were lost in
    # extraction; the shadow-style field names below are reconstructed
    # guesses.
    info = __salt__['user.info'](name=name)
    ret = {'name': name, 'passwd': '', 'lstchg': '', 'min': '', 'max': '',
           'warn': '', 'inact': '', 'expire': ''}
    if info:
        ret = {'name': info['name'], 'passwd': '', 'lstchg': info['change'],
               'min': '', 'max': '', 'warn': '', 'inact': '',
               'expire': info['expire']}
    return ret
Return information for the specified user This just returns dummy data so that salt states can work. :param str name: The name of the user account to show. CLI Example: .. code-block:: bash salt '*' shadow.info root
380,551
def _modify(self, **patch):
    # NOTE: several string literals (kwarg name, metadata keys, version,
    # error message) were lost in extraction and are reconstructed.
    legacy = patch.pop('legacy', False)
    tmos_ver = self._meta_data['bigip']._meta_data['tmos_version']
    self._filter_version_specific_options(tmos_ver, **patch)
    if 'Drafts' not in self._meta_data['uri'] and \
            LooseVersion(tmos_ver) >= LooseVersion('12.1.0') and \
            not legacy:
        msg = 'Modify operation not allowed on a published policy.'
        raise OperationNotSupportedOnPublishedPolicy(msg)
    super(Policy, self)._modify(**patch)
Modify only draft or legacy policies Published policies cannot be modified :raises: OperationNotSupportedOnPublishedPolicy
380,552
def rejoin_lines(nb):
    # NOTE: the cell-type and key literals were lost in extraction; 'code',
    # 'input', 'source' and 'rendered' are reconstructed from the nbformat
    # v3 conventions.
    for ws in nb.worksheets:
        for cell in ws.cells:
            if cell.cell_type == 'code':
                if 'input' in cell and isinstance(cell.input, list):
                    cell.input = _join_lines(cell.input)
                for output in cell.outputs:
                    for key in _multiline_outputs:
                        item = output.get(key, None)
                        if isinstance(item, list):
                            output[key] = _join_lines(item)
            else:
                for key in ['source', 'rendered']:
                    item = cell.get(key, None)
                    if isinstance(item, list):
                        cell[key] = _join_lines(item)
    return nb
rejoin multiline text into strings For reversing effects of ``split_lines(nb)``. This only rejoins lines that have been split, so if text objects were not split they will pass through unchanged. Used when reading JSON files that may have been passed through split_lines.
380,553
def _prepare_reserved_tokens(reserved_tokens):
    reserved_tokens = [tf.compat.as_text(tok) for tok in reserved_tokens or []]
    dups = _find_duplicates(reserved_tokens)
    if dups:
        raise ValueError("Duplicates found in tokens: %s" % dups)
    reserved_tokens_re = _make_reserved_tokens_re(reserved_tokens)
    return reserved_tokens, reserved_tokens_re
Prepare reserved tokens and a regex for splitting them out of strings.
380,554
def update(self, iterable):
    e = self.evolver()
    for element in iterable:
        e.add(element)
    return e.persistent()
Return a new PSet with elements in iterable added >>> s1 = s(1, 2) >>> s1.update([3, 4, 4]) pset([1, 2, 3, 4])
380,555
def curve_reduce_approx(curve, reduced):
    if NO_IMAGES:
        return
    ax = curve.plot(256)
    color = ax.lines[-1].get_color()
    add_patch(ax, curve._nodes, color, alpha=0.25, node_color=color)
    reduced.plot(256, ax=ax)
    color = ax.lines[-1].get_color()
    add_patch(ax, reduced._nodes, color, alpha=0.25, node_color=color)
    ax.axis("scaled")
    _plot_helpers.add_plot_boundary(ax)
    save_image(ax.figure, "curve_reduce_approx.png")
Image for :meth:`.curve.Curve.reduce` docstring.
380,556
def create_summary_tear_sheet(factor_data, long_short=True, group_neutral=False):
    # NOTE: the 'factor_quantile' column literals were lost in extraction
    # and are reconstructed from the alphalens column naming.
    mean_quant_ret, std_quantile = perf.mean_return_by_quantile(
        factor_data, by_group=False, demeaned=long_short,
        group_adjust=group_neutral)
    mean_quant_rateret = mean_quant_ret.apply(
        utils.rate_of_return, axis=0, base_period=mean_quant_ret.columns[0])
    mean_quant_ret_bydate, std_quant_daily = perf.mean_return_by_quantile(
        factor_data, by_date=True, by_group=False, demeaned=long_short,
        group_adjust=group_neutral)
    mean_quant_rateret_bydate = mean_quant_ret_bydate.apply(
        utils.rate_of_return, axis=0,
        base_period=mean_quant_ret_bydate.columns[0])
    compstd_quant_daily = std_quant_daily.apply(
        utils.std_conversion, axis=0, base_period=std_quant_daily.columns[0])
    alpha_beta = perf.factor_alpha_beta(
        factor_data, demeaned=long_short, group_adjust=group_neutral)
    mean_ret_spread_quant, std_spread_quant = perf.compute_mean_returns_spread(
        mean_quant_rateret_bydate,
        factor_data['factor_quantile'].max(),
        factor_data['factor_quantile'].min(),
        std_err=compstd_quant_daily)
    periods = utils.get_forward_returns_columns(factor_data.columns)
    fr_cols = len(periods)
    vertical_sections = 2 + fr_cols * 3
    gf = GridFigure(rows=vertical_sections, cols=1)
    plotting.plot_quantile_statistics_table(factor_data)
    plotting.plot_returns_table(alpha_beta, mean_quant_rateret,
                                mean_ret_spread_quant)
    plotting.plot_quantile_returns_bar(mean_quant_rateret, by_group=False,
                                       ylim_percentiles=None,
                                       ax=gf.next_row())
    ic = perf.factor_information_coefficient(factor_data)
    plotting.plot_information_table(ic)
    quantile_factor = factor_data['factor_quantile']
    quantile_turnover = {
        p: pd.concat([perf.quantile_turnover(quantile_factor, q, p)
                      for q in range(1, int(quantile_factor.max()) + 1)],
                     axis=1)
        for p in periods}
    autocorrelation = pd.concat(
        [perf.factor_rank_autocorrelation(factor_data, period)
         for period in periods], axis=1)
    plotting.plot_turnover_table(autocorrelation, quantile_turnover)
    plt.show()
    gf.close()
Creates a small summary tear sheet with returns, information, and turnover analysis. Parameters ---------- factor_data : pd.DataFrame - MultiIndex A MultiIndex DataFrame indexed by date (level 0) and asset (level 1), containing the values for a single alpha factor, forward returns for each period, the factor quantile/bin that factor value belongs to, and (optionally) the group the asset belongs to. - See full explanation in utils.get_clean_factor_and_forward_returns long_short : bool Should this computation happen on a long short portfolio? if so, then mean quantile returns will be demeaned across the factor universe. group_neutral : bool Should this computation happen on a group neutral portfolio? if so, returns demeaning will occur on the group level.
380,557
def add(self, name: str, pattern: str) -> None:
    self.patterns[name] = URITemplate(pattern, converters=self.converters)
add url pattern for name
380,558
def process_iter():
    def add(pid):
        proc = Process(pid)
        _pmap[proc.pid] = proc
        return proc

    def remove(pid):
        _pmap.pop(pid, None)

    a = set(get_pid_list())
    b = set(_pmap.keys())
    new_pids = a - b
    gone_pids = b - a
    for pid in gone_pids:
        remove(pid)
    for pid, proc in sorted(list(_pmap.items()) +
                            list(dict.fromkeys(new_pids).items())):
        try:
            if proc is None:  # new process
                yield add(pid)
            else:
                if proc.is_running():
                    yield proc
                else:
                    yield add(pid)
        except NoSuchProcess:
            remove(pid)
        except AccessDenied:
            yield proc
Return a generator yielding a Process class instance for all running processes on the local machine. Every new Process instance is only created once and then cached into an internal table which is updated every time this is used. The sorting order in which processes are yielded is based on their PIDs.
380,559
def render_template(self):
    self._parse_paths()
    context = dict(napp=self._napp.__dict__, paths=self._paths)
    self._save(context)
Render and save API doc in openapi.yml.
380,560
def rpc_get_name_record(self, name, **con_info):
    # NOTE: the error-string and dict-key literals were lost in extraction;
    # they are reconstructed from the docstring's contract.
    res = None
    if check_name(name):
        res = self.get_name_record(name, include_expired=True,
                                   include_history=False)
    elif check_subdomain(name):
        res = self.get_subdomain_record(name, include_history=False)
    else:
        return {'error': 'Invalid name or subdomain', 'http_status': 400}
    if 'error' in res:
        return {'error': res['error'], 'http_status': 404}
    did_info = None
    did = None
    if check_name(name):
        did_info = self.get_name_DID_info(name)
    elif check_subdomain(name):
        did_info = self.get_subdomain_DID_info(name)
    else:
        return {'error': 'Invalid name or subdomain', 'http_status': 400}
    if did_info is not None:
        did = make_DID(did_info['name_type'], did_info['address'],
                       did_info['index'])
        res['record']['did'] = did
    return self.success_response({'record': res['record']})
Get the current state of a name or subdomain, excluding its history. Return {'status': True, 'record': rec} on success Return {'error': ...} on error
380,561
def dispatch(self, *args, **kwargs):
    if not self.registration_allowed():
        return HttpResponseRedirect(force_text(self.disallowed_url))
    return super(RegistrationView, self).dispatch(*args, **kwargs)
Check that user signup is allowed before even bothering to dispatch or do other processing.
380,562
def supports_currency_type(self, currency_type):
    # NOTE: the syntax literal was lost in extraction; 'CURRENCY' is
    # reconstructed from the docstring.
    if self._kwargs['syntax'] not in ['CURRENCY']:
        raise errors.IllegalState()
    return currency_type in self.get_currency_types
Tests if the given currency type is supported. arg: currency_type (osid.type.Type): a currency Type return: (boolean) - ``true`` if the type is supported, ``false`` otherwise raise: IllegalState - syntax is not a ``CURRENCY`` raise: NullArgument - ``currency_type`` is ``null`` *compliance: mandatory -- This method must be implemented.*
380,563
def _apply(self, f, grouper=None, *args, **kwargs):
    def func(x):
        x = self._shallow_copy(x, groupby=self.groupby)
        if isinstance(f, str):
            return getattr(x, f)(**kwargs)
        return x.apply(f, *args, **kwargs)

    result = self._groupby.apply(func)
    return self._wrap_result(result)
Dispatch to _upsample; we are stripping all of the _upsample kwargs and performing the original function call on the grouped object.
380,564
def create_element(tag: str, name: str = None, base: type = None,
                   attr: dict = None) -> Node:
    from wdom.web_node import WdomElement
    from wdom.tag import Tag
    from wdom.window import customElements
    if attr is None:
        attr = {}
    if name:
        base_class = customElements.get((name, tag))
    else:
        base_class = customElements.get((tag, None))
    if base_class is None:
        # NOTE: the attribute-key literal was lost in extraction; 'class_'
        # is a reconstructed guess.
        attr['class_'] = False
        base_class = base or WdomElement
    if issubclass(base_class, Tag):
        return base_class(**attr)
    return base_class(tag, **attr)
Create element with a tag of ``name``. :arg str name: html tag. :arg type base: Base class of the created element (defatlt: ``WdomElement``) :arg dict attr: Attributes (key-value pairs dict) of the new element.
380,565
def create(cls, name, certificate):
    # NOTE: the dict-key and encoding literals were lost in extraction;
    # 'name', 'certificate' and 'utf-8' are reconstructed.
    json = {'name': name,
            'certificate': certificate if pem_as_string(certificate)
            else load_cert_chain(certificate)[0][1].decode('utf-8')}
    return ElementCreator(cls, json)
Create a TLS CA. The certificate must be compatible with OpenSSL and be in PEM format. The certificate can be either a file with the Root CA, or a raw string starting with BEGIN CERTIFICATE, etc. When creating a TLS CA, you must also import the CA certificate. Once the CA is created, it is possible to import a different certificate to map to the CA if necessary. :param str name: name of root CA :param str,file certificate: The root CA contents :raises CreateElementFailed: failed to create the root CA :raises ValueError: if loading from file and no certificates present :raises IOError: cannot find specified file for certificate :rtype: TLSCertificateAuthority
380,566
def add_files_to_git_repository(base_dir, files, description):
    # NOTE: the git command arguments and user-facing messages were lost in
    # extraction; the reconstruction below follows the docstring's intent.
    if not os.path.isdir(base_dir):
        printOut('Output path is not a directory, cannot add files to git repository.')
        return
    gitRoot = subprocess.Popen(['git', 'rev-parse', '--show-toplevel'],
                               cwd=base_dir,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = gitRoot.communicate()[0]
    if gitRoot.returncode != 0:
        printOut('Cannot commit results, output path is not in a git repository.')
        return
    gitRootDir = decode_to_string(stdout).splitlines()[0]
    gitStatus = subprocess.Popen(['git', 'status', '--porcelain', '--untracked-files=no'],
                                 cwd=gitRootDir,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = gitStatus.communicate()
    if gitStatus.returncode != 0:
        printOut('Git status failed! Output was:\n' + decode_to_string(stderr))
        return
    if stdout:
        printOut('Git repository has local changes, not committing results.')
        return
    files = [os.path.realpath(file) for file in files]
    gitAdd = subprocess.Popen(['git', 'add', '--force', '--'] + files,
                              cwd=gitRootDir)
    if gitAdd.wait() != 0:
        printOut('Git add failed, will not commit results!')
        return
    printOut('Committing results files to git repository in ' + gitRootDir)
    gitCommit = subprocess.Popen(['git', 'commit', '--file=-', '--quiet'],
                                 cwd=gitRootDir, stdin=subprocess.PIPE)
    gitCommit.communicate(description.encode('UTF-8'))
    if gitCommit.returncode != 0:
        printOut('Git commit failed!')
        return
Add and commit all files given in a list into a git repository in the base_dir directory. Nothing is done if the git repository has local changes. @param files: the files to commit @param description: the commit message
380,567
def load_configuration(configuration):
    if isinstance(configuration, dict):
        return configuration
    else:
        with open(configuration) as configfile:
            return json.load(configfile)
Returns a dictionary, accepts a dictionary or a path to a JSON file.
380,568
def getElementsWithAttrValues(self, attrName, attrValues):
    elements = []
    for child in self.children:
        if child.getAttribute(attrName) in attrValues:
            elements.append(child)
        elements += child.getElementsWithAttrValues(attrName, attrValues)
    return TagCollection(elements)
getElementsWithAttrValues - Search children of this tag for tags with an attribute name and one of several values @param attrName <lowercase str> - Attribute name (lowercase) @param attrValues set<str> - set of acceptable attribute values @return - TagCollection of matching elements
380,569
def save_data(self, trigger_id, **data):
    title, content = super(ServiceEvernote, self).save_data(trigger_id, **data)
    trigger = Evernote.objects.get(trigger_id=trigger_id)
    note_store = self._notestore(trigger_id, data)
    if isinstance(note_store, evernote.api.client.Store):
        note = self._notebook(trigger, note_store)
        note = self._attributes(note, data)
        content = self._footer(trigger, data, content)
        note.title = limit_content(title, 255)
        note = self._content(note, content)
        return EvernoteMgr.create_note(note_store, note, trigger_id, data)
    else:
        return note_store
Save the data. Empty titles and content are not handled here, since they would make the Evernote API raise an exception. :param trigger_id: trigger ID from which to save data :param data: the data to check to be used and save :type trigger_id: int :type data: dict :return: the status of the save statement :rtype: boolean
380,570
def is_instance_of(self, some_class):
    # NOTE: the fallback type name and message literals were lost in
    # extraction and are reconstructed.
    try:
        if not isinstance(self.val, some_class):
            if hasattr(self.val, '__name__'):
                t = self.val.__name__
            elif hasattr(self.val, '__class__'):
                t = self.val.__class__.__name__
            else:
                t = 'unknown'
            self._err('Expected <%s:%s> to be instance of class <%s>, but was not.'
                      % (self.val, t, some_class.__name__))
    except TypeError:
        raise TypeError('given arg must be a class')
    return self
Asserts that val is an instance of the given class.
380,571
def incrementing_sleep(self, previous_attempt_number,
                       delay_since_first_attempt_ms):
    result = self._wait_incrementing_start + \
        (self._wait_incrementing_increment * (previous_attempt_number - 1))
    if result > self._wait_incrementing_max:
        result = self._wait_incrementing_max
    if result < 0:
        result = 0
    return result
Sleep an incremental amount of time after each attempt, starting at wait_incrementing_start and incrementing by wait_incrementing_increment
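For example, with hypothetical settings start=500 ms, increment=100 ms, max=1000 ms, the per-attempt waits follow the same formula as the method above:
start, increment, maximum = 500, 100, 1000
waits = [min(max(start + increment * (n - 1), 0), maximum) for n in range(1, 8)]
print(waits)  # [500, 600, 700, 800, 900, 1000, 1000]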
380,572
def create_script_staging_table(self, output_table, col_list):
    # NOTE: all SQL string literals were lost in extraction; the statements
    # below are reconstructed placeholders that follow the docstring intent
    # (CREATE TABLE plus a comment banner), not the original DDL.
    self.ddl_text += '\n-----------------------------------------\n'
    self.ddl_text += '-- staging table for ' + output_table + '\n'
    self.ddl_text += '-----------------------------------------\n'
    self.ddl_text += 'DROP TABLE IF EXISTS ' + output_table + ';\n'
    self.ddl_text += 'CREATE TABLE ' + output_table + ' (\n'
    self.ddl_text += ',\n'.join([col + ' TEXT' for col in col_list])
    self.ddl_text += ',\n' + self.date_updated_col + ' TIMESTAMP\n'
    self.ddl_text += ');\n'
appends the CREATE TABLE, index etc to another table
380,573
def fix_paths(self, d, root=None, project=None):
    # NOTE: the key literals were lost in extraction; 'project' and 'root'
    # are reconstructed from the parameter names.
    if root is None and project is None:
        project = d.get('project')
        if project is not None:
            root = self[project]['root']
        else:
            root = d['root']
    elif root is None:
        root = self[project]['root']
    elif project is None:
        pass
    paths = self.paths
    for key, val in d.items():
        if isinstance(val, dict):
            d[key] = self.fix_paths(val, root, project)
        elif key in paths:
            val = d[key]
            if isinstance(val, six.string_types) and not osp.isabs(val):
                d[key] = osp.join(root, val)
            elif (isinstance(utils.safe_list(val)[0], six.string_types)
                    and not osp.isabs(val[0])):
                for i in range(len(val)):
                    val[i] = osp.join(root, val[i])
    return d
Fix the paths in the given dictionary to get absolute paths Parameters ---------- %(ExperimentsConfig.fix_paths.parameters)s Returns ------- %(ExperimentsConfig.fix_paths.returns)s Notes ----- d is modified in place!
380,574
def create(self, name, network):
    # NOTE: the error-message literal was lost in extraction and is
    # reconstructed.
    if network not in SUPPORTED_NETWORKS:
        raise ValueError('network not supported')
    account = self.wrap(self.resource.create(dict(name=name, network=network)))
    self.add(account)
    return account
Create a new Account object and add it to this Accounts collection. Args: name (str): Account name network (str): Type of cryptocurrency. Can be one of, 'bitcoin', ' bitcoin_testnet', 'litecoin', 'dogecoin'. Returns: The new round.Account
380,575
def iter_admin_log(
        self, entity, limit=None, *, max_id=0, min_id=0, search=None,
        admins=None, join=None, leave=None, invite=None, restrict=None,
        unrestrict=None, ban=None, unban=None, promote=None, demote=None,
        info=None, settings=None, pinned=None, edit=None, delete=None):
    return _AdminLogIter(
        self, limit, entity=entity, admins=admins, search=search,
        min_id=min_id, max_id=max_id, join=join, leave=leave,
        invite=invite, restrict=restrict, unrestrict=unrestrict,
        ban=ban, unban=unban, promote=promote, demote=demote,
        info=info, settings=settings, pinned=pinned, edit=edit,
        delete=delete
    )
Iterator over the admin log for the specified channel. Note that you must be an administrator of it to use this method. If none of the filters are present (i.e. they all are ``None``), *all* event types will be returned. If at least one of them is ``True``, only those that are true will be returned. Args: entity (`entity`): The channel entity from which to get its admin log. limit (`int` | `None`, optional): Number of events to be retrieved. The limit may also be ``None``, which would eventually return the whole history. max_id (`int`): All the events with a higher (newer) ID or equal to this will be excluded. min_id (`int`): All the events with a lower (older) ID or equal to this will be excluded. search (`str`): The string to be used as a search query. admins (`entity` | `list`): If present, the events will be filtered by these admins (or single admin) and only those caused by them will be returned. join (`bool`): If ``True``, events for when a user joined will be returned. leave (`bool`): If ``True``, events for when a user leaves will be returned. invite (`bool`): If ``True``, events for when a user joins through an invite link will be returned. restrict (`bool`): If ``True``, events with partial restrictions will be returned. This is what the API calls "ban". unrestrict (`bool`): If ``True``, events removing restrictions will be returned. This is what the API calls "unban". ban (`bool`): If ``True``, events applying or removing all restrictions will be returned. This is what the API calls "kick" (restricting all permissions removed is a ban, which kicks the user). unban (`bool`): If ``True``, events removing all restrictions will be returned. This is what the API calls "unkick". promote (`bool`): If ``True``, events with admin promotions will be returned. demote (`bool`): If ``True``, events with admin demotions will be returned. info (`bool`): If ``True``, events changing the group info will be returned. settings (`bool`): If ``True``, events changing the group settings will be returned. pinned (`bool`): If ``True``, events of new pinned messages will be returned. edit (`bool`): If ``True``, events of message edits will be returned. delete (`bool`): If ``True``, events of message deletions will be returned. Yields: Instances of `telethon.tl.custom.adminlogevent.AdminLogEvent`.
380,576
def phenotypes_actions(institute_id, case_name):
    # NOTE: many string literals (endpoint/template names, form keys, action
    # names, config keys) were lost in extraction; the values below are
    # reconstructed guesses in the scout style.
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    case_url = url_for('cases.case', institute_id=institute_id,
                       case_name=case_name)
    action = request.form['action']
    hpo_ids = request.form.getlist('hpo_id')
    user_obj = store.user(current_user.email)
    if action == 'DELETE':
        for hpo_id in hpo_ids:
            store.remove_phenotype(institute_obj, case_obj, user_obj,
                                   case_url, hpo_id)
    elif action == 'PHENOMIZER':
        if len(hpo_ids) == 0:
            hpo_ids = [term['phenotype_id'] for term in
                       case_obj.get('phenotype_terms', [])]
        username = current_app.config['PHENOMIZER_USERNAME']
        password = current_app.config['PHENOMIZER_PASSWORD']
        diseases = controllers.hpo_diseases(username, password, hpo_ids)
        return render_template('cases/diseases.html', diseases=diseases,
                               institute=institute_obj, case=case_obj)
    elif action == 'ADD_GENES':
        hgnc_symbols = set()
        for raw_symbols in request.form.getlist('genes'):
            if raw_symbols:
                hgnc_symbols.update(raw_symbol.split(' ', 1)[0]
                                    for raw_symbol in raw_symbols.split('|'))
        store.update_dynamic_gene_list(case_obj, hgnc_symbols=hgnc_symbols)
    elif action == 'GENERATE':
        if len(hpo_ids) == 0:
            hpo_ids = [term['phenotype_id'] for term in
                       case_obj.get('phenotype_terms', [])]
        results = store.generate_hpo_gene_list(*hpo_ids)
        hpo_count = int(request.form.get('min_match') or 1)
        hgnc_ids = [result[0] for result in results
                    if result[1] >= hpo_count]
        store.update_dynamic_gene_list(case_obj, hgnc_ids=hgnc_ids,
                                       phenotype_ids=hpo_ids)
    return redirect(case_url)
Perform actions on multiple phenotypes.
380,577
def decide(self, accepts, context_aware=False):
    mimetype = self.decide_mimetype(accepts, context_aware)
    if mimetype is not None:
        return (mimetype, self.get_serialize_format(mimetype))
    else:
        return (None, None)
Returns what (mimetype,format) the client wants to receive Parses the given Accept header and picks the best one that we know how to output Returns (mimetype, format) An empty Accept will default to rdf+xml An Accept with */* use rdf+xml unless a better match is found An Accept that doesn't match anything will return (None,None) context_aware=True will allow nquad serialization
380,578
def sample(polygon, count, factor=1.5, max_iter=10):
    bounds = np.reshape(polygon.bounds, (2, 2))
    extents = bounds.ptp(axis=0)
    hit = []
    hit_count = 0
    per_loop = int(count * factor)
    for i in range(max_iter):
        points = np.random.random((per_loop, 2))
        points = (points * extents) + bounds[0]
        mask = vectorized.contains(polygon, *points.T)
        hit.append(points[mask])
        hit_count += len(hit[-1])
        if hit_count > count:
            break
    hit = np.vstack(hit)[:count]
    return hit
Use rejection sampling to generate random points inside a polygon. Parameters ----------- polygon : shapely.geometry.Polygon Polygon that will contain points count : int Number of points to return factor : float How many points to test per loop IE, count * factor max_iter : int, Maximum number of intersection loops to run, total points sampled is count * factor * max_iter Returns ----------- hit : (n, 2) float Random points inside polygon where n <= count
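A usage sketch of the rejection sampler above (assumes shapely with shapely.vectorized available for the contains test):
import numpy as np
from shapely.geometry import Point
disc = Point(0, 0).buffer(1.0)  # unit disc as the test polygon
pts = sample(disc, 100)
print(pts.shape)                # (100, 2), all points inside the disc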
380,579
def load_experiment(folder, return_path=False):
    # NOTE: a fragment of the docstring had leaked into this cell's code and
    # has been removed.
    fullpath = os.path.abspath(folder)
    config = "%s/config.json" % (fullpath)
    if not os.path.exists(config):
        bot.error("config.json could not be found in %s" % (folder))
        config = None
    if return_path is False and config is not None:
        config = read_json(config)
    return config
load_experiment: reads in the config.json for a folder, returns None if not found. :param folder: full path to experiment folder :param return_path: if True, don't load the config.json, but return it
380,580
def addDataset(self, dataset):
    item = XChartDatasetItem()
    self.addItem(item)
    item.setDataset(dataset)
    return item
Creates a new dataset instance for this scene. :param dataset | <XChartDataset> :return <XChartDatasetItem>
380,581
def resize(self, container, height, width):
    # NOTE: the query-parameter keys were lost in extraction; 'h' and 'w'
    # match the Docker remote API's resize parameters.
    params = {'h': height, 'w': width}
    url = self._url("/containers/{0}/resize", container)
    res = self._post(url, params=params)
    self._raise_for_status(res)
Resize the tty session. Args: container (str or dict): The container to resize height (int): Height of tty session width (int): Width of tty session Raises: :py:class:`docker.errors.APIError` If the server returns an error.
380,582
def cli(ctx, comment, metadata=""):
    return ctx.gi.cannedcomments.add_comment(comment, metadata=metadata)
Add a canned comment Output: A dictionary containing canned comment description
380,583
def p_null_assignment(self, t):
    # NOTE: the Term name literal was lost in extraction; 'obs_vlabel' is a
    # reconstructed guess based on similar grammar actions.
    self.accu.add(Term('obs_vlabel', [self.name, "gen(\"" + t[1] + "\")", "0"]))
null_assignment : IDENT EQ NULL
380,584
def hex_color(self, safe: bool = False) -> str:
    # NOTE: the format-string literal was lost in extraction; '#{:06x}' is
    # reconstructed from the docstring's '#d8346b' example.
    if safe:
        return self.random.choice(SAFE_COLORS)
    return '#{:06x}'.format(self.random.randint(0x000000, 0xffffff))
Generate a random hex color. :param safe: Get safe Flat UI hex color. :return: Hex color code. :Example: #d8346b
380,585
def _SetupBotoConfig(self):
    project_id = self._GetNumericProjectId()
    try:
        boto_config.BotoConfig(project_id, debug=self.debug)
    except (IOError, OSError) as e:
        self.logger.warning(str(e))
Set the boto config so GSUtil works with provisioned service accounts.
380,586
def _jaccard_similarity(f1, f2, weight_func):
    elements = set(f1)
    elements.update(f2)
    count, w_count, w_total = 0, 0, 0
    for element in elements:
        mi = min(f1.get(element, 0), f2.get(element, 0))
        mx = max(f1.get(element, 0), f2.get(element, 0))
        count += mi
        w = weight_func(element)
        w_count += w * mi
        w_total += w * mx
    if count == 0:
        return None
    return 0.0 if w_total == 0.0 else w_count / w_total
Calculate generalized Jaccard similarity of formulas. Returns the weighted similarity value or None if there is no overlap at all. If the union of the formulas has a weight of zero (i.e. the denominator in the Jaccard similarity is zero), a value of zero is returned.
380,587
def choice(self, obj):
    tree_id = getattr(obj, self.queryset.model._mptt_meta.tree_id_attr, 0)
    left = getattr(obj, self.queryset.model._mptt_meta.left_attr, 0)
    return super(MPTTModelChoiceIterator, self).choice(obj) + ((tree_id, left),)
Overloads the choice method to add the position of the object in the tree for future sorting.
380,588
def pop_group(self):
    return Pattern._from_pointer(
        cairo.cairo_pop_group(self._pointer), incref=False)
Terminates the redirection begun by a call to :meth:`push_group` or :meth:`push_group_with_content` and returns a new pattern containing the results of all drawing operations performed to the group. The :meth:`pop_group` method calls :meth:`restore`, (balancing a call to :meth:`save` by the push_group method), so that any changes to the graphics state will not be visible outside the group. :returns: A newly created :class:`SurfacePattern` containing the results of all drawing operations performed to the group.
380,589
def _visual_bounds_at(self, pos, node=None):
    if node is None:
        node = self.scene
    for ch in node.children:
        hit = self._visual_bounds_at(pos, ch)
        if hit is not None:
            return hit
    if (not isinstance(node, VisualNode) or not node.visible or
            not node.interactive):
        return None
    bounds = [node.bounds(axis=i) for i in range(2)]
    if None in bounds:
        return None
    tr = self.scene.node_transform(node).inverse
    corners = np.array([
        [bounds[0][0], bounds[1][0]],
        [bounds[0][0], bounds[1][1]],
        [bounds[0][1], bounds[1][0]],
        [bounds[0][1], bounds[1][1]]])
    bounds = tr.map(corners)
    xhit = bounds[:, 0].min() < pos[0] < bounds[:, 0].max()
    yhit = bounds[:, 1].min() < pos[1] < bounds[:, 1].max()
    if xhit and yhit:
        return node
Find a visual whose bounding rect encompasses *pos*.
380,590
def compile_into_spirv(raw, stage, filepath, language="glsl",
                       optimization='size', suppress_warnings=False,
                       warnings_as_errors=False):
    # NOTE: the default for `optimization` was lost in extraction; 'size' is
    # an assumption (the docstring allows 'zero' or 'size').
    stage = stages_mapping[stage]
    lang = languages_mapping[language]
    opt = opt_mapping[optimization]

    options = lib.shaderc_compile_options_initialize()
    lib.shaderc_compile_options_set_source_language(options, lang)
    lib.shaderc_compile_options_set_optimization_level(options, opt)
    lib.shaderc_compile_options_set_target_env(
        options, lib.shaderc_target_env_vulkan, 0)
    lib.shaderc_compile_options_set_auto_bind_uniforms(options, False)
    lib.shaderc_compile_options_set_include_callbacks(
        options, lib.resolve_callback, lib.release_callback, ffi.NULL)
    if suppress_warnings:
        lib.shaderc_compile_options_set_suppress_warnings(options)
    if warnings_as_errors:
        lib.shaderc_compile_options_set_warnings_as_errors(options)

    compiler = lib.shaderc_compiler_initialize()
    result = lib.shaderc_compile_into_spv(compiler, raw, len(raw), stage,
                                          str.encode(filepath), b"main",
                                          options)

    status = lib.shaderc_result_get_compilation_status(result)
    if status != lib.shaderc_compilation_status_success:
        msg = _get_log(result)
        lib.shaderc_compile_options_release(options)
        lib.shaderc_result_release(result)
        lib.shaderc_compiler_release(compiler)
        raise CompilationError(msg)

    length = lib.shaderc_result_get_length(result)
    output_pointer = lib.shaderc_result_get_bytes(result)

    tmp = bytearray(length)
    ffi.memmove(tmp, output_pointer, length)
    spirv = bytes(tmp)

    lib.shaderc_compile_options_release(options)
    lib.shaderc_result_release(result)
    lib.shaderc_compiler_release(compiler)

    return spirv
Compile shader code into Spir-V binary. This function uses shaderc to compile your glsl or hlsl code into Spir-V code. You can refer to the shaderc documentation. Args: raw (bytes): glsl or hlsl code (bytes format, not str) stage (str): Pipeline stage in ['vert', 'tesc', 'tese', 'geom', 'frag', 'comp'] filepath (str): Absolute path of the file (needed for #include) language (str): 'glsl' or 'hlsl' optimization (str): 'zero' (no optimization) or 'size' (reduce size) suppress_warnings (bool): True to suppress warnings warnings_as_errors (bool): Turn warnings into errors Returns: bytes: Compiled Spir-V binary. Raises: CompilationError: If compilation fails.
380,591
def fetch(self):
    # NOTE: the error-message literal was lost in extraction and is
    # reconstructed.
    if self._fetched is not None:
        raise RuntimeError(
            'fetch() called again before the previous fetch was committed')
    if self._cache:
        self._fetched = len(self._cache)
        return self._cache[0:self._fetched]
Gives all the data it has stored, and remembers what it has given. Later we need to call commit() to actually remove the data from the cache.
380,592
def build_launcher(self, clsname, kind=None):
    # NOTE: the work_dir literal was lost in extraction; u'.' is assumed.
    try:
        klass = find_launcher_class(clsname, kind)
    except (ImportError, KeyError):
        self.log.fatal("Could not import launcher class: %r" % clsname)
        self.exit(1)
    launcher = klass(
        work_dir=u'.', config=self.config, log=self.log,
        profile_dir=self.profile_dir.location,
        cluster_id=self.cluster_id,
    )
    return launcher
import and instantiate a Launcher based on importstring
380,593
def _process_macro_default_arg(self):
    # the match names and groupdict keys below are reconstructed
    # assumptions; the original string literals were stripped in
    # extraction, and the final branch was truncated
    while self._parenthesis_stack:
        match = self._expect_match(
            'macro argument', STRING_PATTERN, NON_STRING_MACRO_ARGS_PATTERN,
            method='match')
        matchgroups = match.groupdict()
        self.advance(match.end())
        if matchgroups.get('string') is not None:
            # a complete string value; keep scanning for more arguments
            continue
        elif matchgroups.get('quote') is not None:
            # an opening quote: rewind and consume the whole string
            self.rewind()
            match = self._expect_match('string', STRING_PATTERN)
            self.advance(match.end())
        elif matchgroups.get('open') is not None:
            # a nested '(' opens another parenthesis level
            self._parenthesis_stack.append(True)
        elif matchgroups.get('close') is not None:
            # a ')' closes the current level; the loop exits once the
            # stack is empty
            self._parenthesis_stack.pop()
Handle the bit after an '=' in a macro default argument. This is probably the trickiest thing. The goal here is to accept all strings jinja would accept and always handle block start/end correctly: It's fine to have false positives, jinja can fail later. Return True if there are more arguments expected.
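For context, some illustrative jinja fragments the scan above must tolerate, shown as Python strings: commas and parentheses inside a quoted default must not end the argument scan, while nested parentheses must balance.

samples = [
    "{% macro m(a='simple') %}",
    "{% macro m(a='has, commas and (parens) inside') %}",
    "{% macro m(a=(1, (2, 3)), b='x') %}",
]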
380,594
def get_order_history(self, market=None):
    # endpoint strings restored from the docstring below; the duplicated
    # option keys ('market' for v2.0, 'marketname' for v1.1) are assumed
    if market:
        return self._api_query(path_dict={
            API_V1_1: '/account/getorderhistory',
            API_V2_0: '/key/market/GetOrderHistory'
        }, options={'market': market, 'marketname': market},
            protection=PROTECTION_PRV)
    else:
        return self._api_query(path_dict={
            API_V1_1: '/account/getorderhistory',
            API_V2_0: '/key/orders/getorderhistory'
        }, protection=PROTECTION_PRV)
Used to retrieve the order/trade history of an account
Endpoint:
1.1 /account/getorderhistory
2.0 /key/orders/getorderhistory or /key/market/GetOrderHistory
:param market: optional; a string literal for the market (e.g. BTC-LTC).
    If omitted, returns history for all markets
:type market: str
:return: order history in JSON
:rtype: dict
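A hedged usage sketch, assuming a Bittrex-style client class; the class name and constructor keyword names are placeholders:

client = Bittrex(api_key='...', api_secret='...')
everything = client.get_order_history()
ltc_only = client.get_order_history(market='BTC-LTC')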
380,595
def update_instance( self, instance, field_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): if "update_instance" not in self._inner_api_calls: self._inner_api_calls[ "update_instance" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs["UpdateInstance"].retry, default_timeout=self._method_configs["UpdateInstance"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.UpdateInstanceRequest( instance=instance, field_mask=field_mask ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("instance.name", instance.name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["update_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata, )
Updates an instance, and begins allocating or releasing resources as requested. The returned ``long-running operation`` can be used to track the progress of updating the instance. If the named instance does not exist, returns ``NOT_FOUND``. Immediately upon completion of this request: - For resource types for which a decrease in the instance's allocation has been requested, billing is based on the newly-requested level. Until completion of the returned operation: - Cancelling the operation sets its metadata's ``cancel_time``, and begins restoring resources to their pre-request values. The operation is guaranteed to succeed at undoing all resource changes, after which point it terminates with a ``CANCELLED`` status. - All other attempts to modify the instance are rejected. - Reading the instance via the API continues to give the pre-request resource levels. Upon completion of the returned operation: - Billing begins for all successfully-allocated resources (some types may have lower than the requested levels). - All newly-reserved resources are available for serving the instance's tables. - The instance's new resource levels are readable via the API. The returned ``long-running operation`` will have a name of the format ``<instance_name>/operations/<operation_id>`` and can be used to track the instance modification. The ``metadata`` field type is ``UpdateInstanceMetadata``. The ``response`` field type is ``Instance``, if successful. Authorization requires ``spanner.instances.update`` permission on resource ``name``. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO: Initialize `instance`: >>> instance = {} >>> >>> # TODO: Initialize `field_mask`: >>> field_mask = {} >>> >>> response = client.update_instance(instance, field_mask) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to update, which must always include the instance name. Otherwise, only fields mentioned in [][google.spanner.admin.instance.v1.UpdateInstanceRequest.field\_mask] need be included. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A mask specifying which fields in [][google.spanner.admin.instance.v1.UpdateInstanceRequest.instance] should be updated. The field mask must always be specified; this prevents any future fields in [][google.spanner.admin.instance.v1.Instance] from being erased accidentally by clients that do not know about them. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types._OperationFuture` instance. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
380,596
def complete(self, text: str) -> Iterable[str]: assert not text.startswith(":") if "/" in text: prefix, suffix = text.split("/", maxsplit=1) results = itertools.chain( self.__complete_alias(prefix, name_in_ns=suffix), self.__complete_imports_and_aliases(prefix, name_in_module=suffix), ) else: results = itertools.chain( self.__complete_alias(text), self.__complete_imports_and_aliases(text), self.__complete_interns(text), self.__complete_refers(text), ) return results
Return an iterable of possible completions for the given text in this namespace.
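A hedged usage sketch; `ns` stands for an instance of the namespace class that defines complete(), and the prefixes are illustrative:

interns = sorted(ns.complete("str"))     # aliases, imports, interns, refers
members = list(ns.complete("math/sq"))   # names inside an alias or module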
380,597
def bulk_create(self, objs, *args, **kwargs):
    if hasattr(self.model, 'save_prep'):
        # let the model class run any pre-save preparation first
        self.model.save_prep(instance_or_instances=objs)
    return super(CommonManager, self).bulk_create(objs=objs, *args, **kwargs)
Insert many objects at once.
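A hedged usage sketch; Widget is a placeholder model whose default manager is a CommonManager:

widgets = [Widget(name='a'), Widget(name='b')]
Widget.objects.bulk_create(widgets)  # save_prep() runs once, before insert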
380,598
def configure_client(
        cls, address: Union[str, Tuple[str, int], Path] = 'localhost',
        port: int = 6379, db: int = 0, password: str = None,
        ssl: Union[bool, str, SSLContext] = False,
        **client_args) -> Dict[str, Any]:
    assert check_argument_types()
    # the 'localhost' default and the '/' socket-path check are assumed;
    # the original string literals were stripped in extraction
    if isinstance(address, str) and not address.startswith('/'):
        address = (address, port)
    elif isinstance(address, Path):
        address = str(address)

    client_args.update({
        'address': address,
        'db': db,
        'password': password,
        'ssl': resolve_reference(ssl)
    })
    return client_args
Configure a Redis client. :param address: IP address, host name or path to a UNIX socket :param port: port number to connect to (ignored for UNIX sockets) :param db: database number to connect to :param password: password used if the server requires authentication :param ssl: one of the following: * ``False`` to disable SSL * ``True`` to enable SSL using the default context * an :class:`~ssl.SSLContext` instance * a ``module:varname`` reference to an :class:`~ssl.SSLContext` instance * name of an :class:`~ssl.SSLContext` resource :param client_args: extra keyword arguments passed to :func:`~aioredis.create_redis_pool`
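A hedged usage sketch of the reconstructed logic above; RedisComponent is a placeholder for the class exposing configure_client(), and the returned dict is meant to feed aioredis.create_redis_pool(**args):

from pathlib import Path

args = RedisComponent.configure_client(address='redis.local', port=6380, db=1)
assert args['address'] == ('redis.local', 6380)  # TCP -> (host, port) tuple

sock = RedisComponent.configure_client(address=Path('/run/redis.sock'))
assert sock['address'] == '/run/redis.sock'      # UNIX socket stays a string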
380,599
def import_source(self, sheet, source, delimiter=","):
    # the stripped character literal is assumed to be '/', which is not
    # allowed in Excel sheet names
    if '/' in sheet:
        raise RuntimeError("Error sheet name: %s" % sheet)
    if not source.endswith("txt") and not source.endswith("csv"):
        raise RuntimeError("Error source name: %s" % source)
    self.source_sheet = sheet
    source_data = np.loadtxt(source, dtype=str, delimiter=delimiter)
    self.source_data = {"title": source_data[0].tolist(),
                        "data": source_data[1:]}
    # the format keys and font name are reconstructed assumptions; only
    # the values True/False, a u'' string, and 45/0 survived extraction
    cell_format_title = self.workbook.add_format(
        {'bold': True, 'font_name': u'Arial', 'align': 'center',
         'rotation': 45})
    cell_format = self.workbook.add_format(
        {'bold': False, 'font_name': u'Arial', 'rotation': 0})
    worksheet = self.workbook.add_worksheet(sheet)
    worksheet.write_row('A1', self.source_data['title'], cell_format_title)
    _, col_num = self.source_data['data'].shape
    for i in range(col_num):
        try:
            data_array = self.source_data['data'][:, i].astype(float)
        except ValueError:
            data_array = self.source_data['data'][:, i]
        worksheet.write_column(1, i, data_array.tolist(), cell_format)
Function: Save the original data into the given sheet, converting each
column to float where possible
Input:
    sheet: Name of a sheet that does not already exist
    source: File path of the source data (.txt or .csv)
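A hedged usage sketch; Report is a placeholder for the class that owns import_source() and a `workbook` attribute, and the file names are illustrative:

report = Report('summary.xlsx')
report.import_source('raw_data', 'measurements.csv', delimiter=',')
report.workbook.close()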