Dataset columns: Unnamed: 0 (int64, 0 to 389k) | code (string, 26 to 79.6k chars) | docstring (string, 1 to 46.9k chars)
8,100
def ko_data(queryset, field_names=None, name=None, safe=False, return_json=False):
    try:
        try:
            queryset_instance = queryset[0]
        except TypeError:
            queryset_instance = queryset
            queryset = [queryset]
        except IndexError:
            if not isinstance(queryset, list):
                queryset_instance = queryset.model
            else:
                return
        modelName = queryset_instance.__class__.__name__
        modelNameData = []
        if field_names is not None:
            fields = field_names
        else:
            fields = get_fields(queryset_instance)
        for obj in queryset:
            object_data = get_object_data(obj, fields, safe)
            modelNameData.append(object_data)
        if name:
            modelNameString = name
        else:
            modelNameString = modelName + "Data"
        dthandler = (lambda obj: obj.isoformat()
                     if isinstance(obj, (datetime.date, datetime.datetime)) else None)
        dumped_json = json.dumps(modelNameData, default=dthandler)
        if return_json:
            return dumped_json
        return "var " + modelNameString + " = " + dumped_json + ";"  # trailing ";" assumed; literal lost in extraction
    except Exception as e:
        logger.exception(e)
        return
Given a QuerySet, return just the serialized representation based on the knockout_fields as JavaScript.
8,101
def _matrix_integration(q, h, t):
    N = len(q)
    # correct for numerical error in the tail of h
    if h[-1] < 0.9:
        h[-1] = 1.0
    W = np.zeros([N, N])
    for i in range(N):
        W[i, i] = 0.5 * (h[min(i + 1, N - 1)] - h[max(i - 1, 0)])
    dp = (q - t).T.dot(W).dot(q - t)
    return dp
Returns the dp metric for a single horsetail curve at a given value of the epistemic uncertainties
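A minimal usage sketch of the integration above, assuming only numpy; the q, h and t arrays are made-up illustration data, not values from the original project.

import numpy as np

# hypothetical quantile samples q, CDF heights h, and target curve t
q = np.linspace(0.0, 1.0, 11)
h = np.linspace(0.0, 1.0, 11)      # h[-1] is clamped to 1.0 inside the function
t = np.zeros_like(q)
dp = _matrix_integration(q, h, t)  # scalar: (q-t)' W (q-t) with trapezoidal weights
print(dp)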
8,102
def set_lock(key, value=None, expiry_time=60):
    from uliweb.utils.common import get_uuid
    redis = get_redis()
    value = value or get_uuid()
    return redis.set(key, value, ex=expiry_time, xx=True)
Force-set a distributed lock
8,103
def write_lst(self):
    ret = False
    out = ''
    system = self.system
    dae = self.system.dae
    varname = self.system.varname
    # format template and the Time header row are assumed; literals lost in extraction
    template = '{:>6g}, {:>25s}, {:>35s}\n'
    out += template.format(0, 'Time [s]', '$Time\\ [s]$')
    nflows = 0
    if self.system.tds.config.compute_flows:
        nflows = (2 * self.system.Bus.n +
                  8 * self.system.Line.n +
                  2 * self.system.Area.n_combination)
    if system.Recorder.n == 0:
        state_idx = list(range(dae.n))
        algeb_idx = list(range(dae.n, dae.n + dae.m + nflows))
        idx = state_idx + algeb_idx
    else:
        idx = system.Recorder.varout_idx
    uname = varname.unamex + varname.unamey
    fname = varname.fnamex + varname.fnamey
    for e, i in enumerate(idx):
        out += template.format(e + 1, uname[i], fname[i])
    try:
        with open(self.system.files.lst, 'w') as f:
            f.write(out)
        ret = True
    except IOError:
        logger.error('I/O error while writing the lst file.')
    return ret
Dump the variable name lst file. :return: success flag
8,104
def get_impls(interfaces):
    if interfaces is None:
        return None
    elif isinstance(interfaces, Mapping):
        return {name: interfaces[name]._impl for name in interfaces}
    elif isinstance(interfaces, Sequence):
        # loop variable renamed; the original shadowed the argument name
        return [interface._impl for interface in interfaces]
    else:
        return interfaces._impl
Get impls from their interfaces.
8,105
def log_pdf(self, y, mu, weights=None):
    if weights is None:
        weights = np.ones_like(mu)
    scale = self.scale / weights
    return sp.stats.norm.logpdf(y, loc=mu, scale=scale)
computes the log of the pdf or pmf of the values under the current distribution Parameters ---------- y : array-like of length n target values mu : array-like of length n expected values weights : array-like shape (n,) or None, default: None sample weights if None, defaults to array of ones Returns ------- pdf/pmf : np.array of length n
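A small sketch of the same computation done directly with scipy, assuming self.scale == 1.0; it shows how per-sample weights shrink the effective scale:

import numpy as np
import scipy.stats

y = np.array([0.9, 2.1, 3.2])
mu = np.array([1.0, 2.0, 3.0])
weights = np.array([1.0, 4.0, 1.0])   # weight 4 quarters the scale of the middle sample
scale = 1.0 / weights                 # self.scale assumed to be 1.0 here
print(scipy.stats.norm.logpdf(y, loc=mu, scale=scale))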
8,106
def get_absolute_url(self, endpoint):
    copy = deepcopy(self.extra_data)
    if 'recid' in copy:  # key names assumed from the docstring example; literals lost in extraction
        copy['pid_value'] = copy.pop('recid')
    return url_for(
        endpoint, token=self.token, _external=True, **(copy or {})
    )
Get absolute URL for a secret link (using https scheme). The endpoint is passed to ``url_for`` with ``token`` and ``extra_data`` as keyword arguments. E.g.:: >>> link.extra_data dict(recid=1) >>> link.get_absolute_url('record.metadata') translates into:: >>> url_for('record.metadata', token="...", recid=1, )
8,107
def substitute_variables(command, level, name, value, target=None, **kwargs):
    # placeholder and key names below are assumed; literals lost in extraction
    rule = kwargs.get('rule', {})
    rule_value = rule.get('value', '') if rule else ''
    substitutes = {
        '${level}': str(level),
        '${target}': str(target),
        '${name}': '"' + str(name) + '"',
        '${value}': str(value),
        '${limit_value}': str(rule_value),
    }
    result = command
    for pattern, value in substitutes.items():
        result = result.replace(pattern, value)
    return result
Substitute variables in command fragments by values e.g. ${level} => 'warning'.
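A quick illustration of the substitution, using the placeholder names reconstructed above (they are assumptions, not confirmed literals):

command = 'notify --level ${level} --check ${name} --value ${value}'
print(substitute_variables(command, 'warning', 'cpu', 93.5))
# -> 'notify --level warning --check "cpu" --value 93.5'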
8,108
def catalog(self):
    if self._catalog is None:
        logger.debug("SuperModel::catalog: *Fetch catalog*")
        self._catalog = self.get_catalog_for(self.brain)
    return self._catalog
Primary registered catalog for the wrapped portal type
8,109
def find_previous_siblings(self, *args, **kwargs):
    op = operator.methodcaller('find_previous_siblings', *args, **kwargs)  # method name assumed from the docstring
    return self._wrap_multi(op)
Like :meth:`find_all`, but searches through :attr:`previous_siblings`
8,110
def aggregate_in(Data, On=None, AggFuncDict=None, AggFunc=None, AggList=None, interspersed=True):
Aggregate a ndarray with structured dtype or recarray and include original data in the result. Take aggregate of data set on specified columns, then add the resulting rows back into data set to make a composite object containing both original non-aggregate data rows as well as the aggregate rows. First read comments for :func:`tabular.spreadsheet.aggregate`. This function returns a numpy ndarray, with the number of rows equaling:: len(Data) + len(A) where `A` is the result of:: Data.aggregate(On,AggFuncDict) `A` represents the aggregate rows; the other rows were the original data rows. This function supports _multiple_ aggregation, meaning that one can first aggregate on one set of factors, then repeat aggregation on the result for another set of factors, without the results of the first aggregation interfering with the second. To achieve this, the method adds two new columns: * a column called "__aggregates__" specifying on which factors the rows that are aggregate rows were aggregated. Rows added by aggregating on factor `A` (a column in the original data set) will have `A` in the "__aggregates__" column. When multiple factors `A1`, `A2`, ... are aggregated on, the notation is a comma-separated list: `A1,A2,...`. This way, when you call `aggregate_in` again, the function only aggregates on the columns that have the empty char '' in their "__aggregates__" column. * a column called '__color__', specifying Gray-Scale colors for aggregated rows that will be used by the Data Environment system browser for colorizing the data. When there are multiple levels of aggregation, the coarser aggregate groups (e.g. on fewer factors) get darker gray color than those on finer aggregate groups (e.g. more factors). Implemented by the tabarray method :func:`tabular.tab.tabarray.aggregate_in`. **Parameters** **Data** : numpy ndarray with structured dtype or recarray The data set to aggregate in. **On** : list of strings, optional List of column names in `X`. **AggFuncDict** : dictionary, optional Dictionary where * keys are some (all) column names of `X` that are NOT in `On` * values are functions that can be applied to lists or numpy arrays. This specifies how to aggregate the factors _not_ listed in `On`, e.g. the so-called `Off` columns. **AggFunc** : function, optional Function that can be applied to lists or numpy arrays, specifying how to aggregate factors not listed in either `On` or the keys of `AggFuncDict`, e.g. a "default" aggregation function for the `Off` columns not explicitly listed in `AggFuncDict`. **interspersed** : boolean, optional * If `True`, aggregate rows are interleaved with the data of which they are aggregates. * If `False`, all aggregate rows are placed at the end of the array. **Returns** **agg** : numpy ndarray with structured dtype Composite aggregated data set plus original data set. **See also:** :func:`tabular.spreadsheet.aggregate`
8,111
def force_type(cls, response, environ=None):
    if not isinstance(response, BaseResponse):
        if environ is None:
            raise TypeError(
                "cannot convert WSGI application into response"
                " objects without an environ"
            )
        response = BaseResponse(*_run_wsgi_app(response, environ))
    response.__class__ = cls
    return response
Enforce that the WSGI response is a response object of the current type. Werkzeug will use the :class:`BaseResponse` internally in many situations like the exceptions. If you call :meth:`get_response` on an exception you will get back a regular :class:`BaseResponse` object, even if you are using a custom subclass. This method can enforce a given response type, and it will also convert arbitrary WSGI callables into response objects if an environ is provided:: # convert a Werkzeug response object into an instance of the # MyResponseClass subclass. response = MyResponseClass.force_type(response) # convert any WSGI application into a response object response = MyResponseClass.force_type(response, environ) This is especially useful if you want to post-process responses in the main dispatcher and use functionality provided by your subclass. Keep in mind that this will modify response objects in place if possible! :param response: a response object or wsgi application. :param environ: a WSGI environment object. :return: a response object.
8,112
def update_history(self) -> None:
    self.log.debug(f"Saving history. History is: \n{self.history}")
    jsons = []
    for item in self.history:
        json_item = item.__dict__
        json_item["output_records"] = self._parse_output_records(item)
        jsons.append(json_item)
    if not path.isfile(self.history_filename):
        open(self.history_filename, "a+").close()
    with open(self.history_filename, "w") as f:
        json.dump(jsons, f, default=lambda x: x.__dict__.copy(),
                  sort_keys=True, indent=4)
        f.write("\n")
Update messaging history on disk. :returns: None
8,113
def create(self, session):
    sessionid = super().create(session)
    self._cache(session, sessionid)
    return sessionid
caches the session and caches an entry to associate the cached session with the subject
8,114
def trim(self):
    if self.hasText():
        self.text = self.text.trim()
    return self
Trim leading and trailing whitespace. @return: self @rtype: L{Element}
8,115
def get_value(self, subsystem, option):
    assert subsystem in self, 'Subsystem {} is missing'.format(subsystem)  # message assumed; literal lost in extraction
    return util.read_file(self.per_subsystem[subsystem], subsystem + '.' + option)
Read the given value from the given subsystem. Do not include the subsystem name in the option name. Only call this method if the given subsystem is available.
8,116
def query_status(self):
    try:
        data = self.api_iface._api_get(self.link)
        self._update_details(data)
    except APIError as e:
        print("API error: ")
        for key, value in e.data.items():  # iteritems was missing its call parentheses; items() is the Python 3 form
            print(str(key) + ": " + str(value))
Query the hub for the status of this command
8,117
def ef_plugin(service_name):
    def class_rebuilder(cls):
        class EFPlugin(cls):
            def __init__(self, context, clients):
                self.service = service_name
                self.context = context
                self.clients = clients
                self.oInstance = cls()

            def __getattribute__(self, s):
                # try the wrapper first, fall back to the wrapped instance
                try:
                    x = super(EFPlugin, self).__getattribute__(s)
                except AttributeError:
                    pass
                else:
                    return x
                return self.oInstance.__getattribute__(s)

        return EFPlugin
    return class_rebuilder
Decorator for ef plugin classes. Any wrapped classes should contain a run() method which executes the plugin code. Args: service_name (str): The name of the service being extended. Example: @ef_plugin('ef-generate') class NewRelicPlugin(object): def run(self): exec_code()
8,118
def clusters(points, radius):
    from . import graph
    tree = cKDTree(points)
    pairs = tree.query_pairs(r=radius, output_type='set')  # output_type assumed; literal lost in extraction
    groups = graph.connected_components(pairs)
    return groups
Find clusters of points which have neighbours closer than radius Parameters --------- points : (n, d) float Points of dimension d radius : float Max distance between points in a cluster Returns ---------- groups : (m,) sequence of int Indices of points in a cluster
8,119
def czdivide(a, b, null=0):
    if null == 0:
        return a.multiply(zinv(b)) if sps.issparse(a) else a * zinv(b)
    elif sps.issparse(b):
        b = b.toarray()
    else:
        b = np.asarray(b)
    z = np.isclose(b, 0)
    q = np.logical_not(z)
    zi = q / (b + z)
    if sps.issparse(a):
        r = a.multiply(zi).tocsr()
    else:
        r = np.asarray(a) * zi
    r[np.ones(a.shape, dtype=bool) * z] = null  # np.bool is removed in modern numpy; plain bool is equivalent
    return r
czdivide(a, b) returns the quotient a / b as a numpy array object. Like numpy's divide function or a/b syntax, czdivide will thread over the latest dimension possible. Unlike numpy's divide, czdivide works with sparse matrices. Additionally, czdivide multiplies a by the zinv of b, so divide-by-zero entries are replaced with 0 in the result. The optional argument null (default: 0) may be given to specify that zeros in the array b should instead be replaced with the given value in the result. Note that if this value is not equal to 0, then any sparse array passed as argument b must be reified. The czdivide function never raises an error due to divide-by-zero; if you desire this behavior, use the cdivide function instead.
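A dense-only sketch of the same zero-safe division, assuming plain numpy arrays (no scipy.sparse) and inlining a minimal zinv:

import numpy as np

def zinv(x):
    # minimal stand-in: reciprocal with zeros mapped to zero
    x = np.asarray(x, dtype=float)
    z = np.isclose(x, 0)
    return np.logical_not(z) / (x + z)

a = np.array([1.0, 2.0, 3.0])
b = np.array([2.0, 0.0, 4.0])
print(a * zinv(b))   # [0.5, 0.0, 0.75] -- divide-by-zero becomes 0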
8,120
def create_message(self, params={}):
    url = "/2/messages/"
    body = params
    data = self._post_resource(url, body)
    return self.message_from_json(data["message"])
Creates a message http://dev.wheniwork.com/#create/update-message
8,121
def _manipulate(self, *args, **kwargs): self.connection._manipulate(self, *args, **kwargs)
This is a semi-private method. Its current use is to manipulate memory file system objects so that you can create certain conditions, to provoke errors that otherwise won't occur.
8,122
def create(self, to, from_, parameters=values.unset):
    # dict keys and HTTP method assumed from the Twilio client conventions; literals lost in extraction
    data = values.of({
        'To': to,
        'From': from_,
        'Parameters': serialize.object(parameters),
    })
    payload = self._version.create(
        'POST',
        self._uri,
        data=data,
    )
    return ExecutionInstance(self._version, payload,
                             flow_sid=self._solution['flow_sid'])
Create a new ExecutionInstance :param unicode to: The Contact phone number to start a Studio Flow Execution. :param unicode from_: The Twilio phone number to send messages or initiate calls from during the Flow Execution. :param dict parameters: JSON data that will be added to your flow's context and can be accessed as variables inside your flow. :returns: Newly created ExecutionInstance :rtype: twilio.rest.studio.v1.flow.execution.ExecutionInstance
8,123
def multipointm(self, points):
    shapeType = MULTIPOINTM
    points = [points]
    self._shapeparts(parts=points, shapeType=shapeType)
Creates a MULTIPOINTM shape. Points is a list of xym values. If the m (measure) value is not included, it defaults to None (NoData).
8,124
def parse_issues(raw_page):
    raw_issues = json.loads(raw_page)
    issues = raw_issues['issues']
    for issue in issues:
        yield issue
Parse a JIRA API raw response. The method parses the API response retrieving the issues from the received items :param raw_page: items from where to parse the issues :returns: a generator of issues
8,125
def main(argv=None):
    args = parse_arguments(sys.argv if argv is None else argv)
    tf.logging.set_verbosity(tf.logging.INFO)
    learn_runner.run(
        experiment_fn=get_experiment_fn(args),
        output_dir=args.job_dir)
Run a Tensorflow model on the Iris dataset.
8,126
def underline(self, msg): return click.style(msg, underline=True) if self.colorize else msg
Underline the input
8,127
def setup_logger():
    formatter = ColoredFormatter(
        "%(log_color)s%(levelname)-8s%(reset)s %(blue)s%(message)s",
        datefmt=None,
        reset=True,
        log_colors={
            # mapping assumed from the standard colorlog example; literals lost in extraction
            'DEBUG': 'cyan',
            'INFO': 'green',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'red,bg_white',
        }
    )
    logger = logging.getLogger()
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    return logger
Return a logger with a default ColoredFormatter.
8,128
def _extract_field_with_regex(self, field):
    matched = re.search(field, self.text)
    if not matched:
        err_msg = u"Failed to extract data with regex! => {}\n".format(field)
        err_msg += u"response body: {}\n".format(self.text)
        logger.log_error(err_msg)
        raise exceptions.ExtractFailure(err_msg)
    return matched.group(1)
extract field from response content with regex. requests.Response body could be json or html text. Args: field (str): regex string that matched r".*\(.*\).*" Returns: str: matched content. Raises: exceptions.ExtractFailure: If no content matched with regex. Examples: >>> # self.text: "LB123abcRB789" >>> field = "LB[\d]*(.*)RB[\d]*" >>> _extract_field_with_regex(field) abc
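The heart of the extraction is a single re.search with one capture group; a standalone sketch of the docstring's example:

import re

text = "LB123abcRB789"
field = r"LB[\d]*(.*)RB[\d]*"
matched = re.search(field, text)
print(matched.group(1))   # -> 'abc'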
8,129
def wrap_targets(self, targets, topological_order=False):
    def vt_iter():
        if topological_order:
            target_set = set(targets)
            sorted_targets = [t for t in reversed(sort_targets(targets))
                              if t in target_set]
        else:
            sorted_targets = sorted(targets)
        for target in sorted_targets:
            target_key = self._key_for(target)
            if target_key is not None:
                yield VersionedTarget(self, target, target_key)
    return list(vt_iter())
Wrap targets and their computed cache keys in VersionedTargets. If the FingerprintStrategy opted out of providing a fingerprint for a target, that target will not have an associated VersionedTarget returned. Returns a list of VersionedTargets, each representing one input target.
8,130
def run_actor(self, actor):
    set_actor(actor)
    if not actor.mailbox.address:
        address = ('127.0.0.1', 0)  # loopback host assumed; literal lost in extraction
        actor._loop.create_task(
            actor.mailbox.start_serving(address=address)
        )
    actor._loop.run_forever()
Start running the ``actor``.
8,131
def add_arg(self, arg):
    if not isinstance(arg, File):
        arg = str(arg)
    self._args += [arg]
Add an argument
8,132
def split_from_df(self, col: IntsOrStrs = 2):
    "Split the data from the `col` in the dataframe in `self.inner_df`."
    valid_idx = np.where(self.inner_df.iloc[:, df_names_to_idx(col, self.inner_df)])[0]
    return self.split_by_idx(valid_idx)
Split the data from the `col` in the dataframe in `self.inner_df`.
8,133
def _to_dict(self):
    _dict = {}
    if hasattr(self, 'key') and self.key is not None:
        _dict['key'] = self.key._to_dict()
    if hasattr(self, 'value') and self.value is not None:
        _dict['value'] = self.value._to_dict()
    return _dict
Return a json dictionary representing this model.
8,134
def get_detail(self):
    response = self._post("http://bkjws.sdu.edu.cn/b/grxx/xs/xjxx/detail", data=None)
    if response['result'] == 'success':  # key and value assumed; literals lost in extraction
        self._detail = response['object']
        return self._detail
    else:
        self._unexpected(response)
Personal information; the return value is also saved in self._detail. :return: information of student :rtype: dict
8,135
def lms(args):
    from random import randint
    from jcvi.graphics.chromosome import HorizontalChromosome

    p = OptionParser(lms.__doc__)
    opts, args, iopts = p.set_image_options(args, figsize="6x6", dpi=300)

    fig = plt.figure(1, (iopts.w, iopts.h))
    root = fig.add_axes([0, 0, 1, 1])

    w, h = .7, .35
    ax = fig.add_axes([.15, .6, w, h])

    xdata = [x + randint(-3, 3) for x in range(10, 110, 10)]
    ydata = [x + randint(-3, 3) for x in range(10, 110, 10)]
    ydata[3:7] = ydata[3:7][::-1]
    xydata = list(zip(xdata, ydata))  # list() so the pairs can be sliced under Python 3
    lis = xydata[:3] + [xydata[4]] + xydata[7:]
    lds = xydata[3:7]
    xlis, ylis = zip(*lis)
    xlds, ylds = zip(*lds)
    ax.plot(xlis, ylis, "r-", lw=12, alpha=.3,
            solid_capstyle="round", solid_joinstyle="round")
    ax.plot(xlds, ylds, "g-", lw=12, alpha=.3,
            solid_capstyle="round", solid_joinstyle="round")
    ax.plot(xdata, ydata, "k.", mec="k", mfc="w", mew=3, ms=12)
    HorizontalChromosome(root, .15, .15 + w, .57, height=.02, lw=2)
    root.text(.15 + w / 2, .55, "Chromosome location (bp)", ha="center", va="top")
    ax.text(80, 30, "LIS = 7", color="r", ha="center", va="center")
    ax.text(80, 20, "LDS = 4", color="g", ha="center", va="center")
    ax.text(80, 10, "LMS = $max$(LIS, LDS) = 7", ha="center", va="center")
    normalize_lms_axis(ax, xlim=110, ylim=110)

    w = .37
    p = (0, 45, 75, 110)
    ax = fig.add_axes([.1, .12, w, h])
    xdata = [x for x in range(10, 110, 10)]
    ydata = ydata_orig = [x for x in range(10, 110, 10)]
    ydata = ydata[:4] + ydata[7:] + ydata[4:7][::-1]
    xydata = list(zip(xdata, ydata))
    lis = xydata[:7]
    xlis, ylis = zip(*lis)
    ax.plot(xlis, ylis, "r-", lw=12, alpha=.3,
            solid_capstyle="round", solid_joinstyle="round")
    ax.plot(xdata, ydata, "k.", mec="k", mfc="w", mew=3, ms=12)
    ax.vlines(p, 0, 110, colors="beige", lw=3)
    normalize_lms_axis(ax, xlim=110, ylim=110)
    patch = [.1 + w * x / 110. for x in p]
    HorizontalChromosome(root, .1, .1 + w, .09, patch=patch, height=.02, lw=2)
    scaffolds = ("a", "b", "c")
    for i, s in enumerate(scaffolds):
        xx = (patch[i] + patch[i + 1]) / 2
        root.text(xx, .09, s, va="center", ha="center")
    root.text(.1 + w / 2, .04, "LMS($a||b||c$) = 7", ha="center")

    ax = fig.add_axes([.6, .12, w, h])
    patch = [.6 + w * x / 110. for x in p]
    ydata = ydata_orig
    ax.plot(xdata, ydata, "r-", lw=12, alpha=.3,
            solid_capstyle="round", solid_joinstyle="round")
    ax.plot(xdata, ydata, "k.", mec="k", mfc="w", mew=3, ms=12)
    ax.vlines(p, [0], [110], colors="beige", lw=3)
    normalize_lms_axis(ax, xlim=110, ylim=110)
    HorizontalChromosome(root, .6, .6 + w, .09, patch=patch, height=.02, lw=2)
    scaffolds = ("a", "-c", "b")
    for i, s in enumerate(scaffolds):
        xx = (patch[i] + patch[i + 1]) / 2
        root.text(xx, .09, s, va="center", ha="center")
    root.text(.6 + w / 2, .04, "LMS($a||-c||b$) = 10", ha="center")

    labels = ((.05, .95, "A"), (.05, .48, "B"), (.55, .48, "C"))  # panel letters assumed; literals lost in extraction
    panel_labels(root, labels)
    normalize_axes(root)

    pf = "lms"
    image_name = pf + "." + iopts.format
    savefig(image_name, dpi=iopts.dpi, iopts=iopts)
%prog lms ALLMAPS cartoon to illustrate LMS metric.
8,136
def read_cell(self, x, y):
    if isinstance(self.header[y], tuple):
        header = self.header[y][0]
    else:
        header = self.header[y]
    x += 1
    y += 1
    if self.strip:
        # the original fell through without returning after stripping; strip in place, then return
        self._sheet.cell(x, y).value = self._sheet.cell(x, y).value.strip()
    return {header: self._sheet.cell(x, y).value}
Reads the cell at position x+1 and y+1; return value :param x: line index :param y: column index :return: {header: value}
8,137
def get_app_template(name):
    app_name, template_name = name.split(':')  # colon separator per the docstring
    return get_lookups()[app_name].get_template(template_name)
Getter function for templates of each application. Argument `name` is interpreted as colon-separated: the left part is the application name, the right part is a template name. get_app_template('blog:dashboard.mako') It will return a template for the dashboard page of the `blog` application.
8,138
def typeseq(types):
    ret = ""
    for t in types:
        ret += termcap.get(fmttypes[t])
    return ret
Returns an escape for a terminal text formatting type, or a list of types. Valid types are: * 'i' for 'italic' * 'b' for 'bold' * 'u' for 'underline' * 'r' for 'reverse'
8,139
def clear_cache():
    del Cache._keys
    for k in list(Cache._cache.keys()):
        it = Cache._cache.pop(k)
        del it
    del Cache._cache
    Cache._keys = []
    Cache._cache = {}
    gc.collect()
Remove all cached objects
8,140
def delete_asset(self):
    headers, data = self._requester.requestJsonAndCheck(
        "DELETE",
        self.url
    )
    return True
Delete asset from the release. :rtype: bool
8,141
def finalize(self):
    if self.result:
        self.result = sorted(self.result, key=lambda x: x[0])
        p, r = map(list, zip(*self.result))
        self.result = r
finalize simulation for consumer
8,142
def stream_header(self, f):
    # struct format and the leading fields are assumed; text lost in extraction
    stream_struct("L##LLL", f, self.version, self.previous_block_hash,
                  self.merkle_root, self.timestamp, self.difficulty, self.nonce)
Stream the block header in the standard way to the file-like object f.
8,143
def auth(self, user, pwd):
    pwdHash = base64.b64encode(
        hashlib.md5((user + "\nskyper\n" + pwd).encode("utf-8")).digest()
    ).decode("utf-8")
    json = self.conn("POST", "{0}/login/skypetoken".format(SkypeConnection.API_USER),
                     json={"username": user, "passwordHash": pwdHash,
                           "scopes": "client"}).json()
    if "skypetoken" not in json:
        raise SkypeAuthException("Couldn't retrieve Skype token from response")
    expiry = None
    if "expiresIn" in json:
        expiry = datetime.fromtimestamp(int(time.time()) + int(json["expiresIn"]))
    return json["skypetoken"], expiry
Perform a login with the given Skype username and its password. This emulates a login to Skype for Web on ``api.skype.com``. Args: user (str): username of the connecting account pwd (str): password of the connecting account Returns: (str, datetime.datetime) tuple: Skype token, and associated expiry if known Raises: .SkypeAuthException: if the login request is rejected .SkypeApiException: if the login form can't be processed
8,144
def set_project_pid(project, old_pid, new_pid):
    for datastore in _get_datastores():
        datastore.save_project(project)
        datastore.set_project_pid(project, old_pid, new_pid)
Project's PID was changed.
8,145
def draw_on_image(self, image, color=(0, 255, 0), alpha=1.0, size=1,
                  copy=True, raise_if_out_of_image=False, thickness=None):
    image = np.copy(image) if copy else image
    for bb in self.bounding_boxes:
        image = bb.draw_on_image(
            image,
            color=color,
            alpha=alpha,
            size=size,
            copy=False,
            raise_if_out_of_image=raise_if_out_of_image,
            thickness=thickness
        )
    return image
Draw all bounding boxes onto a given image. Parameters ---------- image : (H,W,3) ndarray The image onto which to draw the bounding boxes. This image should usually have the same shape as set in BoundingBoxesOnImage.shape. color : int or list of int or tuple of int or (3,) ndarray, optional The RGB color of all bounding boxes. If a single int ``C``, then that is equivalent to ``(C,C,C)``. alpha : float, optional Alpha/transparency of the bounding box. size : int, optional Thickness in pixels. copy : bool, optional Whether to copy the image before drawing the bounding boxes. raise_if_out_of_image : bool, optional Whether to raise an exception if any bounding box is outside of the image. thickness : None or int, optional Deprecated. Returns ------- image : (H,W,3) ndarray Image with drawn bounding boxes.
8,146
def zero_disk(self, disk_xml=None):
    troubled_disks = 0
    for filer_disk in disk_xml:
        # element names, state values and metric name below are assumed; literals lost in extraction
        raid_state = filer_disk.find('raid-state').text
        if not raid_state == 'spare':
            continue
        is_zeroed = filer_disk.find('is-zeroed').text
        if is_zeroed == 'false':
            troubled_disks += 1
    self.push('not_zeroed_disks', 'disk', troubled_disks)
Collect and publish not-zeroed disk metrics
8,147
def close(self):
    if self.parent:
        self.parent.update(self.parent.offset + self.offset)
        return
    self.output.write("\n")
    self.output.flush()
Stop overwriting display, or update parent.
8,148
def get_version():
    # the quoting group and error text below are assumed; literals lost in extraction
    version_regex = re.compile(
        "(?P<q>['\"])(?P<version>\\d+(\\.\\d+)*(-(alpha|beta|rc)(\\.\\d+)?)?)(?P=q)"
    )
    with open(init_location) as init_file:
        match = version_regex.search(init_file.read())
    if not match:
        raise RuntimeError(
            "Couldn't read version information from {}".format(init_location))
    return match.group('version')
Read version from __init__.py
8,149
def get_assets_by_query(self, asset_query=None): return AssetList(self._provider_session.get_assets_by_query(asset_query), self._config_map)
Gets a list of ``Assets`` matching the given asset query. arg: asset_query (osid.repository.AssetQuery): the asset query return: (osid.repository.AssetList) - the returned ``AssetList`` raise: NullArgument - ``asset_query`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - the ``asset_query`` is not of this service *compliance: mandatory -- This method must be implemented.*
8,150
def default(self, obj):
    if hasattr(obj, 'strftime'):
        return obj.strftime("%Y-%m-%dT%H:%M:%SZ")
    elif hasattr(obj, 'get_public_dict'):
        return obj.get_public_dict()
    else:
        return json.JSONEncoder.default(self, obj)
Use the default behavior unless the object to be encoded has a `strftime` attribute.
8,151
def install_service(instance, dbhost, dbname, port):
    _check_root()
    log("Installing systemd service")
    # path fragments, dict keys, template paths and systemctl verbs below are assumed; literals lost in extraction
    launcher = os.path.realpath(__file__).replace('manage', 'launcher')
    executable = sys.executable + " " + launcher
    executable += " --instance " + instance
    executable += " --dbname " + dbname + " --dbhost " + dbhost
    executable += " --port " + port
    executable += " --dolog --logfile /var/log/hfos-" + instance + ".log"
    executable += " --logfileverbosity 30 -q"
    definitions = {
        'instance': instance,
        'executable': executable
    }
    service_name = 'hfos-' + instance + '.service'
    write_template_file(os.path.join(template_dir, service_template),
                        os.path.join('/etc/systemd/system/', service_name),
                        definitions)
    Popen([
        'systemctl',
        'enable',
        service_name
    ])
    log('Enabled service')
    Popen([
        'systemctl',
        'start',
        service_name
    ])
    log("Done: Install Service")
Install systemd service configuration
8,152
def synchronise_signals(in_signal_1, in_signal_2):
    mean_1, std_1, mean_2, std_2 = [np.mean(in_signal_1), np.std(in_signal_1),
                                    np.mean(in_signal_2), np.std(in_signal_2)]
    signal_1 = in_signal_1 - mean_1
    signal_1 /= std_1
    signal_2 = in_signal_2 - mean_2
    signal_2 /= std_2
    correlation = np.correlate(signal_1, signal_2, 'full')  # mode assumed from the docstring ("full cross correlation")
    center = (len(correlation) - len(signal_1)
              if len(signal_1) < len(signal_2)
              else len(correlation) - len(signal_2))
    max_position = correlation.argmax()
    phase_straight = center - max_position
    max_position_reversed = correlation[::-1].argmax()
    phase_reversed = center - max_position_reversed
    phases_aux = [phase_straight, phase_reversed]
    phase = np.abs(phases_aux).argmax()
    true_phase = np.abs(phases_aux[phase])
    if phases_aux[0] < phases_aux[1]:
        signal_1 = signal_1[true_phase:]
    else:
        signal_2 = signal_2[true_phase:]
    result_signal_1 = signal_1 * std_1 + mean_1
    result_signal_2 = signal_2 * std_2 + mean_2
    return true_phase, result_signal_1, result_signal_2
----- Brief ----- This function synchronises the input signals using the full cross-correlation function between the signals. ----------- Description ----------- Signals acquired with two devices may be dephased. It is possible to synchronise the two signals by multiple methods. Here, a method is implemented that uses the full cross-correlation between the signals to identify the correct instant of synchrony. This function synchronises the two input signals and returns the dephasing between them, and the resulting synchronised signals. ---------- Parameters ---------- in_signal_1 : list or numpy.array One of the input signals. in_signal_2 : list or numpy.array The other input signal. Returns ------- phase : int The dephasing between signals in data points. result_signal_1: list or numpy.array The first signal synchronised. result_signal_2: list or numpy.array The second signal synchronised.
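A self-contained sanity check, assuming numpy only: two identical sine signals, one delayed by 25 samples, should come back with a phase of 25:

import numpy as np

t = np.linspace(0, 4 * np.pi, 400)
sig = np.sin(t)
delayed = np.roll(sig, 25)          # simulate a 25-sample dephasing
phase, s1, s2 = synchronise_signals(delayed, sig)
print(phase)                        # expected: 25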
8,153
def _advance_to_next_stage(self, config_ids, losses):
    ranks = np.argsort(np.argsort(losses))
    return ranks < self.num_configs[self.stage]
SuccessiveHalving simply continues the best based on the current loss.
8,154
def _parse_incval(incunit, incval):
    try:
        retn = [int(val) for val in incval.split(',')]  # comma separator per the docstring
    except ValueError:
        return None
    return retn[0] if len(retn) == 1 else retn
Parse a non-day increment value. Should be an integer or a comma-separated integer list.
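Quick checks of the parsing behaviour described above (single value vs. comma-separated list, non-integer input):

print(_parse_incval('month', '4'))       # -> 4
print(_parse_incval('month', '1,3,5'))   # -> [1, 3, 5]
print(_parse_incval('month', 'x'))       # -> None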
8,155
def change_state(self, item, state):
    tags = self.item(item, "tags")
    states = ("checked", "unchecked", "tristate")
    new_tags = [t for t in tags if t not in states]
    new_tags.append(state)
    self.item(item, tags=tuple(new_tags))
Replace the current state of the item. i.e. replace the current state tag but keeps the other tags. :param item: item id :type item: str :param state: "checked", "unchecked" or "tristate": new state of the item :type state: str
8,156
def npd_to_pmf(nodal_plane_dist, use_default=False):
    if isinstance(nodal_plane_dist, PMF):
        return nodal_plane_dist
    else:
        if use_default:
            return PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))])
        else:
            raise ValueError('Nodal plane distribution must be an instance of PMF')  # message assumed; lost in extraction
Returns the nodal plane distribution as an instance of the PMF class
8,157
def from_command_line():
    # flag names, help strings and the build value below are assumed; literals lost in extraction
    parser = argparse.ArgumentParser(
        description='Convert Complete Genomics var data to gVCF.')
    parser.add_argument(
        '-d', '--refseqdir', metavar='REFSEQDIR', required=True,
        dest='refseqdir',
        help='Directory containing reference genome files')
    parser.add_argument(
        '-i', '--input', metavar='CGIVARFILE',
        dest='cgivarfile',
        help='Complete Genomics var file to convert (default: standard input)'
    )
    parser.add_argument(
        '-o', '--output', metavar='VCFOUTFILE',
        dest='vcfoutfile',
        help='gVCF output file (default: standard output)')
    parser.add_argument(
        '-v', '--var-only', action='store_true',
        dest='varonly',
        help='Report only variant lines')
    # note: a fifth boolean flag existed in the original but was unrecoverable
    args = parser.parse_args()
    twobit_path, twobit_name = get_reference_genome_file(
        args.refseqdir, build='b37')
    if sys.stdin.isatty():
        var_input = args.cgivarfile
    else:
        var_input = sys.stdin
    if args.vcfoutfile:
        convert_to_file(var_input, args.vcfoutfile, twobit_path, twobit_name,
                        args.varonly)
    else:
        for line in convert(
                cgi_input=var_input,
                twobit_ref=twobit_path,
                twobit_name=twobit_name,
                var_only=args.varonly):
            print(line)
Run CGI var to gVCF conversion from the command line.
8,158
def apply_T4(word):
    WORD = _split_consonants_and_vowels(word)
    for k, v in WORD.iteritems():
        if len(v) == 2 and v.endswith(('u', 'y')):  # diphthongs ending in /u, y/, per the rule
            if WORD.get(k + 2, 0):
                if not WORD.get(k + 3, 0):
                    if len(WORD[k + 2]) == 1 and is_consonant(WORD[k + 2]):
                        WORD[k] = v[0] + '.' + v[1]  # '.' marks the syllable boundary; literal assumed
                elif len(WORD[k + 1]) == 1 and WORD.get(k + 3, 0):
                    if is_consonant(WORD[k + 3][0]):
                        WORD[k] = v[0] + '.' + v[1]
                elif len(WORD[k + 2]) == 2:
                    WORD[k] = v[0] + '.' + v[1]
    word = _compile_dict_into_word(WORD)
    return word
An agglutination diphthong that ends in /u, y/ usually contains a syllable boundary when -C# or -CCV follow, e.g., [lau.ka.us], [va.ka.ut.taa].
8,159
def get_exported(self): return dict((k, self.vars[k]) for k in self.exported_vars)
Get a new dict with the exported variables.
8,160
def process(self, block=True):
    self.msg_types = set()
    self.multiplier = []
    self.field_types = []
    self.x = []
    self.y = []
    self.modes = []
    self.axes = []
    self.first_only = []
    re_caps = re.compile('[A-Z_][A-Z0-9_]+')  # pattern assumed; literal lost in extraction
    for f in self.fields:
        caps = set(re.findall(re_caps, f))
        self.msg_types = self.msg_types.union(caps)
        self.field_types.append(caps)
        self.y.append([])
        self.x.append([])
        self.axes.append(1)
        self.first_only.append(False)
    if self.labels is not None:
        labels = self.labels.split(',')
        if len(labels) != len(self.fields) * len(self.mav_list):
            print("Number of labels (%u) must match number of fields (%u)" % (
                len(labels), len(self.fields) * len(self.mav_list)))
            return
    else:
        labels = None
    timeshift = self.timeshift
    for fi in range(0, len(self.mav_list)):
        mlog = self.mav_list[fi]
        self.process_mav(mlog, timeshift)
        timeshift = 0
        for i in range(0, len(self.x)):
            if self.first_only[i] and fi != 0:
                self.x[i] = []
                self.y[i] = []
        if labels:
            lab = labels[fi * len(self.fields):(fi + 1) * len(self.fields)]
        else:
            lab = self.fields[:]
        if self.multi:
            col = colors[:]
        else:
            col = colors[fi * len(self.fields):]
        self.plotit(self.x, self.y, lab, colors=col)
        for i in range(0, len(self.x)):
            self.x[i] = []
            self.y[i] = []
    pylab.draw()
process and display graph
8,161
def disabledPenColor(self):
    palette = self.palette()
    return palette.color(palette.Disabled, palette.NodeForeground)
Returns the disabled pen color for this node. :return <QColor>
8,162
def convert_to_btc_on(self, amount, currency, date_obj):
    if isinstance(amount, Decimal):
        use_decimal = True
    else:
        use_decimal = self._force_decimal
    start = date_obj.strftime('%Y-%m-%d')
    end = date_obj.strftime('%Y-%m-%d')
    # endpoint assumed to be the coindesk historical close API; literal lost in extraction
    url = (
        'https://api.coindesk.com/v1/bpi/historical/close.json'
        '?start={}&end={}&currency={}'.format(start, end, currency)
    )
    response = requests.get(url)
    if response.status_code == 200:
        data = response.json()
        price = data.get('bpi', {}).get(start, None)
        if price:
            if use_decimal:
                price = Decimal(price)
            try:
                converted_btc = amount / price
                return converted_btc
            except TypeError:
                raise DecimalFloatMismatchError(
                    "convert_to_btc_on requires amount parameter is of type Decimal when force_decimal=True")
    raise RatesNotAvailableError("BitCoin Rates Source Not Ready For Given Date")
Convert X amount to BTC based on given date rate
8,163
def get_word_at(self, index: int) -> Union[int, BitVec]:
    try:
        return symbol_factory.BitVecVal(
            util.concrete_int_from_bytes(
                bytes([util.get_concrete_int(b) for b in self[index: index + 32]]),
                0,
            ),
            256,
        )
    except TypeError:
        result = simplify(
            Concat(
                [
                    b if isinstance(b, BitVec) else symbol_factory.BitVecVal(b, 8)
                    for b in cast(
                        List[Union[int, BitVec]], self[index: index + 32]
                    )
                ]
            )
        )
        assert result.size() == 256
        return result
Access a word from a specified memory index. :param index: integer representing the index to access :return: 32 byte word at the specified index
8,164
def port_has_listener(address, port):
    cmd = ['nc', '-z', address, str(port)]
    result = subprocess.call(cmd)
    return not bool(result)
Returns True if the address:port is open and being listened to, else False. @param address: an IP address or hostname @param port: integer port Note calls 'nc' via a subprocess shell
8,165
def service(
    state, host, *args, **kwargs
):
    # the init-system probes (binary names and directories) are assumed; literals lost in extraction
    if host.fact.which('systemctl'):
        yield systemd(state, host, *args, **kwargs)
        return
    if host.fact.which('initctl'):
        yield upstart(state, host, *args, **kwargs)
        return
    if host.fact.directory('/etc/init.d'):
        yield d(state, host, *args, **kwargs)
        return
    if host.fact.directory('/etc/rc.d'):
        yield rc(state, host, *args, **kwargs)
        return
    raise OperationError((
        'No init system found '
        '(no systemctl or initctl, and no /etc/init.d or /etc/rc.d)'
    ))
Manage the state of services. This command checks for the presence of all the init systems pyinfra can handle and executes the relevant operation. See init system specific operations for arguments.
8,166
def intword(value, format='%.1f'):  # default format assumed; literal lost in extraction
    try:
        value = int(value)
    except (TypeError, ValueError):
        return value
    if value < powers[0]:
        return str(value)
    for ordinal, power in enumerate(powers[1:], 1):
        if value < power:
            chopped = value / float(powers[ordinal - 1])
            return (' '.join([format, _(human_powers[ordinal - 1])])) % chopped
    return str(value)
Converts a large integer to a friendly text representation. Works best for numbers over 1 million. For example, 1000000 becomes '1.0 million', 1200000 becomes '1.2 million' and '1200000000' becomes '1.2 billion'. Supports up to decillion (33 digits) and googol (100 digits). You can pass format to change the number of decimal or general format of the number portion. This function returns a string unless the value passed was unable to be coaxed into an int.
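This matches the humanize library's intword; if that is indeed the origin, usage looks like:

import humanize

print(humanize.intword(1000000))             # '1.0 million'
print(humanize.intword(1200000000))          # '1.2 billion'
print(humanize.intword(1200000, '%0.2f'))    # '1.20 million'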
8,167
def execute(self, source, splitting_stream, sinks, interval, meta_data_id,
            output_plate_values):
    if not isinstance(interval, TimeInterval):
        raise TypeError('Expected TimeInterval, got {}'.format(type(interval)))  # message assumed
    calculated_intervals = None
    for sink in sinks:
        if interval.end > sink.channel.up_to_timestamp:
            raise ValueError(
                'The stream is not available after {}'.format(
                    sink.channel.up_to_timestamp))  # message assumed
        if calculated_intervals is None:
            calculated_intervals = sink.calculated_intervals
            continue
        if sink.calculated_intervals != calculated_intervals:
            raise RuntimeError("Partially executed sinks not yet supported")
    required_intervals = TimeIntervals([interval]) - calculated_intervals
    if not required_intervals.is_empty:
        document_count = 0
        for interval in required_intervals:
            for item in self._execute(
                    source=source,
                    splitting_stream=splitting_stream,
                    interval=interval,
                    meta_data_id=meta_data_id,
                    output_plate_values=output_plate_values):
                meta_data = (item.meta_data if isinstance(item.meta_data[0], tuple)
                             else (item.meta_data,))
                try:
                    sink = next(s for s in sinks
                                if all(m in s.stream_id.meta_data for m in meta_data))
                    sink.writer(item.stream_instance)
                    document_count += 1
                except StopIteration:
                    logging.warn("A multi-output tool has produced a value {} "
                                 "which does not belong to the output plate".format(meta_data))
                    continue
                except TypeError:
                    logging.error("A multi-output tool has produced a value {} "
                                  "which cannot be hashed and does not belong to the output plate"
                                  .format(meta_data))
        if not document_count:
            logging.debug("{} did not produce any data for time interval {} on stream {}".format(
                self.name, required_intervals, source))
        self.write_to_history(
            interval=interval,
            tool=self.name,
            document_count=document_count
        )
Execute the tool over the given time interval. :param source: The source stream :param splitting_stream: The stream over which to split :param sinks: The sink streams :param interval: The time interval :param meta_data_id: The meta data id of the output plate :param output_plate_values: The values of the plate where data is put onto :type source: Stream :type sinks: list[Stream] | tuple[Stream] :type interval: TimeInterval :type meta_data_id: str :type output_plate_values: list | tuple :return: None
8,168
def kill_mprocess(process):
    if process and proc_alive(process):
        process.terminate()
        process.communicate()
    return not proc_alive(process)
kill process Args: process - Popen object for process
8,169
def get_operator_output_port(self): return OperatorOutputPort(self.rest_client.make_request(self.operatorOutputPort), self.rest_client)
Get the output port of this exported stream. Returns: OperatorOutputPort: Output port of this exported stream.
8,170
def pyramid(
        input_raster,
        output_dir,
        pyramid_type=None,
        output_format=None,
        resampling_method=None,
        scale_method=None,
        zoom=None,
        bounds=None,
        overwrite=False,
        debug=False):
    bounds = bounds if bounds else None
    options = dict(
        pyramid_type=pyramid_type,
        scale_method=scale_method,
        output_format=output_format,
        resampling=resampling_method,
        zoom=zoom,
        bounds=bounds,
        overwrite=overwrite
    )
    raster2pyramid(input_raster, output_dir, options)
Create tile pyramid out of input raster.
8,171
def _move_end_to_cxn(self, shape, cxn_pt_idx):
    x, y, cx, cy = shape.left, shape.top, shape.width, shape.height
    self.end_x, self.end_y = {
        0: (int(x + cx / 2), y),
        1: (x, int(y + cy / 2)),
        2: (int(x + cx / 2), y + cy),
        3: (x + cx, int(y + cy / 2)),
    }[cxn_pt_idx]
Move the end point of this connector to the coordinates of the connection point of *shape* specified by *cxn_pt_idx*.
8,172
def init0(self, dae):
    dae.y[self.v] = self.v0
    dae.y[self.q] = mul(self.u, self.qg)
Set initial voltage and reactive power for PQ. Overwrites Bus.voltage values
8,173
def verify_event_source_current(self, event_uuid, resource_name,
                                service_name, function_arn):
    client = self._client('lambda')  # client name assumed; literal lost in extraction
    try:
        attributes = client.get_event_source_mapping(UUID=event_uuid)
        actual_arn = attributes['EventSourceArn']
        arn_start, actual_name = actual_arn.rsplit(':', 1)
        return (
            actual_name == resource_name and
            arn_start.startswith('arn:aws:%s' % service_name) and
            attributes['FunctionArn'] == function_arn
        )
    except client.exceptions.ResourceNotFoundException:
        return False
Check if the uuid matches the resource and function arn provided. Given a uuid representing an event source mapping for a lambda function, verify that the associated source arn and function arn match up to the parameters passed in. Instead of providing the event source arn, the resource name is provided along with the service name. For example, if we're checking an SQS queue event source, the resource name would be the queue name (e.g. ``myqueue``) and the service would be ``sqs``.
8,174
def _check_split_list_validity(self):
    if not (hasattr(self, "_splitListsSet") and (self._splitListsSet)):
        return False
    elif len(self) != self._splitListsLength:
        return False
    else:
        return True
See _temporal_split_list above. This function checks if the current split lists are still valid.
8,175
def json(self):
    if 'json' not in self.environ.get('CONTENT_TYPE', ''):  # literals assumed; lost in extraction
        raise BadRequest()
    try:
        return loads(self.data)
    except Exception:
        raise BadRequest()
Get the result of simplejson.loads if possible.
8,176
def rec_setattr(obj, attr, value):
    attrs = attr.split('.')  # dot notation per the docstring
    setattr(reduce(getattr, attrs[:-1], obj), attrs[-1], value)
Set object's attribute. May use dot notation. >>> class C(object): pass >>> a = C() >>> a.b = C() >>> a.b.c = 4 >>> rec_setattr(a, 'b.c', 2) >>> a.b.c 2
8,177
def get_volume(self, datacenter_id, volume_id):
    response = self._perform_request(
        '/datacenters/%s/volumes/%s' % (datacenter_id, volume_id))  # path assumed; literal lost in extraction
    return response
Retrieves a single volume by ID. :param datacenter_id: The unique ID of the data center. :type datacenter_id: ``str`` :param volume_id: The unique ID of the volume. :type volume_id: ``str``
8,178
def get(self, path_segment="", owner=None, app=None, sharing=None, **query):
    if path_segment.startswith('/'):
        path = path_segment
    else:
        path = self.service._abspath(self.path + path_segment, owner=owner,
                                     app=app, sharing=sharing)
    return self.service.get(path, owner=owner, app=app, sharing=sharing, **query)
Performs a GET operation on the path segment relative to this endpoint. This method is named to match the HTTP method. This method makes at least one roundtrip to the server, one additional round trip for each 303 status returned, plus at most two additional round trips if the ``autologin`` field of :func:`connect` is set to ``True``. If *owner*, *app*, and *sharing* are omitted, this method takes a default namespace from the :class:`Service` object for this :class:`Endpoint`. All other keyword arguments are included in the URL as query parameters. :raises AuthenticationError: Raised when the ``Service`` is not logged in. :raises HTTPError: Raised when an error in the request occurs. :param path_segment: A path segment relative to this endpoint. :type path_segment: ``string`` :param owner: The owner context of the namespace (optional). :type owner: ``string`` :param app: The app context of the namespace (optional). :type app: ``string`` :param sharing: The sharing mode for the namespace (optional). :type sharing: "global", "system", "app", or "user" :param query: All other keyword arguments, which are used as query parameters. :type query: ``string`` :return: The response from the server. :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, and ``status`` **Example**:: import splunklib.client s = client.service(...) apps = s.apps apps.get() == \\ {'body': ...a response reader object..., 'headers': [('content-length', '26208'), ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), ('server', 'Splunkd'), ('connection', 'close'), ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), ('date', 'Fri, 11 May 2012 16:30:35 GMT'), ('content-type', 'text/xml; charset=utf-8')], 'reason': 'OK', 'status': 200} apps.get('nonexistant/path') # raises HTTPError s.logout() apps.get() # raises AuthenticationError
8,179
def _check_edgemap_registers(self, edge_map, keyregs, valregs, valreg=True):
    add_regs = set()
    reg_frag_chk = {}
    for v in keyregs.values():
        reg_frag_chk[v] = {j: False for j in range(len(v))}
    for k in edge_map.keys():
        if k[0].name in keyregs:
            reg_frag_chk[k[0]][k[1]] = True
    for k, v in reg_frag_chk.items():
        s = set(v.values())
        if len(s) == 2:
            raise DAGCircuitError("edge_map fragments reg %s" % k)
        elif s == set([False]):
            if k in self.qregs.values() or k in self.cregs.values():
                raise DAGCircuitError("unmapped duplicate reg %s" % k)
            else:
                add_regs.add(k)
        else:
            if valreg:
                if not edge_map[(k, 0)][0].name in valregs:
                    size = max(map(lambda x: x[1],
                                   filter(lambda x: x[0] == edge_map[(k, 0)][0],
                                          edge_map.values())))
                    qreg = QuantumRegister(size + 1, edge_map[(k, 0)][0].name)
                    add_regs.add(qreg)
    return add_regs
Check that wiremap neither fragments nor leaves duplicate registers. 1. There are no fragmented registers. A register in keyregs is fragmented if not all of its (qu)bits are renamed by edge_map. 2. There are no duplicate registers. A register is duplicate if it appears in both self and keyregs but not in edge_map. Args: edge_map (dict): map from (reg,idx) in keyregs to (reg,idx) in valregs keyregs (dict): a map from register names to Register objects valregs (dict): a map from register names to Register objects valreg (bool): if False the method ignores valregs and does not add regs for bits in the edge_map image that don't appear in valregs Returns: set(Register): the set of regs to add to self Raises: DAGCircuitError: if the wiremap fragments, or duplicates exist
8,180
def info2lists(info, in_place=False):
    if 'packages' not in info and 'releases' not in info:
        return info
    if in_place:
        info_lists = info
    else:
        info_lists = info.copy()
    packages = info.get('packages')
    if packages:
        info_lists['packages'] = list(packages.values())
    releases = info.get('releases')
    if releases:
        info_lists['releases'] = list(releases.values())
    return info_lists
Return info with: 1) `packages` dict replaced by a 'packages' list with indexes removed 2) `releases` dict replaced by a 'releases' list with indexes removed info2list(info2dicts(info)) == info
8,181
def send_async(
        self,
        queue_identifier: QueueIdentifier,
        message: Message,
):
    receiver_address = queue_identifier.recipient
    if not is_binary_address(receiver_address):
        raise ValueError('Invalid address {}'.format(pex(receiver_address)))  # message assumed
    if isinstance(message, (Delivered, Ping, Pong)):
        raise ValueError(
            'Do not use send_async for {} messages'.format(message.__class__.__name__),  # message assumed
        )
    self.log.debug(
        'Send async',
        receiver_address=pex(receiver_address),
        message=message,
        queue_identifier=queue_identifier,
    )
    self._send_with_retry(queue_identifier, message)
Queue the message for sending to recipient in the queue_identifier It may be called before transport is started, to initialize message queues The actual sending is started only when the transport is started
8,182
def plot_one_day(x, y, xlabel=None, ylabel=None, title=None, ylim=None):
    plt.close("all")
    fig = plt.figure(figsize=(20, 10))
    ax = fig.add_subplot(111)
    ax.plot(x, y)
    hours = HourLocator(range(24))
    hoursFmt = DateFormatter("%H:%M")
    minutes = MinuteLocator([30, ])
    minutesFmt = DateFormatter("%M")
    ax.xaxis.set_major_locator(hours)
    ax.xaxis.set_major_formatter(hoursFmt)
    ax.xaxis.set_minor_locator(minutes)
    ax.xaxis.set_minor_formatter(minutesFmt)
    ax.autoscale_view()
    ax.grid()
    plt.setp(ax.xaxis.get_majorticklabels(), rotation=90)
    plt.setp(ax.xaxis.get_minorticklabels(), rotation=90)
    if xlabel:
        plt.xlabel(xlabel)
    else:
        plt.xlabel("Time")
    if ylabel:
        plt.ylabel(ylabel)
    else:
        plt.ylabel("Value")
    if title:
        plt.title(title)
    else:
        plt.title(str(x[0].date()))
    if ylim:
        plt.ylim(ylim)
    else:
        plt.ylim([min(y) - (max(y) - min(y)) * 0.05,
                  max(y) + (max(y) - min(y)) * 0.05])
    return plt, ax
The time span is one day. major tick = every hour minor tick = every 15 minutes
8,183
def critical_path(self, print_cp=True, cp_limit=100):
    critical_paths = []
    wire_src_map, dst_map = self.block.net_connections()

    def critical_path_pass(old_critical_path, first_wire):
        if isinstance(first_wire, (Input, Const, Register)):
            critical_paths.append((first_wire, old_critical_path))
            return
        if len(critical_paths) >= cp_limit:
            raise self._TooManyCPsError()
        source = wire_src_map[first_wire]
        critical_path = [source]
        critical_path.extend(old_critical_path)
        arg_max_time = max(self.timing_map[arg_wire] for arg_wire in source.args)
        for arg_wire in source.args:
            if self.timing_map[arg_wire] == arg_max_time:
                critical_path_pass(critical_path, arg_wire)

    max_time = self.max_length()
    try:
        for wire_pair in self.timing_map.items():
            if wire_pair[1] == max_time:
                critical_path_pass([], wire_pair[0])
    except self._TooManyCPsError:
        print("Critical path count limit reached")
    if print_cp:
        self.print_critical_paths(critical_paths)
    return critical_paths
Takes a timing map and returns the critical paths of the system. :param print_cp: Whether to print the critical path to the terminal after calculation :return: a list containing tuples with the 'first' wire as the first value and the critical paths (which themselves are lists of nets) as the second
8,184
def write_length_and_key(fp, value):
    written = write_fmt(fp, 'I', 0 if value in _TERMS else len(value))  # 4-byte unsigned length format assumed
    written += write_bytes(fp, value)
    return written
Helper to write descriptor key.
8,185
def get(self, uuid):
    for token in self.list():
        if token.get('uuid') == uuid:
            return token
    raise LinShareException(-1, "Can not find uuid:" + uuid)
Workaround: missing get entry point
8,186
def set(self, option, value):
    if self.config is None:
        self.config = {}
    self.config[option] = value
Sets an option to a value.
8,187
def demo(quiet, shell, speed, prompt, commentecho):
    run(
        DEMO,
        shell=shell,
        speed=speed,
        test_mode=TESTING,
        prompt_template=prompt,
        quiet=quiet,
        commentecho=commentecho,
    )
Run a demo doitlive session.
8,188
def get_diplomacy(self):
    # cache key, player types and diplomacy labels are assumed; literals lost in extraction
    if not self._cache['teams']:
        self.get_teams()
    player_num = 0
    computer_num = 0
    for player in self._header.scenario.game_settings.player_info:
        if player.type == 'human':
            player_num += 1
        elif player.type == 'computer':
            computer_num += 1
    total_num = player_num + computer_num
    diplomacy = {
        'FFA': (len(self._cache['teams']) == total_num) and total_num > 2,
        'TG': len(self._cache['teams']) == 2 and total_num > 2,
        '1v1': total_num == 2,
    }
    diplomacy['type'] = 'Other'
    team_sizes = sorted([len(team) for team in self._cache['teams']])
    diplomacy['team_size'] = 'v'.join([str(size) for size in team_sizes])
    if diplomacy['FFA']:
        diplomacy['type'] = 'FFA'
        diplomacy['team_size'] = 'FFA'
    elif diplomacy['TG']:
        diplomacy['type'] = 'TG'
    elif diplomacy['1v1']:
        diplomacy['type'] = '1v1'
    return diplomacy
Compute diplomacy.
8,189
def basic_clean_str(string):
    # the contraction rules were partially lost in extraction; the first few substitutions are assumed
    string = re.sub(r"\n", " ", string)
    string = re.sub(r"\'s", " 's", string)
    string = re.sub(r"\’s", " 's", string)
    string = re.sub(r"\'ve", " have", string)
    string = re.sub(r"\’ve", " have", string)
    string = re.sub(r"\’", " ", string)
    string = re.sub(r"\.", " . ", string)
    string = re.sub(r"\,", " , ", string)
    string = re.sub(r"\!", " ! ", string)
    string = re.sub(r"\-", " ", string)
    string = re.sub(r"\(", " ", string)
    string = re.sub(r"\)", " ", string)
    string = re.sub(r"\]", " ", string)
    string = re.sub(r"\[", " ", string)
    string = re.sub(r"\?", " ", string)
    string = re.sub(r"\>", " ", string)
    string = re.sub(r"\<", " ", string)
    string = re.sub(r"\=", " ", string)
    string = re.sub(r"\;", " ", string)
    string = re.sub(r"\:", " ", string)
    string = re.sub(r"\"", " ", string)
    string = re.sub(r"\$", " ", string)
    string = re.sub(r"\_", " ", string)
    string = re.sub(r"\s{2,}", " ", string)
    return string.strip().lower()
Tokenization/string cleaning for a dataset.
8,190
def log_to_syslog():
    rl = logging.getLogger()
    rl.setLevel(logging.DEBUG)  # level assumed; literal lost in extraction
    stderr = logging.StreamHandler(stream=sys.stderr)
    stderr.setLevel(logging.CRITICAL)
    stderr.setFormatter(logging.Formatter(
        '%(name)s: %(levelname)s %(message)s'))  # format assumed
    # the syslog handler construction was lost in extraction; SysLogHandler is assumed
    syslog = logging.handlers.SysLogHandler(address='/dev/log')
    rl.addHandler(stderr)
    rl.addHandler(syslog)
Configure logging to syslog.
8,191
def _recv_ack(self, method_frame):
    if self._ack_listener:
        delivery_tag = method_frame.args.read_longlong()
        multiple = method_frame.args.read_bit()
        if multiple:
            while self._last_ack_id < delivery_tag:
                self._last_ack_id += 1
                self._ack_listener(self._last_ack_id)
        else:
            self._last_ack_id = delivery_tag
            self._ack_listener(self._last_ack_id)
Receive an ack from the broker.
8,192
def allow(self, role, method, resource, with_children=True):
    if with_children:
        for r in role.get_children():
            permission = (r.get_name(), method, resource)
            if permission not in self._allowed:
                self._allowed.append(permission)
    if role == 'anonymous':  # role literal assumed; lost in extraction
        permission = (role, method, resource)
    else:
        permission = (role.get_name(), method, resource)
    if permission not in self._allowed:
        self._allowed.append(permission)
Add allowing rules. :param role: Role of this rule. :param method: Method to allow in rule, include GET, POST, PUT etc. :param resource: Resource also view function. :param with_children: Allow role's children in rule as well if with_children is `True`
8,193
def to_float(option, value):
    if type(value) is str:
        try:
            value = float(value)
        except ValueError:
            pass
    return (option, value)
Converts string values to floats when appropriate
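Behaviour check for the option/value coercion above:

print(to_float('threshold', '0.5'))   # -> ('threshold', 0.5)
print(to_float('name', 'abc'))        # -> ('name', 'abc')  non-numeric strings pass through
print(to_float('count', 3))           # -> ('count', 3)     non-strings untouched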
8,194
def _skip_spaces_and_peek(self):
    while 1:
        self.skip_chars(self.end, lambda x: x in self.spaces)
        c = self.peek()
        if not self.params.allow_comments:
            return c
        if c != '/':
            return c
        d = self.peek(1)
        if d == '/':
            self.skip_to(self.pos + 2)
            self._skip_singleline_comment()
        elif d == '*':
            self.skip_to(self.pos + 2)
            self._skip_multiline_comment()
        else:
            return c
Skips all spaces and comments. :return: The first character that follows the skipped spaces and comments or None if the end of the json string has been reached.
8,195
def features_properties_null_remove(obj):
    features = obj['features']
    for i in tqdm(range(len(features))):
        if 'properties' in features[i]:
            properties = features[i]['properties']
            features[i]['properties'] = {p: properties[p] for p in properties
                                         if properties[p] is not None}
    return obj
Remove any properties of features in the collection that have entries mapping to a null (i.e., None) value
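A tiny GeoJSON-shaped check of the null stripping, assuming tqdm is installed:

collection = {
    'features': [
        {'properties': {'name': 'a', 'height': None}},
        {'properties': {'name': 'b', 'height': 3}},
    ]
}
cleaned = features_properties_null_remove(collection)
print(cleaned['features'][0]['properties'])   # -> {'name': 'a'}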
8,196
def main():
    if len(sys.argv) > 0:
        if '-h' in sys.argv:
            print(main.__doc__)
            sys.exit()
        if '-f' in sys.argv:
            ind = sys.argv.index('-f')
            file = sys.argv[ind + 1]
            f = open(file, 'r')
            data = f.readlines()
        else:
            data = sys.stdin.readlines()
    DIs = []
    ofile = ""
    if '-F' in sys.argv:
        ind = sys.argv.index('-F')
        ofile = sys.argv[ind + 1]
        out = open(ofile, 'w')
    for line in data:
        if '\t' in line:
            rec = line.split('\t')
        else:
            rec = line.split()
        DIs.append((float(rec[0]), float(rec[1])))
    bpars = pmag.dobingham(DIs)
    # output format string assumed; literal lost in extraction
    output = '%7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %i' % (
        bpars["dec"], bpars["inc"], bpars["Eta"], bpars["Edec"], bpars["Einc"],
        bpars["Zeta"], bpars["Zdec"], bpars["Zinc"], bpars["n"])
    if ofile == "":
        print(output)
    else:
        out.write(output + '\n')
NAME gobing.py DESCRIPTION calculates Bingham parameters from dec inc data INPUT FORMAT takes dec/inc as first two columns in space delimited file SYNTAX gobing.py [options] OPTIONS -f FILE to read from FILE -F FILE specifies output file name < filename for reading from standard input OUTPUT mean dec, mean inc, Eta, Deta, Ieta, Zeta, Zdec, Zinc, N
8,197
def itemmeta(self):
    response = self.get("_new")
    content = _load_atom(response, MATCH_ENTRY_CONTENT)
    return _parse_atom_metadata(content)
Returns metadata for members of the collection. Makes a single roundtrip to the server, plus two more at most if the ``autologin`` field of :func:`connect` is set to ``True``. :return: A :class:`splunklib.data.Record` object containing the metadata. **Example**:: import splunklib.client as client import pprint s = client.connect(...) pprint.pprint(s.apps.itemmeta()) {'access': {'app': 'search', 'can_change_perms': '1', 'can_list': '1', 'can_share_app': '1', 'can_share_global': '1', 'can_share_user': '1', 'can_write': '1', 'modifiable': '1', 'owner': 'admin', 'perms': {'read': ['*'], 'write': ['admin']}, 'removable': '0', 'sharing': 'user'}, 'fields': {'optional': ['author', 'configured', 'description', 'label', 'manageable', 'template', 'visible'], 'required': ['name'], 'wildcard': []}}
8,198
def _censor_with(x, range, value=None): return [val if range[0] <= val <= range[1] else value for val in x]
Censor any values outside of range with ``None``
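Behaviour of the censoring helper on a small list:

print(_censor_with([1, 5, 10, 15], (2, 12)))           # -> [None, 5, 10, None]
print(_censor_with([1, 5, 10, 15], (2, 12), value=0))  # -> [0, 5, 10, 0]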
8,199
def _check_state_value(cls):
    state_value = cls.context.get_config('initial_state', None)  # config key assumed; literal lost in extraction
    state_value = state_value or getattr(
        cls.context.new_class, cls.context.state_name, None
    )
    if not state_value:
        raise ValueError(
            "Empty state is disallowed, yet no initial state is given!"
        )
    state_value = (
        cls.context
        .new_meta['translator']  # meta key assumed; literal lost in extraction
        .translate(state_value)
    )
    cls.context.state_value = state_value
Check initial state value - if is proper and translate it. Initial state is required.