Unnamed: 0: int64, values 0 to 389k
code: string, lengths 26 to 79.6k
docstring: string, lengths 1 to 46.9k
10,600
def post(self, request, *args, **kwargs): serializer = EventSerializer(data=request.data) if not serializer.is_valid(): return Response( {"accepted": False, "reason": serializer.errors}, status=400 ) data = serializer.validated_data event_type = { "ack": "ack", "nack": "nack", "delivery_report": "delivery_succeeded", }.get(data["event_type"]) accepted, reason = process_event( data["user_message_id"], event_type, data["nack_reason"], data["timestamp"] ) return Response( {"accepted": accepted, "reason": reason}, status=200 if accepted else 400 )
Checks for expected event types before continuing
10,601
def catalogFactory(name, **kwargs): fn = lambda member: inspect.isclass(member) and member.__module__ == __name__ catalogs = odict(inspect.getmembers(sys.modules[__name__], fn)) if name not in list(catalogs.keys()): msg = "%s not found in catalogs:\n %s" % (name, list(catalogs.keys())) logger.error(msg) msg = "Unrecognized catalog: %s" % name raise Exception(msg) return catalogs[name](**kwargs)
Factory for various catalogs.
10,602
def _Login(): if not clc.v2.V2_API_USERNAME or not clc.v2.V2_API_PASSWD: clc.v1.output.Status(,3,) raise(clc.APIV2NotEnabled) session = clc._REQUESTS_SESSION session.headers[] = "application/json" r = session.request("POST", "%s/v2/%s" % (clc.defaults.ENDPOINT_URL_V2,"authentication/login"), json={"username": clc.v2.V2_API_USERNAME, "password": clc.v2.V2_API_PASSWD}, verify=API._ResourcePath()) if r.status_code == 200: clc._LOGIN_TOKEN_V2 = r.json()[] clc.ALIAS = r.json()[] clc.LOCATION = r.json()[] elif r.status_code == 400: raise(Exception("Invalid V2 API login. %s" % (r.json()[]))) else: raise(Exception("Error logging into V2 API. Response code %s. message %s" % (r.status_code,r.json()[])))
Login to retrieve bearer token and set default account and location aliases.
10,603
def cycles(cls, **kwargs): if "loc" not in kwargs: if "point" not in kwargs: if "lat" not in kwargs or "lon" not in kwargs: raise ValueError("You must supply some form of lat/lon coordinates") else: lat = kwargs.get("lat") lon = kwargs.get("lon") else: lat = kwargs.get("point").y lon = kwargs.get("point").x if "time" not in kwargs: raise ValueError("You must supply a datetime object") else: time = kwargs.get("time") else: lat = kwargs.get("loc").latitude lon = kwargs.get("loc").longitude time = kwargs.get("loc").time if time.tzinfo is None: time = time.replace(tzinfo=pytz.utc) original_zone = pytz.utc else: original_zone = time.tzinfo local_jd = time.timetuple().tm_yday utc_jd = time.astimezone(pytz.utc).timetuple().tm_yday comp = cmp(utc_jd, local_jd) if comp == 1: utc_jd -= 1 elif comp == -1: utc_jd += 1 time = time.replace(hour=0, minute=0, second=0, microsecond=0) rising_h, rising_m = cls._calc(jd=utc_jd, lat=lat, lon=lon, stage=cls.RISING) setting_h, setting_m = cls._calc(jd=utc_jd, lat=lat, lon=lon, stage=cls.SETTING) rising = time.replace(tzinfo=pytz.utc) + timedelta(hours=rising_h, minutes=rising_m) setting = time.replace(tzinfo=pytz.utc) + timedelta(hours=setting_h, minutes=setting_m) if setting < rising: setting = setting + timedelta(hours=24) rising = rising.astimezone(original_zone) setting = setting.astimezone(original_zone) return { cls.RISING : rising, cls.SETTING : setting}
Classmethod for convenience in returning both the sunrise and sunset based on a location and date. Always calculates the sunrise and sunset on the given date, no matter the time passed into the function in the datetime object. Parameters: loc = Location4D (object) OR point = Shapely point (object) time = datetime in UTC (object) OR lat = latitude (float) lon = longitude (float) time = datetime in UTC (object) Returns: { 'sunrise': datetime in UTC, 'sunset': datetime in UTC } Sources: http://williams.best.vwh.net/sunrise_sunset_example.htm
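A minimal usage sketch for the cycles() classmethod above; the owning class name (SunCycles here) and the coordinates are assumptions, not part of the original source.

# Hypothetical usage of the cycles() classmethod shown above;
# "SunCycles" is an assumed name for the class that owns it.
from datetime import datetime
import pytz

when = datetime(2020, 6, 21, 12, 0, tzinfo=pytz.utc)
cycle = SunCycles.cycles(lat=47.6, lon=-122.3, time=when)
print(cycle[SunCycles.RISING])   # sunrise as a timezone-aware datetime
print(cycle[SunCycles.SETTING])  # sunset, shifted +24h if it precedes sunrise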
10,604
def min(self): return int(self._min) if not np.isinf(self._min) else self._min
Returns the minimum value of the domain. :rtype: `float` or `np.inf`
10,605
async def _execute( self, transforms: List["OutputTransform"], *args: bytes, **kwargs: bytes ) -> None: self._transforms = transforms try: if self.request.method not in self.SUPPORTED_METHODS: raise HTTPError(405) self.path_args = [self.decode_argument(arg) for arg in args] self.path_kwargs = dict( (k, self.decode_argument(v, name=k)) for (k, v) in kwargs.items() ) if self.request.method not in ( "GET", "HEAD", "OPTIONS", ) and self.application.settings.get("xsrf_cookies"): self.check_xsrf_cookie() result = self.prepare() if result is not None: result = await result if self._prepared_future is not None: future_set_result_unless_cancelled(self._prepared_future, None) if self._finished: return if _has_stream_request_body(self.__class__): try: await self.request._body_future except iostream.StreamClosedError: return method = getattr(self, self.request.method.lower()) result = method(*self.path_args, **self.path_kwargs) if result is not None: result = await result if self._auto_finish and not self._finished: self.finish() except Exception as e: try: self._handle_request_exception(e) except Exception: app_log.error("Exception in exception handler", exc_info=True) finally: result = None if self._prepared_future is not None and not self._prepared_future.done(): self._prepared_future.set_result(None)
Executes this request with the given output transforms.
10,606
def len(self,resolution=1.0,units=None,conversion_function=convert_time, end_at_end=True): if units is not None: resolution = conversion_function(resolution,from_units=units,to_units=self.units) else: units = self.units if self.min is None: return int(self.max / resolution) if self.max is None: return 0 if units != and end_at_end: return int(np.ceil((self.max - self.min) / resolution)) return int(np.ceil((self.max - self.min) / resolution) + 1)
Calculates the length of the Label Dimension from its minimum, maximum and whether it is discrete. `resolution`: `units`: output units `conversion_function`: `end_at_end`: additional switch for continuous behaviour
10,607
def upsert(self, name, value=None, seq=None): for ctr, entry in enumerate(self.data): if entry[TNAME]==name: new_tuple = (name, value, entry[TLIST], entry[TSEQ]) self.data[ctr]=new_tuple return False new_tuple = (name, value, [], lib._seq(self, seq)) self.data.append(new_tuple) return True
Add one name/value entry to the main context of the rolne, but only if an entry with that name does not already exist. If an entry with that name exists, then the first entry found has its value changed. NOTE: the upsert only updates the FIRST entry with the name found. The method returns True if an insertion occurs, otherwise False. Example of use: >>> # setup an example rolne first >>> my_var = rolne() >>> my_var.upsert("item", "zing") True >>> my_var["item", "zing"].append("color", "blue") >>> print my_var %rolne: item = zing color = blue <BLANKLINE> >>> my_var.upsert("item", "zing") False >>> print my_var %rolne: item = zing color = blue <BLANKLINE> >>> my_var.upsert("item", "broom") False >>> print my_var %rolne: item = broom color = blue <BLANKLINE> .. versionadded:: 0.1.1 :param name: The key name of the name/value pair. :param value: The key value of the name/value pair. If not passed, then the value is assumed to be None. :returns: Returns True if the name/value was newly inserted. Otherwise, it returns False, indicating that an update was done instead.
10,608
def run_forever(self): cnt = 0 while True: cnt += 1 self.url = next(self.urls) log.debug("Trying to connect to node %s" % self.url) try: self.ws = websocket.WebSocketApp( self.url, on_message=self.on_message, on_error=self.on_error, on_close=self.on_close, on_open=self.on_open, ) self.ws.run_forever() except websocket.WebSocketException as exc: if self.num_retries >= 0 and cnt > self.num_retries: raise NumRetriesReached() sleeptime = (cnt - 1) * 2 if cnt < 10 else 10 if sleeptime: log.warning( "Lost connection to node during wsconnect(): %s (%d/%d) " % (self.url, cnt, self.num_retries) + "Retrying in %d seconds" % sleeptime ) time.sleep(sleeptime) except KeyboardInterrupt: self.ws.keep_running = False raise except Exception as e: log.critical("{}\n\n{}".format(str(e), traceback.format_exc()))
This method is used to run the websocket app continuously. It will execute callbacks as defined and try to stay connected with the provided APIs
10,609
def __populate_repositories_of_interest(self, username): user = self.github.get_user(username) self.user_starred_repositories.extend(user.get_starred()) if self.deep_dive: for following_user in user.get_following(): self.user_following_starred_repositories.extend( following_user.get_starred() )
Method to populate repositories which will be used to suggest repositories for the user. For this purpose we use two kinds of repositories. 1. Repositories starred by the user him/herself. 2. Repositories starred by the users followed by the user. :param username: Username of the user for whom repositories are being suggested.
10,610
def get_indexes(self, default_indexes=None): doctype = self.type.get_mapping_type_name() indexes = (settings.ES_INDEXES.get(doctype) or settings.ES_INDEXES['default']) if isinstance(indexes, six.string_types): indexes = [indexes] return super(S, self).get_indexes(default_indexes=indexes)
Returns the list of indexes to act on based on ES_INDEXES setting
10,611
def create_dialog(self): box0 = QGroupBox() self.name = FormStr() self.name.setText() self.idx_group.activated.connect(self.update_channels) form = QFormLayout(box0) form.addRow(, self.name) form.addRow(, self.idx_group) form.addRow(, self.idx_chan) form.addRow(, self.idx_cycle) form.addRow(, self.idx_stage) box1 = QGroupBox() mbox = QComboBox() method_list = SLOW_WAVE_METHODS for method in method_list: mbox.addItem(method) self.idx_method = mbox self.method = mbox.currentText() mbox.currentIndexChanged.connect(self.update_values) self.index[] = FormFloat() self.index[] = FormFloat() self.index[] = FormFloat() self.index[] = FormFloat() self.index[] = FormFloat() self.index[] = FormFloat() self.index[] = FormFloat() self.index[] = FormFloat() form = QFormLayout(box1) form.addRow(, mbox) form.addRow(, self.index[]) form.addRow(, self.index[]) form.addRow(, self.index[]) form.addRow(, self.index[]) form.addRow(, self.index[]) form.addRow(, self.index[]) form.addRow(, self.index[]) form.addRow(, self.index[]) box3 = QGroupBox() self.index[] = FormBool() self.index[] = FormBool() self.index[] = FormBool() self.index[] = FormMenu([, , ]) self.index[] = FormFloat(5) self.index[].set_value(True) self.index[].set_value(True) form = QFormLayout(box3) form.addRow(self.index[]) form.addRow(, self.index[]) form.addRow(, self.index[]) form.addRow(self.index[]) form.addRow(self.index[]) self.bbox.clicked.connect(self.button_clicked) btnlayout = QHBoxLayout() btnlayout.addStretch(1) btnlayout.addWidget(self.bbox) vlayout = QVBoxLayout() vlayout.addWidget(box1) vlayout.addWidget(box3) vlayout.addStretch(1) vlayout.addLayout(btnlayout) hlayout = QHBoxLayout() hlayout.addWidget(box0) hlayout.addLayout(vlayout) self.update_values() self.setLayout(hlayout)
Create the dialog.
10,612
def set_distribute_compositions(self, distribute_comps=None): if distribute_comps is None: raise NullArgument() metadata = Metadata(**settings.METADATA[]) if metadata.is_read_only(): raise NoAccess() if self._is_valid_input(distribute_comps, metadata, array=False): self._my_map[] = distribute_comps else: raise InvalidArgument()
Sets the distribution rights. This sets distribute verbatim to ``true``. :param distribute_comps: right to distribute modifications :type distribute_comps: ``boolean`` :raise: ``InvalidArgument`` -- ``distribute_comps`` is invalid :raise: ``NoAccess`` -- authorization failure *compliance: mandatory -- This method must be implemented.*
10,613
def p_field_id(self, p): : if len(p) == 3: if p[1] == 0: % p.lineno(1) ) p[0] = p[1] else: p[0] = None
field_id : INTCONSTANT ':' |
10,614
def list(declared, undeclared): queues = current_queues.queues.values() if declared: queues = filter(lambda queue: queue.exists, queues) elif undeclared: queues = filter(lambda queue: not queue.exists, queues) queue_names = [queue.routing_key for queue in queues] queue_names.sort() for queue in queue_names: click.secho(queue)
List configured queues.
10,615
def Uninstall(self, package_name, keep_data=False, timeout_ms=None): cmd = ['pm uninstall'] if keep_data: cmd.append('-k') cmd.append('"%s"' % package_name) return self.Shell(' '.join(cmd), timeout_ms=timeout_ms)
Removes a package from the device. Args: package_name: Package name of target package. keep_data: whether to keep the data and cache directories timeout_ms: Expected timeout for pushing and installing. Returns: The pm uninstall output.
10,616
def rpc_get_completions(self, filename, source, offset): results = self._call_backend("rpc_get_completions", [], filename, get_source(source), offset) results = list(dict((res["name"], res) for res in results) .values()) results.sort(key=lambda cand: _pysymbol_key(cand["name"])) return results
Get a list of completion candidates for the symbol at offset.
10,617
def _get_parsing_plan_for_multifile_children(self, obj_on_fs: PersistedObject, desired_type: Type[Any], logger: Logger) -> Dict[str, Any]: raise Exception()
Implementation of AnyParser API
10,618
def _committors(sources, sinks, tprob): n_states = np.shape(tprob)[0] sources = np.array(sources, dtype=int).reshape((-1, 1)) sinks = np.array(sinks, dtype=int).reshape((-1, 1)) lhs = np.eye(n_states) - tprob for a in sources: lhs[a, :] = 0.0 lhs[:, a] = 0.0 lhs[a, a] = 1.0 for b in sinks: lhs[b, :] = 0.0 lhs[:, b] = 0.0 lhs[b, b] = 1.0 ident_sinks = np.zeros(n_states) ident_sinks[sinks] = 1.0 rhs = np.dot(tprob, ident_sinks) rhs[sources] = 0.0 rhs[sinks] = 1.0 forward_committors = np.linalg.solve(lhs, rhs) return forward_committors
Get the forward committors of the reaction sources -> sinks. Parameters ---------- sources : array_like, int The set of unfolded/reactant states. sinks : array_like, int The set of folded/product states. tprob : np.ndarray Transition matrix Returns ------- forward_committors : np.ndarray The forward committors for the reaction sources -> sinks References ---------- .. [1] Weinan, E. and Vanden-Eijnden, E. Towards a theory of transition paths. J. Stat. Phys. 123, 503-523 (2006). .. [2] Metzner, P., Schutte, C. & Vanden-Eijnden, E. Transition path theory for Markov jump processes. Multiscale Model. Simul. 7, 1192-1219 (2009). .. [3] Berezhkovskii, A., Hummer, G. & Szabo, A. Reactive flux and folding pathways in network models of coarse-grained protein dynamics. J. Chem. Phys. 130, 205102 (2009). .. [4] Noe, Frank, et al. "Constructing the equilibrium ensemble of folding pathways from short off-equilibrium simulations." PNAS 106.45 (2009): 19011-19016.
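A small self-contained check of _committors() above on a three-state chain, assuming only numpy and the function as defined; the transition matrix is illustrative.

# Example for _committors(): source state 0, sink state 2,
# row-stochastic transition matrix of a 3-state chain.
import numpy as np

tprob = np.array([[0.8, 0.2, 0.0],
                  [0.1, 0.8, 0.1],
                  [0.0, 0.2, 0.8]])
q = _committors(sources=[0], sinks=[2], tprob=tprob)
# q[0] == 0, q[2] == 1, and the intermediate state gets q[1] == 0.5 here.
print(q)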
10,619
async def shuffle(self): self.logger.debug("shuffle command") if not self.state == : return self.statuslog.debug("Shuffling") random.shuffle(self.queue) self.update_queue() self.statuslog.debug("Shuffled")
The shuffle command
10,620
async def prepare_container(self, size, container, elem_type=None): if not self.writing: if container is None: return gen_elem_array(size, elem_type) fvalue = get_elem(container) if fvalue is None: fvalue = [] fvalue += gen_elem_array(max(0, size - len(fvalue)), elem_type) set_elem(container, fvalue) return fvalue
Prepares container for serialization :param size: :param container: :return:
10,621
def select_peaks(data, events, limit): selected = abs(data[events[:, 1]]) >= abs(limit) return events[selected, :]
Check whether event satisfies amplitude limit. Parameters ---------- data : ndarray (dtype='float') vector with data events : ndarray (dtype='int') N x 2+ matrix with peak/trough in second position limit : float minimum absolute amplitude required at the peak/trough Returns ------- ndarray (dtype='int') N x 2+ matrix with peak/trough in second position
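A toy example of select_peaks() above, assuming numpy and the function as given; the data values and event indices are made up.

# Keep only events whose value at the peak/trough sample (column 1)
# reaches an absolute amplitude of 10.
import numpy as np

data = np.array([0., 3., -12., 5., 15., -1.])
events = np.array([[1, 2, 3],    # start, peak, end (sample indices)
                   [3, 4, 5]])
print(select_peaks(data, events, limit=10))  # both rows survive (|-12| and |15| >= 10)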
10,622
def update_installed_files(self, installed_files): pkg_info_files = [fn for fn in installed_files if fnmatch.fnmatch(fn, '*.egg-info/PKG-INFO')] if len(pkg_info_files) != 1: logger.warning("Not tracking installed files (couldn't find PKG-INFO file)") else: egg_info_directory = os.path.dirname(pkg_info_files[0]) with open(os.path.join(egg_info_directory, 'installed-files.txt'), 'w') as handle: for pathname in installed_files: handle.write('%s\n' % os.path.relpath(pathname, egg_info_directory))
Track the files installed by a package so pip knows how to remove the package. This method is used by :func:`install_binary_dist()` (which collects the list of installed files for :func:`update_installed_files()`). :param installed_files: A list of absolute pathnames (strings) with the files that were just installed.
10,623
def debugDumpAttr(self, output, depth): libxml2mod.xmlDebugDumpAttr(output, self._o, depth)
Dumps debug information for the attribute
10,624
def profile(self, tile=None): dst_metadata = dict(self._profile) if tile is not None: dst_metadata.update( width=tile.width, height=tile.height, affine=tile.affine, driver="PNG", crs=tile.crs ) return dst_metadata
Create a metadata dictionary for rasterio. Parameters ---------- tile : ``BufferedTile`` Returns ------- metadata : dictionary output profile dictionary used for rasterio.
10,625
def _render_templates(files, filetable, written_files, force, open_mode='w'): for tpl_path, content in filetable: target_path = files[tpl_path] needdir = os.path.dirname(target_path) assert needdir, "Target should have valid parent dir" try: os.makedirs(needdir) except OSError as err: if err.errno != errno.EEXIST: raise if os.path.isfile(target_path): if force: LOG.warning("Forcing overwrite of existing file %s.", target_path) elif target_path in written_files: LOG.warning("Previous stencil has already written file %s.", target_path) else: print("Skipping existing file %s" % target_path) LOG.info("Skipping existing file %s", target_path) continue with open(target_path, open_mode) as newfile: print("Writing rendered file %s" % target_path) LOG.info("Writing rendered file %s", target_path) newfile.write(content) written_files.append(target_path)
Write template contents from filetable into files. Using filetable for the rendered templates, and the list of files, render all the templates into actual files on disk, forcing to overwrite the file as appropriate, and using the given open mode for the file.
10,626
def make_logger(scraper): logger = logging.getLogger() logger.setLevel(logging.DEBUG) requests_log = logging.getLogger("requests") requests_log.setLevel(logging.WARNING) json_handler = logging.FileHandler(log_path(scraper)) json_handler.setLevel(logging.DEBUG) json_formatter = jsonlogger.JsonFormatter(make_json_format()) json_handler.setFormatter(json_formatter) logger.addHandler(json_handler) console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) fmt = formatter = logging.Formatter(fmt) console_handler.setFormatter(formatter) logger.addHandler(console_handler) logger = logging.getLogger(scraper.name) logger = TaskAdapter(logger, scraper) return logger
Create two log handlers, one to output info-level output to the console, the other to store all logging in a JSON file which will later be used to generate reports.
10,627
def do_format(value, *args, **kwargs): if args and kwargs: raise FilterArgumentError("can't handle positional and keyword arguments at the same time") return soft_unicode(value) % (kwargs or args)
Apply python string formatting on an object: .. sourcecode:: jinja {{ "%s - %s"|format("Hello?", "Foo!") }} -> Hello? - Foo!
10,628
def checkInputParameter(method, parameters, validParameters, requiredParameters=None): for parameter in parameters: if parameter not in validParameters: raise dbsClientException("Invalid input", "API %s does not support parameter %s. Supported parameters are %s" \ % (method, parameter, validParameters)) if requiredParameters is not None: if in requiredParameters: match = False for requiredParameter in requiredParameters[]: if requiredParameter!= and requiredParameter in parameters: match = True break if not match: raise dbsClientException("Invalid input", "API %s does require one of the parameters %s" \ % (method, requiredParameters[])) if in requiredParameters: for requiredParameter in requiredParameters[]: if requiredParameter not in parameters: raise dbsClientException("Invalid input", "API %s does require the parameter %s. Forced required parameters are %s" \ % (method, requiredParameter, requiredParameters[])) if in requiredParameters: overlap = [] for requiredParameter in requiredParameters[]: if requiredParameter in parameters: overlap.append(requiredParameter) if len(overlap) != 1: raise dbsClientException("Invalid input", "API %s does requires only *one* of the parameters %s." \ % (method, requiredParameters[]))
Helper function to check input parameters before sending them to the server :param method: Name of the API :type method: str :param parameters: Parameters passed by the user :type parameters: list :param validParameters: Allowed parameters for the API call :type validParameters: list :param requiredParameters: Required parameters for the API call (Default: None) :type requiredParameters: list
10,629
def construct_rest_of_world(self, excluded, name=None, fp=None, geom=True): for location in excluded: assert location in self.locations, "Can't find location {}".format(location) included = self.all_faces.difference( set().union(*[set(self.data[loc]) for loc in excluded]) ) if not geom: return included elif not gis: warn(MISSING_GIS) return geom = _union(included)[1] if fp: self.write_geoms_to_file(fp, [geom], [name] if name else None) return fp else: return geom
Construct rest-of-world geometry and optionally write to filepath ``fp``. Excludes faces in location list ``excluded``. ``excluded`` must be an iterable of location strings (not face ids).
10,630
def gen3d_conformer(self): gen3d = ob.OBOp.FindType("Gen3D") gen3d.Do(self._obmol)
A combined method to first generate 3D structures from 0D or 2D structures and then find the minimum energy conformer: 1. Use OBBuilder to create a 3D structure using rules and ring templates 2. Do 250 steps of a steepest descent geometry optimization with the MMFF94 forcefield 3. Do 200 iterations of a Weighted Rotor conformational search (optimizing each conformer with 25 steps of a steepest descent) 4. Do 250 steps of a conjugate gradient geometry optimization. Warning from openbabel docs: For many applications where 100s if not 1000s of molecules need to be processed, gen3d is rather SLOW. Sometimes this function can cause a segmentation fault. A future version of Open Babel will provide options for slow/medium/fast 3D structure generation which will involve different compromises between speed and finding the global energy minimum.
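A hedged sketch of the gen3d pipeline the docstring describes, written against the Open Babel Python bindings; the exact call sequence (MMFF94 setup, steepest descent, weighted rotor search, conjugate gradients) mirrors the steps listed above but is an assumption, not the library's internal Gen3D implementation.

# Sketch of the described pipeline using the openbabel bindings;
# the sequence of force-field calls is an assumption.
from openbabel import openbabel as ob

def gen3d_sketch(obmol):
    builder = ob.OBBuilder()
    builder.Build(obmol)                      # 1. rule/template based 3D build
    ff = ob.OBForceField.FindForceField("MMFF94")
    if ff is None or not ff.Setup(obmol):
        raise RuntimeError("MMFF94 setup failed")
    ff.SteepestDescent(250)                   # 2. quick geometry clean-up
    ff.WeightedRotorSearch(200, 25)           # 3. conformer search
    ff.ConjugateGradients(250)                # 4. final optimization
    ff.GetCoordinates(obmol)                  # copy optimized coords back
    return obmol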
10,631
def fig_to_svg(fig): buf = io.StringIO() fig.savefig(buf, format='svg') buf.seek(0) return buf.getvalue()
Helper function to convert matplotlib figure to SVG string Returns: str: figure as SVG string
10,632
def restart(self): self.master_clients = 0 self.create_new_client_if_empty = False for i in range(len(self.clients)): client = self.clients[-1] try: client.shutdown() except Exception as e: QMessageBox.warning(self, _(), _("It was not possible to restart the IPython console " "when switching to this project. The error was<br><br>" "<tt>{0}</tt>").format(e), QMessageBox.Ok) self.close_client(client=client, force=True) self.create_new_client(give_focus=False) self.create_new_client_if_empty = True
Restart the console This is needed when we switch projects to update PYTHONPATH and the selected interpreter
10,633
def _handle_browse(self, relpath, params): abspath = os.path.normpath(os.path.join(self._root, relpath)) if not abspath.startswith(self._root): raise ValueError if os.path.isdir(abspath): self._serve_dir(abspath, params) elif os.path.isfile(abspath): self._serve_file(abspath, params)
Handle requests to browse the filesystem under the build root.
10,634
def rebin(self, *factors, **kwargs): if not HAVE_SCIPY: raise NotImplementedError("Rebinning requires scipy.ndimage") if any([x != 'order' for x in kwargs.keys()]): raise ValueError("Only the 'order' keyword argument is accepted. Yeah, this is confusing.. blame python 2.") order = kwargs.get('order', 1) mh = self.similar_blank_histogram() if not len(factors) == self.dimensions: raise ValueError("You must pass %d rebin factors to rebin a %d-dimensional histogram" % ( self.dimensions, self.dimensions )) for i, f in enumerate(factors): x = self.bin_edges[i] mh.bin_edges[i] = np.interp( x=np.linspace(0, 1, (len(x) - 1) * f + 1), xp=np.linspace(0, 1, len(x)), fp=x) mh.histogram = zoom(self.histogram, factors, order=order) mh.histogram *= self.histogram.sum() / mh.histogram.sum() return mh
Return a new histogram that is 'rebinned' (zoomed) by factors (tuple of floats) along each dimensions factors: tuple with zoom factors along each axis. e.g. 2 = double number of bins, 0.5 = halve them. order: Order for spline interpolation in scipy.ndimage.zoom. Defaults to linear interpolation (order=1). The only accepted keyword argument is 'order'!!! (python 2 is not nice) The normalization is set to the normalization of the current histogram The factors don't have to be integers or fractions: scipy.ndimage.zoom deals with the rebinning arcana.
10,635
def _simple_dispatch(self, name, params): try: func = self.funcs[name] except KeyError: pass else: if isinstance(params, (list, tuple)): return func(*params) else: return func(**params) return self._dispatch_method(name, params)
Dispatch method
10,636
def expand_labels(labels, subtopic=False): labels = list(labels) assert all(lab.value == CorefValue.Positive for lab in labels) if len(labels) == 0: return annotator = labels[0].annotator_id data_backed = set() connected_component = set() for label in labels: ident1, ident2 = idents_from_label(label, subtopic=subtopic) data_backed.add(normalize_pair(ident1, ident2)) connected_component.add(ident1) connected_component.add(ident2) for ident1, ident2 in combinations(connected_component, 2): if normalize_pair(ident1, ident2) not in data_backed: (cid1, subid1), (cid2, subid2) = ident1, ident2 yield Label(cid1, cid2, annotator, CorefValue.Positive, subtopic_id1=subid1, subtopic_id2=subid2)
Expand a set of labels that define a connected component. ``labels`` must define a *positive* connected component: it is all of the edges that make up the *single* connected component in the :class:`LabelStore`. expand will ignore subtopic assignments, and annotator_id will be an arbitrary one selected from ``labels``. Note that this function only returns the expanded labels, which is guaranteed to be disjoint with the given ``labels``. This requirement implies that ``labels`` is held in memory to ensure that no duplicates are returned. If ``subtopic`` is ``True``, then it is assumed that ``labels`` defines a ``subtopic`` connected component. In this case, subtopics are included in the expanded labels. :param labels: iterable of :class:`Label` for the connected component. :rtype: generator of expanded :class:`Label`s only
10,637
def path(self, which=None): if which in ('bulk_resume', 'bulk_search', 'summary'): return '{0}/{1}'.format( super(ForemanTask, self).path(), which ) return super(ForemanTask, self).path(which)
Extend ``nailgun.entity_mixins.Entity.path``. The format of the returned path depends on the value of ``which``: bulk_resume /foreman_tasks/api/tasks/bulk_resume bulk_search /foreman_tasks/api/tasks/bulk_search summary /foreman_tasks/api/tasks/summary Otherwise, call ``super``.
10,638
def concatenate_matrices(*matrices): M = numpy.identity(4) for i in matrices: M = numpy.dot(M, i) return M
Return concatenation of series of transformation matrices. >>> M = numpy.random.rand(16).reshape((4, 4)) - 0.5 >>> numpy.allclose(M, concatenate_matrices(M)) True >>> numpy.allclose(numpy.dot(M, M.T), concatenate_matrices(M, M.T)) True
10,639
def _augment_text_w_syntactic_info( self, text, text_layer ): j = 0 for sentence in text.divide( layer=WORDS, by=SENTENCES ): for i in range(len(sentence)): estnltkToken = sentence[i] vislcg3Token = text_layer[j] parse_found = False if PARSER_OUT in vislcg3Token: if len( vislcg3Token[PARSER_OUT] ) > 0: firstParse = vislcg3Token[PARSER_OUT][0] estnltkToken[] = str(i) estnltkToken[] = str(firstParse[1]) deprels = .join( [p[0] for p in vislcg3Token[PARSER_OUT]] ) estnltkToken[] = deprels parse_found = True if not parse_found: raise Exception("(!) Unable to retrieve syntactic analysis for the ",\ estnltkToken, , vislcg3Token ) j += 1
Augments given Text object with the syntactic information from the *text_layer*. More specifically, adds information about SYNTAX_LABEL, SYNTAX_HEAD and DEPREL to each token in the Text object; (!) Note: this method is added to provide some initial consistency with MaltParser based syntactic parsing; If a better syntactic parsing interface is achieved in the future, this method will be deprecated ...
10,640
def auctionWS(symbols=None, on_data=None): symbols = _strToList(symbols) sendinit = ({: symbols, : []},) return _stream(_wsURL(), sendinit, on_data)
https://iextrading.com/developer/docs/#auction
10,641
def read(address, length): arr = create_string_buffer(length) return i2c_msg( addr=address, flags=I2C_M_RD, len=length, buf=arr)
Prepares an i2c read transaction. :param address: Slave address. :type: address: int :param length: Number of bytes to read. :type: length: int :return: New :py:class:`i2c_msg` instance for read operation. :rtype: :py:class:`i2c_msg`
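A possible way to execute the prepared read transaction, assuming the smbus2-style API this snippet resembles; the bus number and slave address are placeholders.

# Usage sketch, assuming an smbus2-style bus object; 1 and 0x48 are placeholders.
from smbus2 import SMBus, i2c_msg

with SMBus(1) as bus:
    msg = i2c_msg.read(0x48, 2)   # prepare a 2-byte read transaction
    bus.i2c_rdwr(msg)             # execute it on the bus
    print(list(msg))              # received bytes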
10,642
def insert(self, loc, column, value, allow_duplicates=False): self._ensure_valid_index(value) value = self._sanitize_column(column, value, broadcast=False) self._data.insert(loc, column, value, allow_duplicates=allow_duplicates)
Insert column into DataFrame at specified location. Raises a ValueError if `column` is already contained in the DataFrame, unless `allow_duplicates` is set to True. Parameters ---------- loc : int Insertion index. Must verify 0 <= loc <= len(columns) column : string, number, or hashable object label of the inserted column value : int, Series, or array-like allow_duplicates : bool, optional
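A short pandas example of the insert() behaviour described above.

# Place a new column at position 1; insertion happens in place.
import pandas as pd

df = pd.DataFrame({"a": [1, 2], "c": [5, 6]})
df.insert(1, "b", [3, 4])          # 0 <= loc <= len(df.columns) must hold
print(df.columns.tolist())         # ['a', 'b', 'c']
df.insert(3, "b", [7, 8], allow_duplicates=True)  # duplicate label allowed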
10,643
def _set_binner(self): if self.binner is None: self.binner, self.grouper = self._get_binner()
Setup our binners. Cache these as we are an immutable object
10,644
def display_data_item(self, data_item: DataItem, source_display_panel=None, source_data_item=None): for display_panel in self.__document_controller.workspace_controller.display_panels: if display_panel.data_item == data_item._data_item: display_panel.request_focus() return DisplayPanel(display_panel) result_display_panel = self.__document_controller.next_result_display_panel() if result_display_panel: display_item = self.__document_controller.document_model.get_display_item_for_data_item(data_item._data_item) result_display_panel.set_display_panel_display_item(display_item) result_display_panel.request_focus() return DisplayPanel(result_display_panel) return None
Displays a new data item and gives it keyboard focus. Uses existing display if it is already displayed. .. versionadded:: 1.0 Status: Provisional Scriptable: Yes
10,645
def run_script(pycode): if pycode[0] == "\n": pycode = pycode[1:] pycode = pycode.rstrip() pycode = textwrap.dedent(pycode) globs = {} six.exec_(pycode, globs, globs) return globs
Run the Python in `pycode`, and return a dict of the resulting globals.
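A quick example of run_script() above, assuming the imports the function itself relies on (six, textwrap) are available: the leading newline and indentation are handled by the dedent step, and the resulting globals dict is returned.

# Execute a snippet and read back one of its globals.
globs = run_script("""
    x = 21
    y = x * 2
""")
print(globs["y"])  # 42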
10,646
def _check_status_errors(proto, content, error_traps=None): if content.status == proto.OK: return try: if content.status == proto.INTERNAL_ERROR: raise errors.UnknownValidatorError() except AttributeError: pass try: if content.status == proto.NOT_READY: raise errors.ValidatorNotReady() except AttributeError: pass try: if content.status == proto.NO_ROOT: raise errors.HeadNotFound() except AttributeError: pass try: if content.status == proto.INVALID_PAGING: raise errors.PagingInvalid() except AttributeError: pass try: if content.status == proto.INVALID_SORT: raise errors.SortInvalid() except AttributeError: pass if error_traps is not None: for trap in error_traps: trap.check(content.status)
Raises HTTPErrors based on error statuses sent from validator. Checks for common statuses and runs route specific error traps.
10,647
def registerAtomType(self, parameters): name = parameters['name'] if name in self._atomTypes: raise ValueError('Found multiple definitions for atom type: ' + name) atom_class = parameters['class'] mass = _convertParameterToNumber(parameters['mass']) element = None if 'element' in parameters: element, custom = self._create_element(parameters['element'], mass) if custom: self.non_element_types[element.symbol] = element self._atomTypes[name] = self.__class__._AtomType(name, atom_class, mass, element) if atom_class in self._atomClasses: type_set = self._atomClasses[atom_class] else: type_set = set() self._atomClasses[atom_class] = type_set type_set.add(name) self._atomClasses[''].add(name) name = parameters['name'] if 'def' in parameters: self.atomTypeDefinitions[name] = parameters['def'] if 'overrides' in parameters: overrides = set(atype.strip() for atype in parameters['overrides'].split(",")) if overrides: self.atomTypeOverrides[name] = overrides if 'desc' in parameters: self.atomTypeDesc[name] = parameters['desc'] if 'doi' in parameters: dois = set(doi.strip() for doi in parameters['doi'].split(',')) self.atomTypeRefs[name] = dois
Register a new atom type.
10,648
def _lazy_migration(self, patch=None, meta=None, toa=None): objects = yield self.revisions.find({"master_id": self.master_id}, limit=1) if len(objects) > 0: raise Return(objects) if not patch: patch = yield self.collection.find_one_by_id(self.master_id) if not toa: toa = long(time.mktime(datetime.datetime.now().timetuple())) meta["comment"] = "This document was migrated automatically." if isinstance(patch, dict) and patch.get("id"): del patch["id"] if isinstance(patch, dict) and patch.get("_id"): del patch["_id"] snapshot = copy.deepcopy(patch) snapshot["id"] = self.master_id snapshot["published"] = self.settings.get("scheduler", {}).get("lazy_migrated_published_by_default", False) legacy_revision = { "toa": toa, "processed": True, "collection": self.collection_name, "master_id": self.master_id, "action": self.INSERT_ACTION, "patch": self.collection._dictionary_to_cursor(patch), "snapshot": snapshot, "meta": meta, } response = yield self.revisions.insert(legacy_revision) if isinstance(response, str): raise Return([legacy_revision]) raise Return(None)
Handle when a revision scheduling is turned onto a collection that was previously not scheduleable. This method will create the first revision for each object before it is ever used in the context of scheduling. :param dict patch: The patch that should be used :param dict meta: Meta data for this action :param int toa: The time of action :return: A legacy revision for a document that was previously not under revision scheduling :rtype: list
10,649
def variational_expectations(self, Y, m, v, gh_points=None, Y_metadata=None): if gh_points is None: gh_x, gh_w = self._gh_points() else: gh_x, gh_w = gh_points shape = m.shape m,v,Y = m.flatten(), v.flatten(), Y.flatten() X = gh_x[None,:]*np.sqrt(2.*v[:,None]) + m[:,None] logp = self.logpdf(X,Y[:,None], Y_metadata=Y_metadata) dlogp_dx = self.dlogpdf_df(X, Y[:,None], Y_metadata=Y_metadata) d2logp_dx2 = self.d2logpdf_df2(X, Y[:,None], Y_metadata=Y_metadata) F = np.dot(logp, gh_w)/np.sqrt(np.pi) dF_dm = np.dot(dlogp_dx, gh_w)/np.sqrt(np.pi) dF_dv = np.dot(d2logp_dx2, gh_w)/np.sqrt(np.pi) dF_dv /= 2. if np.any(np.isnan(dF_dv)) or np.any(np.isinf(dF_dv)): stop if np.any(np.isnan(dF_dm)) or np.any(np.isinf(dF_dm)): stop if self.size: dF_dtheta = self.dlogpdf_dtheta(X, Y[:,None], Y_metadata=Y_metadata) dF_dtheta = np.dot(dF_dtheta, gh_w)/np.sqrt(np.pi) dF_dtheta = dF_dtheta.reshape(self.size, shape[0], shape[1]) else: dF_dtheta = None return F.reshape(*shape), dF_dm.reshape(*shape), dF_dv.reshape(*shape), dF_dtheta
Use Gauss-Hermite Quadrature to compute E_p(f) [ log p(y|f) ] d/dm E_p(f) [ log p(y|f) ] d/dv E_p(f) [ log p(y|f) ] where p(f) is a Gaussian with mean m and variance v. The shapes of Y, m and v should match. If no gh_points are passed, we construct them using default options
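A stand-alone numpy illustration of the Gauss-Hermite rule this method relies on, E_{N(m,v)}[g(f)] is approximated by (1/sqrt(pi)) * sum_i w_i * g(sqrt(2v)*x_i + m), checked against a closed-form expectation; the integrand here is arbitrary.

# Gauss-Hermite approximation of a Gaussian expectation.
import numpy as np

gh_x, gh_w = np.polynomial.hermite.hermgauss(20)
m, v = 0.5, 2.0
g = lambda f: f ** 2                      # test integrand with known answer
approx = np.dot(g(np.sqrt(2 * v) * gh_x + m), gh_w) / np.sqrt(np.pi)
print(approx, m ** 2 + v)                 # both roughly 2.25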
10,650
def CleanseRawStrings(raw_lines): delimiter = None lines_without_raw_strings = [] for line in raw_lines: if delimiter: end = line.find(delimiter) if end >= 0: leading_space = Match(r, line) line = leading_space.group(1) + + line[end + len(delimiter):] delimiter = None else: else: break lines_without_raw_strings.append(line) return lines_without_raw_strings
Removes C++11 raw strings from lines. Before: static const char kData[] = R"( multi-line string )"; After: static const char kData[] = "" (replaced by blank line) ""; Args: raw_lines: list of raw lines. Returns: list of lines with C++11 raw strings replaced by empty strings.
10,651
def set_redis(self, redis_url, redis_timeout=10): self._redis_url = redis_url self._redis_timeout = redis_timeout if isinstance(redis_timeout, int) and redis_timeout > 0 else 10 try: self._redis = redis.StrictRedis.from_url(url=redis_url) self._redis.ping() except (ConnectionError, TimeoutError): raise CartolaFCError()
Authenticates with the Redis server using the given URL. Args: redis_url (str): URL used to connect to the Redis server, e.g. redis://user:password@localhost:6379/2. redis_timeout (int): The default timeout (in seconds). kwargs (dict): Raises: cartolafc.CartolaFCError: If it is not possible to connect to the Redis server
10,652
def authenticated_userid(request): user = getattr(request, , None) key = user.pk_field() return getattr(user, key)
Helper function that can be used in ``db_key`` to support `self` as a collection key.
10,653
def git(self, *arguments): process = subprocess.Popen(['git'] + list(arguments), stdout=subprocess.PIPE, cwd=self.cwd) out = process.communicate()[0].decode() code = process.returncode return code, out
Return (exit code, output) from git.
10,654
def create_arrow(rows, cols, radius=0.1, length=1.0, cone_radius=None, cone_length=None): md_cyl = None if cone_radius is None: cone_radius = radius*2.0 if cone_length is None: con_L = length/3.0 cyl_L = length*2.0/3.0 else: cyl_L = max(0, length - cone_length) con_L = min(cone_length, length) if cyl_L != 0: md_cyl = create_cylinder(rows, cols, radius=[radius, radius], length=cyl_L) md_con = create_cone(cols, radius=cone_radius, length=con_L) verts = md_con.get_vertices() nbr_verts_con = verts.size//3 faces = md_con.get_faces() if md_cyl is not None: trans = np.array([[0.0, 0.0, cyl_L]]) verts = np.vstack((verts+trans, md_cyl.get_vertices())) faces = np.vstack((faces, md_cyl.get_faces()+nbr_verts_con)) return MeshData(vertices=verts, faces=faces)
Create a 3D arrow using a cylinder plus cone Parameters ---------- rows : int Number of rows. cols : int Number of columns. radius : float Base cylinder radius. length : float Length of the arrow. cone_radius : float Radius of the cone base. If None, then this defaults to 2x the cylinder radius. cone_length : float Length of the cone. If None, then this defaults to 1/3 of the arrow length. Returns ------- arrow : MeshData Vertices and faces computed for a cone surface.
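A usage sketch for create_arrow() above, assuming the create_cylinder, create_cone and MeshData helpers it references are available (as in vispy-style geometry code).

# Unit-length arrow along +z; with the defaults the cone occupies the final
# third of the length and has twice the cylinder radius.
mesh = create_arrow(rows=10, cols=30, radius=0.05, length=1.0)
verts, faces = mesh.get_vertices(), mesh.get_faces()
print(verts.shape, faces.shape)   # (N, 3) vertex coords and (M, 3) triangle indices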
10,655
def get_version(module=): with open(os.path.join(HERE, module, ), ) as f: data = f.read() lines = data.split() for line in lines: if line.startswith(): version_tuple = ast.literal_eval(line.split()[-1].strip()) version = .join(map(str, version_tuple)) break return version
Get version.
10,656
def remove(name=None, slot=None, fromrepo=None, pkgs=None, **kwargs): ...
.. versionchanged:: 2015.8.12,2016.3.3,2016.11.0 On minions running systemd>=205, `systemd-run(1)`_ is now used to isolate commands which modify installed packages from the ``salt-minion`` daemon's control group. This is done to keep systemd from killing any emerge commands spawned by Salt when the ``salt-minion`` service is restarted. (see ``KillMode`` in the `systemd.kill(5)`_ manpage for more information). If desired, usage of `systemd-run(1)`_ can be suppressed by setting a :mod:`config option <salt.modules.config.get>` called ``systemd.scope``, with a value of ``False`` (no quotes). .. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html .. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html Remove packages via emerge --unmerge. name The name of the package to be deleted. slot Restrict the remove to a specific slot. Ignored if ``name`` is None. fromrepo Restrict the remove to a specific slot. Ignored if ``name`` is None. Multiple Package Options: pkgs Uninstall multiple packages. ``slot`` and ``fromrepo`` arguments are ignored if this argument is present. Must be passed as a python list. .. versionadded:: 0.16.0 Returns a dict containing the changes. CLI Example: .. code-block:: bash salt '*' pkg.remove <package name> salt '*' pkg.remove <package name> slot=4.4 fromrepo=gentoo salt '*' pkg.remove <package1>,<package2>,<package3> salt '*' pkg.remove pkgs='["foo", "bar"]'
10,657
def register_writer(klass): if not callable(klass): raise ValueError("Can only register callables as engines") engine_name = klass.engine _writers[engine_name] = klass
Add engine to the excel writer registry. You must use this method to integrate with ``to_excel``. Parameters ---------- klass : ExcelWriter
10,658
def get_random_label(): return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(15))
Get a random label string to use when clustering jobs.
10,659
def _reset_bbox(self): scale_x, scale_y = self.get_scale_xy() pan_x, pan_y = self.get_pan(coord='data')[:2] win_wd, win_ht = self.get_window_size() win_wd, win_ht = max(1, win_wd), max(1, win_ht) self._calc_bg_dimensions(scale_x, scale_y, pan_x, pan_y, win_wd, win_ht)
This function should only be called internally. It resets the viewer's bounding box based on changes to pan or scale.
10,660
def set_verify_depth(self, depth): if not isinstance(depth, integer_types): raise TypeError("depth must be an integer") _lib.SSL_CTX_set_verify_depth(self._context, depth)
Set the maximum depth for the certificate chain verification that shall be allowed for this Context object. :param depth: An integer specifying the verify depth :return: None
10,661
def apply_noise_model(prog, noise_model): new_prog = _noise_model_program_header(noise_model) for i in prog: if isinstance(i, Gate): try: _, new_name = get_noisy_gate(i.name, tuple(i.params)) new_prog += Gate(new_name, [], i.qubits) except NoisyGateUndefined: new_prog += i else: new_prog += i return new_prog
Apply a noise model to a program and generate a 'noisy-fied' version of the program. :param Program prog: A Quil Program object. :param NoiseModel noise_model: A NoiseModel, either generated from an ISA or from a simple decoherence model. :return: A new program translated to a noisy gateset and with noisy readout as described by the noisemodel. :rtype: Program
10,662
def drawing_update(self): from MAVProxy.modules.mavproxy_map import mp_slipmap if self.draw_callback is None: return self.draw_line.append(self.click_position) if len(self.draw_line) > 1: self.mpstate.map.add_object(mp_slipmap.SlipPolygon(, self.draw_line, layer=, linewidth=2, colour=(128,128,255)))
update line drawing
10,663
def register_multi_flags_validator(flag_names, multi_flags_checker, message=, flag_values=FLAGS): v = gflags_validators.MultiFlagsValidator( flag_names, multi_flags_checker, message) _add_validator(flag_values, v)
Adds a constraint to multiple flags. The constraint is validated when flags are initially parsed, and after each change of the corresponding flag's value. Args: flag_names: [str], a list of the flag names to be checked. multi_flags_checker: callable, a function to validate the flag. input - dictionary, with keys() being flag_names, and value for each key being the value of the corresponding flag (string, boolean, etc). output - Boolean. Must return True if validator constraint is satisfied. If constraint is not satisfied, it should either return False or raise gflags.ValidationError. message: Error text to be shown to the user if checker returns False. If checker raises gflags.ValidationError, message from the raised error will be shown. flag_values: An optional FlagValues instance to validate against. Raises: AttributeError: If a flag is not registered as a valid flag name.
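A sketch of a two-flag constraint using the API described above; the flag names are made up and the module-level gflags helpers are assumed to be available.

# Hypothetical multi-flag constraint: a lower bound must not exceed an upper bound.
import gflags

gflags.DEFINE_integer("min_workers", 1, "Lower bound on worker count")
gflags.DEFINE_integer("max_workers", 8, "Upper bound on worker count")

def _workers_ok(flags_dict):
    # keys are the flag names, values are the parsed flag values
    return flags_dict["min_workers"] <= flags_dict["max_workers"]

gflags.register_multi_flags_validator(
    ["min_workers", "max_workers"], _workers_ok,
    message="min_workers must not exceed max_workers")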
10,664
def profile(*args, **kwargs): if _is_initialized(): def wrapper(f): return wrapHttpEndpoint(f) return wrapper raise Exception( "before measuring anything, you need to call init_app()")
http endpoint decorator
10,665
def __intermediate_bridge(self, interface, i): if interface[] == and i < 2: bridge_members = .join(interface.pop()) if bridge_members: interface[] = bridge_members else: interface[] = True del interface[] elif interface[] == and i >= 2: if not in interface[]: interface[] = .format(**interface) for attr in [, , , ]: if attr in interface: del interface[attr] elif interface[] != : del interface[] return interface
converts NetJSON bridge to UCI intermediate data structure
10,666
def get_bank_form(self, *args, **kwargs): if isinstance(args[-1], list) or in kwargs: return self.get_bank_form_for_create(*args, **kwargs) else: return self.get_bank_form_for_update(*args, **kwargs)
Pass through to provider BankAdminSession.get_bank_form_for_update
10,667
def generate_command(dag_id, task_id, execution_date, mark_success=False, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ignore_ti_state=False, local=False, pickle_id=None, file_path=None, raw=False, job_id=None, pool=None, cfg_path=None ): iso = execution_date.isoformat() cmd = ["airflow", "run", str(dag_id), str(task_id), str(iso)] cmd.extend(["--mark_success"]) if mark_success else None cmd.extend(["--pickle", str(pickle_id)]) if pickle_id else None cmd.extend(["--job_id", str(job_id)]) if job_id else None cmd.extend(["-A"]) if ignore_all_deps else None cmd.extend(["-i"]) if ignore_task_deps else None cmd.extend(["-I"]) if ignore_depends_on_past else None cmd.extend(["--force"]) if ignore_ti_state else None cmd.extend(["--local"]) if local else None cmd.extend(["--pool", pool]) if pool else None cmd.extend(["--raw"]) if raw else None cmd.extend(["-sd", file_path]) if file_path else None cmd.extend(["--cfg_path", cfg_path]) if cfg_path else None return cmd
Generates the shell command required to execute this task instance. :param dag_id: DAG ID :type dag_id: unicode :param task_id: Task ID :type task_id: unicode :param execution_date: Execution date for the task :type execution_date: datetime :param mark_success: Whether to mark the task as successful :type mark_success: bool :param ignore_all_deps: Ignore all ignorable dependencies. Overrides the other ignore_* parameters. :type ignore_all_deps: bool :param ignore_depends_on_past: Ignore depends_on_past parameter of DAGs (e.g. for Backfills) :type ignore_depends_on_past: bool :param ignore_task_deps: Ignore task-specific dependencies such as depends_on_past and trigger rule :type ignore_task_deps: bool :param ignore_ti_state: Ignore the task instance's previous failure/success :type ignore_ti_state: bool :param local: Whether to run the task locally :type local: bool :param pickle_id: If the DAG was serialized to the DB, the ID associated with the pickled DAG :type pickle_id: unicode :param file_path: path to the file containing the DAG definition :param raw: raw mode (needs more details) :param job_id: job ID (needs more details) :param pool: the Airflow pool that the task should run in :type pool: unicode :param cfg_path: the Path to the configuration file :type cfg_path: basestring :return: shell command that can be used to run the task instance
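An example invocation of generate_command() above; the DAG and task names and the file path are placeholders, and the output is a plain argv list.

from datetime import datetime

cmd = generate_command(
    "example_dag", "example_task", datetime(2021, 1, 1),
    local=True, pool="default_pool", file_path="/dags/example_dag.py",
)
print(cmd)
# ['airflow', 'run', 'example_dag', 'example_task', '2021-01-01T00:00:00',
#  '--local', '--pool', 'default_pool', '-sd', '/dags/example_dag.py']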
10,668
def read(self, src): geojson = None if not self.is_valid_src(src): error = "File < {0} > does not exists or does start with ." raise ValueError(error.format(src)) if not self.is_url(src): return open(src, ).read().decode().encode() tmp = self.get_location(src) if os.path.isfile(tmp): with open(tmp, ) as f: return f.read() response = urllib2.urlopen(src) data = response.read().decode().encode() with open(tmp, ) as f: f.write(data) return data
Download GeoJSON file of US counties from url (S3 bucket)
10,669
def update_user_ns(self, result): if result is not self.shell.user_ns[]: if len(self.shell.user_ns[]) >= self.cache_size and self.do_full_cache: warn(+ `self.cache_size`+ ) self.flush()
Update user_ns with various things like _, __, _1, etc.
10,670
def CheckCronJobAccess(self, username, cron_job_id): self._CheckAccess( username, str(cron_job_id), rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB)
Checks whether a given user can access given cron job.
10,671
def x_11paths_authorization(app_id, secret, context, utc=None): utc = utc or context.headers[X_11PATHS_DATE_HEADER_NAME] url_path = ensure_url_path_starts_with_slash(context.url_path) url_path_query = url_path if context.query_params: url_path_query += "?%s" % (url_encode(context.query_params, sort=True)) string_to_sign = (context.method.upper().strip() + "\n" + utc + "\n" + _get_11paths_serialized_headers(context.headers) + "\n" + url_path_query.strip()) if context.body_params and isinstance(context.renderer, FormRenderer): string_to_sign = string_to_sign + "\n" + url_encode(context.body_params, sort=True).replace("&", "") authorization_header_value = (AUTHORIZATION_METHOD + AUTHORIZATION_HEADER_FIELD_SEPARATOR + app_id + AUTHORIZATION_HEADER_FIELD_SEPARATOR + _sign_data(secret, string_to_sign)) return authorization_header_value
Calculate the authentication headers to be sent with a request to the API. :param app_id: :param secret: :param context :param utc: :return: array a map with the Authorization and Date headers needed to sign a Latch API request
10,672
def locked_context(self, key=None, default=dict): assert default in (list, dict, set) with self._lock: if key is None: yield self._shared_context else: sub_context = self._shared_context.get(key, None) if sub_context is None: sub_context = default() yield sub_context self._shared_context[key] = sub_context
Executor context is a shared memory object. All workers share this. It needs a lock. Its used like this: with executor.context() as context: visited = context['visited'] visited.append(state.cpu.PC) context['visited'] = visited
10,673
def download_and_calibrate_parallel(list_of_ids, n=None): setup_cluster(n_cores=n) c = Client() lbview = c.load_balanced_view() lbview.map_async(download_and_calibrate, list_of_ids) subprocess.Popen(["ipcluster", "stop", "--quiet"])
Download and calibrate in parallel. Parameters ---------- list_of_ids : list, optional container with img_ids to process n : int Number of cores for the parallel processing. Default: n_cores_system//2
10,674
def _tp_relfq_name(tp, tp_name=None, assumed_globals=None, update_assumed_globals=None, implicit_globals=None): if tp_name is None: tp_name = util.get_class_qualname(tp) if implicit_globals is None: implicit_globals = _implicit_globals else: implicit_globals = implicit_globals.copy() implicit_globals.update(_implicit_globals) if assumed_globals is None: if update_assumed_globals is None: return tp_name md = sys.modules[tp.__module__] if md in implicit_globals: return tp_name name = tp.__module__+'.'+tp_name pck = None if not (md.__package__ is None or md.__package__ == '' or name.startswith(md.__package__)): pck = md.__package__ return name if pck is None else pck+'.'+name if tp in assumed_globals: try: return assumed_globals[tp] except: return tp_name elif hasattr(tp, '__origin__') and tp.__origin__ in assumed_globals: try: return assumed_globals[tp.__origin__] except: return tp_name elif is_Callable(tp) and typing.Callable in assumed_globals: try: return assumed_globals[typing.Callable] except: return tp_name elif update_assumed_globals == True: if not assumed_globals is None: if hasattr(tp, '__origin__') and not tp.__origin__ is None: toadd = tp.__origin__ elif is_Callable(tp): toadd = typing.Callable else: toadd = tp if not sys.modules[toadd.__module__] in implicit_globals: assumed_globals.add(toadd) return tp_name else: md = sys.modules[tp.__module__] if md in implicit_globals: return tp_name md_name = tp.__module__ if md in assumed_globals: try: md_name = assumed_globals[md] except: pass else: if not (md.__package__ is None or md.__package__ == '' or md_name.startswith(md.__package__)): md_name = md.__package__+'.'+tp.__module__ return md_name+'.'+tp_name
Provides the fully qualified name of a type relative to a set of modules and types that is assumed as globally available. If assumed_globals is None this always returns the fully qualified name. If update_assumed_globals is True, this will return the plain type name, but will add the type to assumed_globals (expected to be a set). This way a caller can query how to generate an appropriate import section. If update_assumed_globals is False, assumed_globals can alternatively be a mapping rather than a set. In that case the mapping is expected to be an alias table, mapping modules or types to their alias names desired for displaying. update_assumed_globals can be None (default). In that case this will return the plain type name if assumed_globals is None as well (default). This mode is there to have a less involved default behavior.
10,675
def extract_operations(self, migrations): operations = [] for migration in migrations: for operation in migration.operations: if isinstance(operation, RunSQL): statements = sqlparse.parse(dedent(operation.sql)) for statement in statements: operation = SqlObjectOperation.parse(statement) if operation: operations.append(operation) if self.verbosity >= 2: self.stdout.write(" > % -100s (%s)" % (operation, migration)) return operations
Extract SQL operations from the given migrations
10,676
def geo_field(queryset): for field in queryset.model._meta.fields: if isinstance(field, models.GeometryField): return field raise exceptions.FieldDoesNotExist()
Returns the GeometryField for a django or spillway GeoQuerySet.
10,677
def transform(self, flip_x, flip_y, swap_xy): self.logger.debug("flip_x=%s flip_y=%s swap_xy=%s" % ( flip_x, flip_y, swap_xy)) with self.suppress_redraw: self.t_.set(flip_x=flip_x, flip_y=flip_y, swap_xy=swap_xy)
Transform view of the image. .. note:: Transforming the image is generally faster than rotating, if rotating in 90 degree increments. Also see :meth:`rotate`. Parameters ---------- flip_x, flip_y : bool If `True`, flip the image in the X and Y axes, respectively swap_xy : bool If `True`, swap the X and Y axes.
10,678
def resize_old(self, block_size, order=0, mode='constant', cval=False): if not cval: cval = np.nan if (self.raster.dtype.name.find('float') != -1 and np.max(np.abs([self.max(), self.min()])) > 1): raster2 = (self.raster-self.min())/(self.max()-self.min()) else: raster2 = self.raster.copy() raster2 = raster2.astype(float) raster2[self.raster.mask] = np.nan raster2 = resize(raster2, block_size, order=order, mode=mode, cval=cval) raster2 = np.ma.masked_array(raster2, mask=np.isnan(raster2), fill_value=self.raster.fill_value) raster2 = raster2*(self.max()-self.min())+self.min() raster2[raster2.mask] = self.nodata_value raster2.mask = np.logical_or(np.isnan(raster2.data), raster2.data == self.nodata_value) geot = list(self.geot) [geot[-1], geot[1]] = np.array([geot[-1], geot[1]])*self.shape/block_size return GeoRaster(raster2, tuple(geot), nodata_value=self.nodata_value, projection=self.projection, datatype=self.datatype)
geo.resize(new_shape, order=0, mode='constant', cval=np.nan, preserve_range=True) Returns resized georaster
10,679
def get_listed_projects(): index_path = Path().resolve() / "docs" / "index.md" with open(index_path, "r") as index_file: lines = index_file.readlines() listed_projects = set() project_section = False for _, l in enumerate(lines): idx = l.find(PROJECT_KEY) if idx >= 0: project_section = True if project_section: start = l.find("](") if start > 0: closing_parenthesis = sorted( [m.start() for m in re.finditer(r"\)", l) if m.start() > start] )[0] project = l[start + 2 : closing_parenthesis] listed_projects.add(project) if len(listed_projects) > 0 and l.startswith("#"): return listed_projects return listed_projects
Find the projects listed in the Home Documentation's index.md file

Returns:
    set(str): projects' names, with the '/' in their beginnings
10,680
def fetch_wallet_balances(wallets, fiat, **modes):
    # NOTE: every string literal (the mode flags, dict keys and the helper-currency
    # list) was lost in extraction; the names used below ('async', 'verbose',
    # 'price', 'error', 'balance', the result keys and the ['btc', 'ltc', 'doge',
    # 'ppc'] list) are reconstructions and should be treated as assumptions.
    price_fetch = set([x[0] for x in wallets])
    balances = {}
    prices = {}
    fetch_length = len(wallets) + len(price_fetch)

    helpers = {fiat.lower(): {}}

    if not modes.get('async', False):
        for crypto in price_fetch:
            try:
                p = get_current_price(
                    crypto, fiat, helper_prices=helpers, report_services=True, **modes
                )
                prices[crypto] = {'price': p}
                if crypto in ['btc', 'ltc', 'doge', 'ppc']:
                    helpers[fiat.lower()][crypto] = p
            except NoService as exc:
                prices[crypto] = {'error': str(exc)}

        for crypto, address in wallets:
            if address.replace('.', '').isdigit():
                # A bare number is treated as a literal balance, not an address.
                balances[address] = {'balance': float(address)}
                continue
            try:
                balances[address] = {'balance': get_address_balance(crypto, address.strip(), **modes)}
            except NoService as exc:
                balances[address] = {'error': str(exc)}
    else:
        if modes.get('verbose', False):
            print("Need to make", fetch_length, "external calls")
        with futures.ThreadPoolExecutor(max_workers=int(fetch_length / 2)) as executor:
            future_to_key = dict(
                (executor.submit(
                    get_current_price, crypto, fiat, report_services=True, **modes
                ), crypto) for crypto in price_fetch
            )
            future_to_key.update(dict(
                (executor.submit(
                    get_address_balance, crypto, address.strip(), **modes
                ), address) for crypto, address in wallets
            ))
            done, not_done = futures.wait(future_to_key, return_when=futures.ALL_COMPLETED)
            if len(not_done) > 0:
                print(not_done)
                import debug
                raise Exception("Broke")
            for future in done:
                key = future_to_key[future]
                if len(key) > 5:
                    which = balances
                else:
                    which = prices
                res = future.result()
                which[key] = res

    ret = []
    for crypto, address in wallets:
        error = None
        if 'balance' in balances[address]:
            crypto_value = balances[address]['balance']
        else:
            crypto_value = 0
            error = balances[address]['error']

        if 'price' in prices[crypto]:
            sources, fiat_price = prices[crypto]['price']
        else:
            sources, fiat_price = [], 0
            error = prices[crypto]['error']

        ret.append({
            'crypto': crypto,
            'address': address,
            'crypto_value': crypto_value,
            'fiat_value': (crypto_value or 0) * (fiat_price or 0),
            'fiat_price': fiat_price,
            'price_source': sources[0].name if sources else "None",
            'error': error
        })

    return ret
Wallets must be list of two item lists. First item is crypto, second item
is the address. example:

[
    ['btc', '1PZ3Ps9RvCmUW1s1rHE25FeR8vtKUrhEai'],
    ['ltc', 'Lb78JDGxMcih1gs3AirMeRW6jaG5V9hwFZ']
]
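A hedged usage sketch of fetch_wallet_balances: the 'verbose' mode flag and the result keys follow the reconstructed literals in the code above and are assumptions, and live network services are required.

# Usage sketch; the result keys ('crypto', 'address', 'crypto_value', 'fiat_value',
# 'error') are the reconstructed/assumed keys from the code above.
wallets = [
    ['btc', '1PZ3Ps9RvCmUW1s1rHE25FeR8vtKUrhEai'],
    ['ltc', 'Lb78JDGxMcih1gs3AirMeRW6jaG5V9hwFZ'],
]
for row in fetch_wallet_balances(wallets, 'usd', verbose=True):
    print(row['crypto'], row['address'], row['crypto_value'], row['fiat_value'], row['error'])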
10,681
def real_ip(self):
    if self._real_ip is None:
        response = get(ICANHAZIP)
        self._real_ip = self._get_response_text(response)
    return self._real_ip
The actual public IP of this host.
10,682
def get_supported_currency_choices(api_key):
    import stripe

    stripe.api_key = api_key
    account = stripe.Account.retrieve()
    supported_payment_currencies = stripe.CountrySpec.retrieve(
        account["country"]
    )["supported_payment_currencies"]
    return [(currency, currency.upper()) for currency in supported_payment_currencies]
Pull a stripe account's supported currencies and return a choices tuple of
those supported currencies.

:param api_key: The api key associated with the account from which to pull data.
:type api_key: str
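A short usage sketch for a Django settings module; STRIPE_SECRET_KEY is an assumed setting name, not one documented here.

# Hypothetical usage; STRIPE_SECRET_KEY is an assumed setting name.
from django.conf import settings

CURRENCY_CHOICES = get_supported_currency_choices(settings.STRIPE_SECRET_KEY)
# e.g. [('usd', 'USD'), ('eur', 'EUR'), ...] depending on the account's country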
10,683
def add_fields(layer, absolute_values, static_fields, dynamic_structure):
    # NOTE: several string literals (dictionary keys and the NULL placeholder)
    # were lost in extraction; 'key', 'field_name', 'inasafe_fields' and 'NULL'
    # below are reconstructions and should be treated as assumptions.
    for new_dynamic_field in dynamic_structure:
        field_definition = new_dynamic_field[0]
        unique_values = new_dynamic_field[1]
        for column in unique_values:
            if (column == ''
                    or (hasattr(column, 'isNull') and column.isNull())):
                column = 'NULL'
            field = create_field_from_definition(field_definition, column)
            layer.addAttribute(field)
            key = field_definition['key'] % column
            value = field_definition['field_name'] % column
            layer.keywords['inasafe_fields'][key] = value

    for static_field in static_fields:
        field = create_field_from_definition(static_field)
        layer.addAttribute(field)
        layer.keywords['inasafe_fields'][static_field['key']] = (
            static_field['field_name'])

    for absolute_field in list(absolute_values.keys()):
        field_definition = definition(absolute_values[absolute_field][1])
        field = create_field_from_definition(field_definition)
        layer.addAttribute(field)
        key = field_definition['key']
        value = field_definition['field_name']
        layer.keywords['inasafe_fields'][key] = value
Function to add fields needed in the output layer.

:param layer: The vector layer.
:type layer: QgsVectorLayer

:param absolute_values: The absolute value structure.
:type absolute_values: dict

:param static_fields: The list of static fields to add.
:type static_fields: list

:param dynamic_structure: The list of dynamic fields to add to the layer.
    The list must be structured like this:

    dynamic_structure = [
        [exposure_count_field, unique_exposure]
    ]

    where "exposure_count_field" is the dynamic field to add and
    "unique_exposure" is the list of unique values to associate with this
    dynamic field. Because dynamic_structure is an ordered list, you can add
    many dynamic fields.
:type dynamic_structure: list
10,684
def __setWildcardSymbol(self, value):
    errors = []
    # NOTE: the error message and dialog title literals were lost in extraction;
    # the strings below are placeholders, and the original (garbled) condition is
    # assumed to require a non-empty string.
    if not isinstance(value, str) or not value.split():
        errors.append('Wildcard symbol must be a non-empty string')
    else:
        self.__wildcardSymbol = value
    if errors:
        view.Tli.showErrors('Wildcard symbol error', errors)
self.__wildcardSymbol variable setter
10,685
def ignore_missing_email_protection_eku_cb(ok, ctx):
    err = ctx.get_error()
    if err != m2.X509_V_ERR_INVALID_PURPOSE:
        return ok
    if ctx.get_error_depth() > 0:
        return ok

    cert = ctx.get_current_cert()
    try:
        # NOTE: the extension name and the checked usage strings were lost in
        # extraction; 'keyUsage', 'Digital Signature' and 'Non Repudiation' are
        # reconstructed from the variable name and intent, and are assumptions.
        key_usage = cert.get_ext('keyUsage').get_value()
        if 'Digital Signature' not in key_usage \
                and 'Non Repudiation' not in key_usage:
            return ok
    except LookupError:
        pass
    return 1
For verifying a PKCS7 signature, M2Crypto uses OpenSSL's PKCS7_verify(). The latter requires that the ExtendedKeyUsage extension, if present, contains the 'emailProtection' OID. (Is it because S/MIME is/was the primary use case for PKCS7?) We do not want to fail the verification in this case. At present, M2Crypto lacks the ability to remove or modify an existing extension, so we assign a custom verification callback instead.
10,686
def interactive():
    # NOTE: the HTTP-method names, query-string key, warning message and template
    # name were lost in extraction; 'GET', 'POST', 'text' and 'index.html' below
    # are placeholders and should be treated as assumptions.
    global n
    if request.method == 'GET' and request.args.get('text', '') != "":
        return request.args.get('text', '')
    if request.method == 'POST':
        logging.warning('POST is not supported by the interactive classifier')
    else:
        return render_template('index.html')
Interactive classifier.
10,687
def can_edit(self, user=None, request=None):
    can = False
    if request and not self.owner:
        if (getattr(settings, "LEAFLET_STORAGE_ALLOW_ANONYMOUS", False)
                and self.is_anonymous_owner(request)):
            can = True
            if user and user.is_authenticated():
                # Attach the anonymous map to the authenticated user.
                self.owner = user
                self.save()
                msg = _("Your anonymous map has been attached to your account %s" % user)
                messages.info(request, msg)
    if self.edit_status == self.ANONYMOUS:
        can = True
    elif not user.is_authenticated():
        pass
    elif user == self.owner:
        can = True
    elif self.edit_status == self.EDITORS and user in self.editors.all():
        can = True
    return can
Define whether a user can edit the instance or not, according to their account or the request.
10,688
def cache_cluster_exists(name, conn=None, region=None, key=None, keyid=None, profile=None):
    return bool(describe_cache_clusters(name=name, conn=conn, region=region,
                                        key=key, keyid=keyid, profile=profile))
Check to see if a cache cluster exists.

Example:

.. code-block:: bash

    salt myminion boto3_elasticache.cache_cluster_exists myelasticache
10,689
def tidy_all_the_variables(host, inventory_mgr):
    global _vars
    _vars = dict()
    _vars[host] = inventory_mgr.inventory.get_host_vars(host)
    for group in host.get_groups():
        remove_inherited_and_overridden_vars(_vars[host], group, inventory_mgr)
        remove_inherited_and_overridden_group_vars(group, inventory_mgr)
    return _vars
removes all overridden and inherited variables from hosts and groups
10,690
def put_comments(self, resource, comment, timeout=None):
    # NOTE: the parameter keys and URL suffix were lost in extraction;
    # 'apikey'/'resource'/'comment' and 'comments/put' follow the public
    # VirusTotal v2 API and should be treated as a reconstruction.
    params = {'apikey': self.api_key, 'resource': resource, 'comment': comment}
    try:
        response = requests.post(self.base + 'comments/put', params=params,
                                 proxies=self.proxies, timeout=timeout)
    except requests.RequestException as e:
        return dict(error=str(e))
    return _return_response_and_status_code(response)
Post a comment on a file or URL.

The initial idea of VirusTotal Community was that users should be able to make
comments on files and URLs; the comments may be malware analyses, false positive
flags, disinfection instructions, etc.

Imagine you have some automatic setup that can produce interesting results
related to a given sample or URL that you submit to VirusTotal for antivirus
characterization; you might want to give visibility to your setup by
automatically reviewing samples and URLs with the output of your automation.

:param resource: either a md5/sha1/sha256 hash of the file you want to review
    or the URL itself that you want to comment on.
:param comment: the actual review, you can tag it using the "#" twitter-like
    syntax (e.g. #disinfection #zbot) and reference users using the "@" syntax
    (e.g. @VirusTotalTeam).
:param timeout: The amount of time in seconds the request should wait before
    timing out.
:return: If the comment was successfully posted the response code will be 1, 0 otherwise.
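A hedged usage sketch: the client class name (PublicApi) and the shape of the returned dictionary are assumptions about the surrounding library; only the call signature comes from the method above.

# Hypothetical usage; the wrapper class name and the result shape are assumptions.
vt = PublicApi('<your-api-key>')
result = vt.put_comments(
    resource='44d88612fea8a8f36de82e1278abb02f',   # md5 of the EICAR test file
    comment='Harmless test sample. #eicar @VirusTotalTeam',
    timeout=10,
)
# Per the docstring above, a response code of 1 indicates the comment was posted.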
10,691
def getdrawings():
    infos = Info.query.all()
    sketches = [json.loads(info.contents) for info in infos]
    return jsonify(drawings=sketches)
Get all the drawings.
10,692
def to_python(self, value: Union[Dict[str, int], int, None]) -> LocalizedIntegerValue:
    db_value = super().to_python(value)
    return self._convert_localized_value(db_value)
Converts the value from a database value into a Python value.
10,693
def path(self, category=None, image=None, feature=None):
    # NOTE: the filename format strings were lost in extraction; they are
    # reconstructed from the storage scheme in the docstring
    # (category_image.png / category_image.mat).
    filename = None
    if category is not None:
        filename = join(self.impath, str(category))
    if image is not None:
        assert category is not None, "The category has to be given if the image is given"
        filename = join(filename, '%s_%s.png' % (str(category), str(image)))
    if feature is not None:
        assert category is not None and image is not None, (
            "If a feature name is given the category and image also have to be given.")
        filename = join(self.ftrpath, str(category), feature,
                        '%s_%s.mat' % (str(category), str(image)))
    return filename
Constructs the path to categories, images and features.

This path function assumes that the following storage scheme is used on the
hard disk to access categories, images and features:

    - categories: /impath/category
    - images: /impath/category/category_image.png
    - features: /ftrpath/category/feature/category_image.mat

The path function is called to query the location of categories, images and
features before they are loaded. Thus, if your features are organized in a
different way, you can simply replace this method such that it returns
appropriate paths and the LoadFromDisk loader will use your naming scheme.
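For illustration, the reconstructed naming scheme produces paths like the following; 'loader', the category and feature names, and the base paths impath='/data/images' and ftrpath='/data/features' are all assumed values.

# Illustration only; instance and names are hypothetical.
loader.path('urban')                    # -> '/data/images/urban'
loader.path('urban', 17)                # -> '/data/images/urban/urban_17.png'
loader.path('urban', 17, 'saliency')    # -> '/data/features/urban/saliency/urban_17.mat'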
10,694
def run(command, parser, cl_args, unknown_args):
    # NOTE: the argument key, split separator and error message were lost in
    # extraction; 'cluster/[role]/[env]', '/' and the message below are
    # reconstructions and should be treated as assumptions.
    location = cl_args['cluster/[role]/[env]'].split('/')
    if len(location) == 1:
        return show_cluster(cl_args, *location)
    elif len(location) == 2:
        return show_cluster_role(cl_args, *location)
    elif len(location) == 3:
        return show_cluster_role_env(cl_args, *location)
    else:
        Log.error('Invalid cluster/[role]/[env] argument')
        return False
run command
10,695
def Dependencies(lTOC, xtrapath=None, manifest=None):
    # NOTE: the TOC typecode literals were lost in extraction; 'BINARY' is the
    # PyInstaller typecode assumed here.
    for nm, pth, typ in lTOC:
        if seen.get(nm.upper(), 0):
            continue
        logger.info("Analyzing %s", pth)
        seen[nm.upper()] = 1
        if is_win:
            for ftocnm, fn in selectAssemblies(pth, manifest):
                lTOC.append((ftocnm, fn, 'BINARY'))
        for lib, npth in selectImports(pth, xtrapath):
            if seen.get(lib.upper(), 0) or seen.get(npth.upper(), 0):
                continue
            seen[npth.upper()] = 1
            lTOC.append((lib, npth, 'BINARY'))
    return lTOC
Expand LTOC to include all the closure of binary dependencies.

LTOC is a logical table of contents, i.e., a seq of tuples (name, path).
Return LTOC expanded by all the binary dependencies of the entries in LTOC,
except those listed in the module global EXCLUDES.

manifest should be a winmanifest.Manifest instance on Windows, so that all
dependent assemblies can be added.
10,696
def bench_serpy():
    class FooSerializer(serpy.DictSerializer):
        attr_2 = serpy.IntField()
        attr_1 = serpy.StrField()

    return [FooSerializer(obj).data for obj in object_loader()]
Benchmark for 1000 objects with 2 fields.
10,697
def to_float_with_default(value, default_value):
    result = FloatConverter.to_nullable_float(value)
    return result if result is not None else default_value
Converts value into float or returns default when conversion is not possible.

:param value: the value to convert.
:param default_value: the default value.
:return: float value or default value when conversion is not supported.
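A small usage sketch, assuming FloatConverter.to_nullable_float returns None for unparseable input as the docstring implies.

# Expected behaviour under that assumption:
to_float_with_default('123.456', 0.0)   # -> 123.456
to_float_with_default('abc', 0.0)       # -> 0.0
to_float_with_default(None, 0.0)        # -> 0.0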
10,698
def send_stats(self, start, environ, response_interception, exception=None):
    if response_interception:
        key_name = self.get_key_name(environ, response_interception, exception=exception)
        timer = self.statsd_client.timer(key_name)
        timer._start_time = start
        timer.stop()
Send the actual timing stats.

:param start: start time in seconds since the epoch as a floating point number
:type start: float
:param environ: wsgi environment
:type environ: dict
:param response_interception: dictionary in form
    {'status': '<response status>', 'response_headers': [<response headers>],
     'exc_info': <exc_info>}
    This is the interception of what was passed to the start_response handler.
:type response_interception: dict
:param exception: optional exception that happened during the iteration of the response
:type exception: Exception
10,699
def read(self):
    l = len(self.wire)
    if l < 12:
        raise ShortHeader
    # '!HHHHHH' unpacks the six 16-bit header fields
    # (id, flags, qdcount, ancount, nscount, arcount) in network byte order.
    (self.message.id, self.message.flags, qcount, ancount,
     aucount, adcount) = struct.unpack('!HHHHHH', self.wire[:12])
    self.current = 12
    if dns.opcode.is_update(self.message.flags):
        self.updating = True
    self._get_question(qcount)
    if self.question_only:
        return
    self._get_section(self.message.answer, ancount)
    self._get_section(self.message.authority, aucount)
    self._get_section(self.message.additional, adcount)
    if self.current != l:
        raise TrailingJunk
    if self.message.multi and self.message.tsig_ctx and \
            not self.message.had_tsig:
        self.message.tsig_ctx.update(self.wire)
Read a wire format DNS message and build a dns.message.Message object.
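As a standalone illustration of the 12-byte DNS header that the reconstructed '!HHHHHH' format string unpacks (six unsigned 16-bit values, network byte order):

import struct

# Build a query header: id=0x1234, flags=0x0100 (RD set), 1 question, no other sections.
wire = struct.pack('!HHHHHH', 0x1234, 0x0100, 1, 0, 0, 0)
assert len(wire) == 12
ident, flags, qdcount, ancount, nscount, arcount = struct.unpack('!HHHHHH', wire[:12])
print(hex(ident), hex(flags), qdcount, ancount, nscount, arcount)   # 0x1234 0x100 1 0 0 0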