Dataset columns: `Unnamed: 0` (int64, values 0–389k), `code` (string, lengths 26–79.6k), `docstring` (string, lengths 1–46.9k).
387,700
def list_networks(full_ids=False):
    networks = docker_fabric().networks()
    _format_output_table(networks, NETWORK_COLUMNS, full_ids)
Lists networks on the Docker remote host, similar to ``docker network ls``. :param full_ids: Shows the full network ids. When ``False`` (default) only shows the first 12 characters. :type full_ids: bool
387,701
def get_ssl_context(private_key, certificate):
    if (certificate and os.path.isfile(certificate)
            and private_key and os.path.isfile(private_key)):
        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
        context.load_cert_chain(certificate, private_key)
        return context
    return None
Get an SSL context from private key and certificate paths. The return value is used when calling Flask, e.g. app.run(ssl_context=get_ssl_context(private_key, certificate)).
387,702
def stop(self):
    should_sleep = self._is_running
    super(Sensor, self).stop()
    if should_sleep:
        time.sleep(max(self._select_timeout, self._retransmit_timeout) + 1)
    if self._listen_socket is not None:
        self._shutdown_listen_socket()
Stop the interface :rtype: None
387,703
def check_frequencies(pfeed, *, as_df=False, include_warnings=False):
    # Several string literals below were lost in extraction; the table and
    # column names are inferred from the ProtoFeed spec and the linked tables.
    table = 'frequencies'
    problems = []

    # Preliminary checks
    if pfeed.frequencies is None:
        problems.append(['error', 'Missing table', table, []])
    else:
        f = pfeed.frequencies.copy()
        problems = check_for_required_columns(problems, table, f)
    if problems:
        return gt.format_problems(problems, as_df=as_df)

    if include_warnings:
        problems = check_for_invalid_columns(problems, table, f)

    # Check route_short_name and route_long_name
    for column in ['route_short_name', 'route_long_name']:
        problems = gt.check_column(problems, table, f, column, gt.valid_str,
                                   column_required=False)
    cond = ~(f['route_short_name'].notnull() | f['route_long_name'].notnull())
    problems = gt.check_table(problems, table, f, cond,
                              'route_short_name and route_long_name both empty')

    # Check route_type
    v = lambda x: x in range(8)
    problems = gt.check_column(problems, table, f, 'route_type', v)

    # Check service_window_id
    problems = gt.check_column_linked_id(problems, table, f,
                                         'service_window_id', pfeed.service_windows)

    # Check direction
    v = lambda x: x in range(3)
    problems = gt.check_column(problems, table, f, 'direction', v)

    # Check frequency
    v = lambda x: isinstance(x, int)
    problems = gt.check_column(problems, table, f, 'frequency', v)

    # Check speed
    problems = gt.check_column(problems, table, f, 'speed', valid_speed,
                               column_required=False)

    # Check shape_id
    problems = gt.check_column_linked_id(problems, table, f, 'shape_id', pfeed.shapes)

    return gt.format_problems(problems, as_df=as_df)
Check that ``pfeed.frequencies`` follows the ProtoFeed spec. Return a list of problems of the form described in :func:`gt.check_table`; the list will be empty if no problems are found.
387,704
def get_changesets(self, start=None, end=None, start_date=None, end_date=None,
                   branch_name=None, reverse=False):
    # The revset/message string literals were lost in extraction and are
    # reconstructed here from Mercurial revset syntax.
    start_raw_id = self._get_revision(start)
    start_pos = self.revisions.index(start_raw_id) if start else None
    end_raw_id = self._get_revision(end)
    end_pos = self.revisions.index(end_raw_id) if end else None

    if None not in [start, end] and start_pos > end_pos:
        raise RepositoryError("Start revision %r cannot be after end revision %r"
                              % (start, end))

    if branch_name and branch_name not in self.allbranches.keys():
        raise BranchDoesNotExistError('Branch %s not found in this repository'
                                      % branch_name)

    if end_pos is not None:
        end_pos += 1

    filter_ = []
    if branch_name:
        filter_.append('branch("%s")' % branch_name)
    if start_date:
        filter_.append('date(">%s")' % start_date)
    if end_date:
        filter_.append('date("<%s")' % end_date)
    if filter_:
        revisions = scmutil.revrange(self._repo, [" and ".join(filter_)])
    else:
        revisions = self.revisions

    revs = revisions[start_pos:end_pos]
    if reverse:
        revs = reversed(revs)

    return CollectionGenerator(self, revs)
Returns an iterator of ``MercurialChangeset`` objects from start to end (both inclusive). :param start: None, str, int or mercurial lookup format :param end: None, str, int or mercurial lookup format :param start_date: :param end_date: :param branch_name: :param reverse: return changesets in reversed order
387,705
def _prepare_hiveconf(d):
    if not d:
        return []
    return as_flattened_list(
        zip(["-hiveconf"] * len(d), ["{}={}".format(k, v) for k, v in d.items()])
    )
This function prepares a list of hiveconf params from a dictionary of key value pairs.

:param d:
:type d: dict

>>> hh = HiveCliHook()
>>> hive_conf = {"hive.exec.dynamic.partition": "true",
... "hive.exec.dynamic.partition.mode": "nonstrict"}
>>> hh._prepare_hiveconf(hive_conf)
["-hiveconf", "hive.exec.dynamic.partition=true",\
 "-hiveconf", "hive.exec.dynamic.partition.mode=nonstrict"]
387,706
def main():
    # Command-line flags and format strings were lost in extraction; they
    # are restored here from the SYNTAX/OPTIONS section of the docstring.
    if '-h' in sys.argv:
        print(main.__doc__)
        sys.exit()
    if '-i' in sys.argv:
        while 1:
            try:
                ans = input("Input Pole Latitude [positive north]: <cntrl-D to quit> ")
                plat = float(ans)
                ans = input("Input Pole Longitude [positive east]: ")
                plon = float(ans)
                ans = input("Input Site Latitude: ")
                slat = float(ans)
                ans = input("Input Site Longitude: ")
                slong = float(ans)
                dec, inc = pmag.vgp_di(plat, plon, slat, slong)
                print('%7.1f %7.1f' % (dec, inc))  # output format inferred
            except EOFError:
                print("\n Good-bye\n")
                sys.exit()
    elif '-f' in sys.argv:
        ind = sys.argv.index('-f')
        file = sys.argv[ind + 1]
        f = open(file, 'r')
        inp = f.readlines()
        for line in inp:
            dec, inc = spitout(line)
    else:
        inp = sys.stdin.readlines()
        for line in inp:
            spitout(line)
NAME
    vgp_di.py

DESCRIPTION
    converts site latitude, longitude and pole latitude, longitude to declination, inclination

SYNTAX
    vgp_di.py [-h] [-i] [-f FILE] [< filename]

OPTIONS
    -h prints help message and quits
    -i interactive data entry
    -f FILE to specify file name on the command line

INPUT
    for file entry:
        PLAT PLON SLAT SLON
    where:
        PLAT: pole latitude
        PLON: pole longitude (positive east)
        SLAT: site latitude (positive north)
        SLON: site longitude (positive east)

OUTPUT
    D I
    where:
        D: declination
        I: inclination
387,707
def get_load(jid):
    # Several literals (dict keys, file modes, log messages) were lost in
    # extraction and are restored here following Salt's conventions.
    jid_dir = salt.utils.jid.jid_dir(jid, _job_dir(), __opts__['hash_type'])
    load_fn = os.path.join(jid_dir, LOAD_P)
    if not os.path.exists(jid_dir) or not os.path.exists(load_fn):
        return {}
    serial = salt.payload.Serial(__opts__)
    ret = {}
    load_p = os.path.join(jid_dir, LOAD_P)
    num_tries = 5
    for index in range(1, num_tries + 1):
        with salt.utils.files.fopen(load_p, 'rb') as rfh:
            try:
                ret = serial.load(rfh)
                break
            except Exception as exc:
                # Retry a few times before giving up; the original branch
                # ordering was scrambled in extraction.
                if index == num_tries:
                    log.critical('Failed to unpack %s', load_p)
                    raise exc
                time.sleep(0.25)
    if ret is None:
        ret = {}
    minions_cache = [os.path.join(jid_dir, MINIONS_P)]
    minions_cache.extend(
        glob.glob(os.path.join(jid_dir, SYNDIC_MINIONS_P.format('*')))
    )
    all_minions = set()
    for minions_path in minions_cache:
        log.debug('Reading minion list from %s', minions_path)
        try:
            with salt.utils.files.fopen(minions_path, 'rb') as rfh:
                all_minions.update(serial.load(rfh))
        except IOError as exc:
            salt.utils.files.process_read_exception(exc, minions_path)
    if all_minions:
        ret['Minions'] = sorted(all_minions)
    return ret
Return the load data that marks a specified jid
387,708
def get_timeout(self):
    "setup a timeout for waiting for a proposal"
    if self.timeout_time is not None or self.proposal:
        return
    now = self.cm.chainservice.now
    round_timeout = ConsensusManager.round_timeout
    round_timeout_factor = ConsensusManager.round_timeout_factor
    delay = round_timeout * round_timeout_factor ** self.round
    self.timeout_time = now + delay
    return delay
setup a timeout for waiting for a proposal
387,709
def write_temp_file(self, content, filename=None, mode='w'):
    # Default mode literal was lost in extraction; 'w' matches the
    # docstring ("'w' or 'wb'"), with 'wb' required for binary content.
    if filename is None:
        filename = str(uuid.uuid4())
    fqpn = os.path.join(self.tcex.default_args.tc_temp_path, filename)
    with open(fqpn, mode) as fh:
        fh.write(content)
    return fqpn
Write content to a temporary file.

Args:
    content (bytes|str): The file content. If passing binary data the mode needs to be set to 'wb'.
    filename (str, optional): The filename to use when writing the file.
    mode (str, optional): The file write mode which could be either 'w' or 'wb'.

Returns:
    str: Fully qualified path name for the file.
387,710
def all_notebook_jobs(self):
    from db.models.notebooks import NotebookJob
    return NotebookJob.all.filter(project=self)
Similar to notebook_jobs, but uses the default manager to return archived experiments as well.
387,711
def simxGetArrayParameter(clientID, paramIdentifier, operationMode):
    paramValues = (ct.c_float * 3)()
    ret = c_GetArrayParameter(clientID, paramIdentifier, paramValues, operationMode)
    arr = []
    for i in range(3):
        arr.append(paramValues[i])
    return ret, arr
Please have a look at the function description/documentation in the V-REP user manual
387,712
def get_notifications(self, startDate, endDate, loadBalancerID, loadBalancerRuleID): return self._call(GetLoadBalancerNotifications, startDate=startDate, endDate=endDate, loadBalancerID=loadBalancerID, loadBalancerRuleID=loadBalancerRuleID)
Get the load balancer notifications for a specific rule within a specified time frame. :type startDate: datetime :type endDate: datetime :type loadBalancerID: int :type loadBalancerRuleID: int :param startDate: From Date :param endDate: To Date :param loadBalancerID: ID of the Load Balancer :param loadBalancerRuleID: ID of the Load Balancer Rule
387,713
def merge_with(self, other, multiset_op, other_op=None):
    result = FeatureCollection()
    for ms_name in set(self._counters()) | set(other._counters()):
        c1 = self.get(ms_name, None)
        c2 = other.get(ms_name, None)
        if c1 is None and c2 is not None:
            c1 = c2.__class__()
        if c2 is None and c1 is not None:
            c2 = c1.__class__()
        result[ms_name] = multiset_op(c1, c2)
    if other_op is not None:
        for o_name in (set(self._not_counters()) | set(other._not_counters())):
            v = other_op(self.get(o_name, None), other.get(o_name, None))
            if v is not None:
                result[o_name] = v
    return result
Merge this feature collection with another.

Merges two feature collections using the given ``multiset_op`` on each corresponding multiset and returns a new :class:`FeatureCollection`. The contents of the two original feature collections are not modified.

For each feature name in both feature sets, if either feature collection being merged has a :class:`collections.Counter` instance as its value, then the two values are merged by calling `multiset_op` with both values as parameters. If either feature collection has something other than a :class:`collections.Counter`, and `other_op` is not :const:`None`, then `other_op` is called with both values to merge them. If `other_op` is :const:`None` and a feature is not present in either feature collection with a counter value, then the feature will not be present in the result.

:param other: The feature collection to merge into ``self``.
:type other: :class:`FeatureCollection`
:param multiset_op: Function to merge two counters
:type multiset_op: fun(Counter, Counter) -> Counter
:param other_op: Function to merge two non-counters
:type other_op: fun(object, object) -> object
:rtype: :class:`FeatureCollection`
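A minimal sketch of the merge semantics above, using plain collections.Counter values; FeatureCollection itself is project-specific, so this stand-in only illustrates the multiset_op path:

from collections import Counter

def merge_counters(c1, c2, multiset_op):
    # Missing counters are treated as empty, mirroring merge_with.
    c1 = c1 if c1 is not None else Counter()
    c2 = c2 if c2 is not None else Counter()
    return multiset_op(c1, c2)

a = Counter({"cat": 2, "dog": 1})
b = Counter({"dog": 3})
print(merge_counters(a, b, lambda x, y: x + y))  # Counter({'dog': 4, 'cat': 2})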
387,714
def convergence_from_grid(self, grid):
    surface_density_grid = np.zeros(grid.shape[0])
    grid_eta = self.grid_to_elliptical_radii(grid)
    for i in range(grid.shape[0]):
        surface_density_grid[i] = self.convergence_func(grid_eta[i])
    return surface_density_grid
Calculate the projected convergence at a given set of arc-second gridded coordinates.

Parameters
----------
grid : grids.RegularGrid
    The grid of (y,x) arc-second coordinates the surface density is computed on.
387,715
def load_isd_daily_temp_data(self, start, end, read_from_cache=True,
                             write_to_cache=True):
    return load_isd_daily_temp_data(
        self.usaf_id, start, end,
        read_from_cache=read_from_cache,
        write_to_cache=write_to_cache,
    )
Load resampled daily ISD temperature data from start date to end date (inclusive).

This is the primary convenience method for loading resampled daily ISD temperature data.

Parameters
----------
start : datetime.datetime
    The earliest date from which to load data.
end : datetime.datetime
    The latest date until which to load data.
read_from_cache : bool
    Whether or not to load data from cache.
write_to_cache : bool
    Whether or not to write newly loaded data to cache.
387,716
def export_to_dicts(table, *args, **kwargs):
    field_names = table.field_names
    return [{key: getattr(row, key) for key in field_names} for row in table]
Export a `rows.Table` to a list of dicts
387,717
def handle_selected_page(self):
    # The page-name tuples were lost in extraction; the names below are
    # inferred from the docstring and may not match the original exactly.
    if not self.selected_page:
        return
    if self.selected_page.name in ('subscription', 'submission'):
        # Launch the page in a subwindow
        self.selected_page = self.selected_page.loop()
    elif self.selected_page.name in ('subreddit', 'inbox'):
        # Any other page type closes the current page
        self.active = False
    else:
        raise RuntimeError(self.selected_page.name)
Open the subscription and submission pages in subwindows, but close the current page if any other type of page is selected.
387,718
def delete_ec2_nodes(instance_id_list, client=None):
    if not client:
        client = boto3.client('ec2')
    resp = client.terminate_instances(InstanceIds=instance_id_list)
    return resp
This deletes EC2 nodes and terminates the instances.

Parameters
----------
instance_id_list : list of str
    A list of EC2 instance IDs to terminate.
client : boto3.Client or None
    If None, this function will instantiate a new `boto3.Client` object to use in its operations. Alternatively, pass in an existing `boto3.Client` instance to re-use it here.

Returns
-------
Nothing.
387,719
def _make_repr_table_from_sframe(X):
    # The padding/join literals were lost in extraction; spaces are the
    # natural choice for a plain-text table.
    assert isinstance(X, _SFrame)
    column_names = X.column_names()
    out_data = [[None] * len(column_names) for i in range(X.num_rows())]
    column_sizes = [len(s) for s in column_names]
    for i, c in enumerate(column_names):
        for j, e in enumerate(X[c]):
            out_data[j][i] = str(e)
            column_sizes[i] = max(column_sizes[i], len(str(e)))
    out_data = ([[cn.ljust(k, ' ') for cn, k in zip(column_names, column_sizes)],
                 ["-" * k for k in column_sizes]]
                + [[e.ljust(k, ' ') for e, k in zip(row, column_sizes)]
                   for row in out_data])
    return ['  '.join(row) for row in out_data]
Serializes an SFrame to a list of strings that, when printed, create a well-formatted table.
387,720
def state(self):
    # Resource key lost in extraction; 'state' matches the property name.
    state = self._resource.get('state', self.default_state)
    if state in State:
        return state
    return getattr(State, state)
Get the Document's state
387,721
def _check_connection(self):
    try:
        super()._check_connection()
    except OSError as exc:
        _LOGGER.error(exc)
        self.protocol.transport.close()
        self.protocol.conn_lost_callback()
        return
    task = self.loop.call_later(
        self.reconnect_timeout + 0.1, self._check_connection)
    self.cancel_check_conn = task.cancel
Check if connection is alive every reconnect_timeout seconds.
387,722
def __create_coordinates(self, lat, lon, elev):
    lat.sort()
    lon.sort()
    geo_dict = {}
    if len(lat) == 2 and len(lon) == 2:
        if lat[0] == lat[1] and lon[0] == lon[1]:
            logger_noaa_lpd.info("coordinates found: {}".format("2"))
            lat.pop()
            lon.pop()
            geo_dict = self.__geo_point(lat, lon, elev)
        else:
            logger_noaa_lpd.info("coordinates found: {}".format("4"))
            geo_dict = self.__geo_multipoint(lat, lon, elev)
    elif len(lat) == 1 and len(lon) == 1:
        logger_noaa_lpd.info("coordinates found: {}".format("2"))
        geo_dict = self.__geo_point(lat, lon, elev)
    elif not lat and not lon:
        logger_noaa_lpd.info("coordinates found: {}".format("0"))
    else:
        geo_dict = {}
        logger_noaa_lpd.info("coordinates found: {}".format("too many"))
    return geo_dict
GeoJSON standard: Use to determine 2-point or 4-point coordinates :param list lat: :param list lon: :return dict:
387,723
def predict(self):
    RV = np.zeros((self.N, self.P))
    for term_i in range(self.n_terms):
        RV += np.dot(self.Fstar()[term_i],
                     np.dot(self.B()[term_i], self.Astar()[term_i]))
    return RV
predict the value of the fixed effect
387,724
def relative_datetime(self):
    now = datetime.now(timezone.utc)
    tense = "from now" if self.created_at > now else "ago"
    return "{0} {1}".format(humanize.naturaldelta(now - self.created_at), tense)
Return human-readable relative time string.
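A standalone check of the same pattern, assuming the third-party humanize package is installed (pip install humanize):

from datetime import datetime, timedelta, timezone
import humanize

now = datetime.now(timezone.utc)
created_at = now - timedelta(hours=3)  # hypothetical timestamp
tense = "from now" if created_at > now else "ago"
print("{0} {1}".format(humanize.naturaldelta(now - created_at), tense))  # -> "3 hours ago"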
387,725
def plot_evec(fignum, Vs, symsize, title):
    # Marker/color literals were lost in extraction; squares/triangles/circles
    # in red/blue/black follow PmagPy's usual eigenvector plotting convention.
    plt.figure(num=fignum)
    plt.text(-1.1, 1.15, title)
    symb, symkey = ['s', 'v', 'o'], 0
    col = ['r', 'b', 'k']
    for VEC in range(3):
        X, Y = [], []
        for Vdirs in Vs:
            XY = pmag.dimap(Vdirs[VEC][0], Vdirs[VEC][1])
            X.append(XY[0])
            Y.append(XY[1])
        plt.scatter(X, Y, s=symsize, marker=symb[VEC], c=col[VEC], edgecolors='none')
    plt.axis("equal")
Plots eigenvector directions of S vectors.

Parameters
----------
fignum : matplotlib figure number
Vs : nested list of eigenvectors
symsize : size in pts for symbol
title : title for plot
387,726
def generate_ppi_network(
    ppi_graph_path: str,
    dge_list: List[Gene],
    max_adj_p: float,
    max_log2_fold_change: float,
    min_log2_fold_change: float,
    ppi_edge_min_confidence: Optional[float] = None,
    current_disease_ids_path: Optional[str] = None,
    disease_associations_path: Optional[str] = None,
) -> Network:
    protein_interactions = parsers.parse_ppi_graph(ppi_graph_path, ppi_edge_min_confidence)
    protein_interactions = protein_interactions.simplify()
    if disease_associations_path is not None and current_disease_ids_path is not None:
        current_disease_ids = parsers.parse_disease_ids(current_disease_ids_path)
        disease_associations = parsers.parse_disease_associations(disease_associations_path,
                                                                  current_disease_ids)
    else:
        disease_associations = None
    network = Network(
        protein_interactions,
        max_adj_p=max_adj_p,
        max_l2fc=max_log2_fold_change,
        min_l2fc=min_log2_fold_change,
    )
    network.set_up_network(dge_list, disease_associations=disease_associations)
    return network
Generate the protein-protein interaction network. :return Network: Protein-protein interaction network with information on differential expression.
387,727
def get_authorization_user(self, **kwargs):
    if self.authorization_user is not None:
        return self.authorization_user
    self.authorization_user = self.request.user
    return self.request.user
Gets the user the authorization object is for.
387,728
def load_schema(schema_path):
    try:
        with open(schema_path) as schema_file:
            schema = json.load(schema_file)
    except ValueError as e:
        # Message literal lost in extraction; reconstructed generically.
        raise SchemaInvalidError('Invalid JSON in schema file %s: %s'
                                 % (schema_file.name, str(e)))
    return schema
Load the JSON schema at the given path as a Python object.

Args:
    schema_path: A filename for a JSON schema.

Returns:
    A Python object representation of the schema.
387,729
def _gzip_open_filename(handle):
    import gzip
    # Mode literal lost in extraction; text mode ('rt') is required when an
    # encoding is given on Python 3.
    if sys.version_info[0] > 2:
        handle = gzip.open(handle, mode='rt', encoding="UTF-8")
    else:
        handle = gzip.open(handle)
    return handle
Hide Python 2 vs. 3 differences in gzip.open()
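A quick usage sketch of the Python-3 branch above; the file path is hypothetical:

import gzip

with gzip.open("example.txt.gz", mode="rt", encoding="UTF-8") as fh:
    for line in fh:
        print(line, end="")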
387,730
def get_as_bytes(self, s3_path):
    (bucket, key) = self._path_to_bucket_and_key(s3_path)
    obj = self.s3.Object(bucket, key)
    # boto3's Object.get() returns a dict whose 'Body' is a streaming body.
    contents = obj.get()['Body'].read()
    return contents
Get the contents of an object stored in S3 as bytes :param s3_path: URL for target S3 location :return: File contents as pure bytes
387,731
def _set_url(self, url):
    original_url = self._url
    try:
        self._update_index(url)
    except:
        self._url = original_url
        raise
Set a new URL for the data server. If we're unable to contact the given url, then the original url is kept.
387,732
def playURI(self, uri):
    url: str = get_url("/remote/play.json")
    params = {
        "oauth": self._oauth_token,
        "csrf": self._csrf_token,
        "uri": uri,
        "context": uri,
    }
    r = self._request(url=url, params=params)
    return r.json()
Play a Spotify uri, for example spotify:track:5Yn8WCB4Dqm8snemB5Mu4K :param uri: Playlist, Artist, Album, or Song Uri
387,733
def _op_msg_no_header(flags, command, identifier, docs, check_keys, opts):
    encoded = _dict_to_bson(command, False, opts)
    flags_type = _pack_op_msg_flags_type(flags, 0)
    total_size = len(encoded)
    max_doc_size = 0
    if identifier:
        type_one = _pack_byte(1)
        cstring = _make_c_string(identifier)
        encoded_docs = [_dict_to_bson(doc, check_keys, opts) for doc in docs]
        size = len(cstring) + sum(len(doc) for doc in encoded_docs) + 4
        encoded_size = _pack_int(size)
        total_size += size
        max_doc_size = max(len(doc) for doc in encoded_docs)
        data = ([flags_type, encoded, type_one, encoded_size, cstring]
                + encoded_docs)
    else:
        data = [flags_type, encoded]
    return b''.join(data), total_size, max_doc_size
Get an OP_MSG message. Note: this method handles multiple documents in a type one payload but it does not perform batch splitting and the total message size is only checked *after* generating the entire message.
387,734
def get_item_with_id(self, uid):
    for item in self.get_items():
        if item.id == uid:
            return item
    return None
Returns item for defined UID.

>>> book.get_item_with_id('image_001')

:Args:
  - uid: UID for the item

:Returns:
  Returns item object. Returns None if nothing was found.
387,735
def compare_and_set(self, expect, update):
    with self._reference.get_lock():
        if self._reference.value == expect:
            self._reference.value = update
            return True
    return False
Atomically sets the value to `update` if the current value is equal to `expect`. :param expect: The expected current value. :param update: The value to set if and only if `expect` equals the current value.
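A self-contained sketch of the same compare-and-set pattern, assuming the reference is a multiprocessing.Value (which is what the get_lock() call suggests):

from multiprocessing import Value

class AtomicInt:
    def __init__(self, initial=0):
        self._reference = Value("i", initial)

    def compare_and_set(self, expect, update):
        # The lock makes the read-compare-write sequence atomic across processes.
        with self._reference.get_lock():
            if self._reference.value == expect:
                self._reference.value = update
                return True
            return False

counter = AtomicInt(0)
assert counter.compare_and_set(0, 1)      # succeeds: 0 -> 1
assert not counter.compare_and_set(0, 2)  # fails: value is now 1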
387,736
def convert_sequence_to_motor_units(cycles, unit_converter):
    # The conversion body was lost in extraction. Per the docstring, it
    # deep-copies `cycles` and converts every quantity to motor units with
    # `unit_converter`; the per-field conversion calls are not reconstructed
    # here because the UnitConverter API is not shown.
    cv_cycles = copy.deepcopy(cycles)
    return cv_cycles
Converts a move sequence to motor units.

Converts a move sequence to motor units using the provided converter.

Parameters
----------
cycles : iterable of dicts
    The iterable of cycles of motion to do one after another. See ``compile_sequence`` for format.
unit_converter : UnitConverter, optional
    ``GeminiMotorDrive.utilities.UnitConverter`` to use to convert the units in `cycles` to motor units.

Returns
-------
motor_cycles : list of dicts
    A deep copy of `cycles` with all units converted to motor units.

See Also
--------
compile_sequence
GeminiMotorDrive.utilities.UnitConverter
387,737
def run_step(context):
    logger.debug("started")
    context.clear()
    logger.info(f"Context wiped. New context size: {len(context)}")
    logger.debug("done")
Wipe the entire context. Args: Context is a dictionary or dictionary-like. Does not require any specific keys in context.
387,738
def hash(self):
    return u''.join([
        self.alias,
        self.description,
        str(self.ignored),
        str(self.flags),
    ])
Return a value that's used to uniquely identify an entry in a date so we can regroup all entries that share the same hash.
387,739
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    ln_mean, [ln_stddev] = super().get_mean_and_stddevs(
        sites, rup, dists, imt, stddev_types)
    # The coefficient key was lost in extraction; it is named here after the
    # correction factor the docstring describes and may not match the source.
    coeffs = self.COEFFS_UPPER[imt]
    ln_mean += np.log(coeffs['correction'])
    return ln_mean, [ln_stddev]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for specification of input and result values. Implements the correction factor for the upper crust, equation (12) on p. 484: ``P' = P x Correction_factor``
387,740
def _truncated_power_method(self, A, x0, k, max_iter=10000, thresh=1e-8):
    xts = [x0]
    for t in range(max_iter):
        xts.append(self._normalize(self._truncate(np.dot(A, xts[-1]), k)))
        if np.linalg.norm(xts[-1] - xts[-2]) < thresh:
            break
    return xts[-1]
Given a matrix A, an initial guess x0, and a maximum cardinality k, find the best k-sparse approximation to its dominant eigenvector.

References
----------
[1] Yuan, X-T. and Zhang, T. "Truncated Power Method for Sparse Eigenvalue Problems." Journal of Machine Learning Research. Vol. 14. 2013. http://www.jmlr.org/papers/volume14/yuan13a/yuan13a.pdf
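A standalone sketch of the truncated power method with the two helpers spelled out (_truncate keeps the k largest-magnitude entries, _normalize rescales to unit norm); the helper names and details are assumptions, not the library's API:

import numpy as np

def truncate(x, k):
    # Zero out everything except the k largest-magnitude entries.
    out = np.zeros_like(x)
    idx = np.argsort(np.abs(x))[-k:]
    out[idx] = x[idx]
    return out

def normalize(x):
    return x / np.linalg.norm(x)

def truncated_power_method(A, x0, k, max_iter=10000, thresh=1e-8):
    x = normalize(truncate(x0, k))
    for _ in range(max_iter):
        x_new = normalize(truncate(A @ x, k))
        if np.linalg.norm(x_new - x) < thresh:
            return x_new
        x = x_new
    return x

A = np.diag([5.0, 4.0, 0.1, 0.1])
x = truncated_power_method(A, np.ones(4), k=2)  # ~2-sparse dominant eigenvector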
387,741
def api_request(*args, **kwargs):
    r = requests.request(*args, **kwargs)
    return APIResponse(r)
Wrapper which converts a requests.Response into our custom APIResponse object :param args: :param kwargs: :return:
387,742
def reftrack_element_data(rt, role):
    element = rt.get_element()
    if element is None:
        return
    if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
        return element.name
Return the data for the element (e.g. the Asset or Shot) :param rt: the :class:`jukeboxcore.reftrack.Reftrack` holds the data :type rt: :class:`jukeboxcore.reftrack.Reftrack` :param role: item data role :type role: QtCore.Qt.ItemDataRole :returns: data for the element :rtype: depending on role :raises: None
387,743
def list_tables(self):
    # Command literal lost in extraction; the docstring says this runs \dt.
    lines = output_lines(self.exec_psql('\\dt'))
    return [line.split() for line in lines]
Runs the ``\\dt`` command and returns a list of column values with information about all tables in the database.
387,744
def fetch_next(self):
    if not self._buffer_size() and self.alive:
        return self._get_more()
    elif self._buffer_size():
        future = self._framework.get_future(self.get_io_loop())
        future.set_result(True)
        return future
    else:
        future = self._framework.get_future(self.get_io_loop())
        future.set_result(False)
        return future
A Future used with `gen.coroutine`_ to asynchronously retrieve the next document in the result set, fetching a batch of documents from the server if necessary. Resolves to ``False`` if there are no more documents, otherwise :meth:`next_object` is guaranteed to return a document.

.. _`gen.coroutine`: http://tornadoweb.org/en/stable/gen.html

.. doctest:: fetch_next
  :hide:

  >>> _ = MongoClient().test.test_collection.delete_many({})
  >>> collection = MotorClient().test.test_collection

.. doctest:: fetch_next

  >>> @gen.coroutine
  ... def f():
  ...     yield collection.insert_many([{'_id': i} for i in range(5)])
  ...     cursor = collection.find().sort([('_id', 1)])
  ...     while (yield cursor.fetch_next):
  ...         doc = cursor.next_object()
  ...         sys.stdout.write(str(doc['_id']) + ', ')
  ...     print('done')
  ...
  >>> IOLoop.current().run_sync(f)
  0, 1, 2, 3, 4, done

While it appears that fetch_next retrieves each document from the server individually, the cursor actually fetches documents efficiently in `large batches`_.

In Python 3.5 and newer, cursors can be iterated elegantly and very efficiently in native coroutines with `async for`:

.. doctest:: fetch_next

  >>> async def f():
  ...     async for doc in collection.find():
  ...         sys.stdout.write(str(doc['_id']) + ', ')
  ...     print('done')
  ...
  >>> IOLoop.current().run_sync(f)
  0, 1, 2, 3, 4, done

.. _`large batches`: https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches
387,745
def attach_event_handler(canvas, handler=close_on_esc_or_middlemouse):
    if getattr(canvas, "_py_event_dispatcher_attached", None):
        return
    event_dispatcher = C.TPyDispatcherProcessedEvent(handler)
    canvas.Connect("ProcessedEvent(int,int,int,TObject*)",
                   "TPyDispatcherProcessedEvent", event_dispatcher,
                   "Dispatch(int,int,int,TObject*)")
    canvas._py_event_dispatcher_attached = event_dispatcher
Attach a handler function to the ProcessedEvent slot, defaulting to closing when middle mouse is clicked or escape is pressed Note that escape only works if the pad has focus, which in ROOT-land means the mouse has to be over the canvas area.
387,746
def visit_AsyncFunctionDef(self, node):
    node = self.get_function_node(node)
    if node is not None:
        node._async = True
Visit an async function node.
387,747
def _load_isd_station_metadata(download_path):
    from shapely.geometry import Point

    isd_history = pd.read_csv(
        os.path.join(download_path, "isd-history.csv"),
        dtype=str,
        parse_dates=["BEGIN", "END"],
    )
    hasGEO = (
        isd_history.LAT.notnull() & isd_history.LON.notnull() & (isd_history.LAT != 0)
    )
    isUS = (
        ((isd_history.CTRY == "US") & (isd_history.STATE.notnull()))
        | (isd_history.CTRY.str[1] == "Q")
    )
    hasUSAF = isd_history.USAF != "999999"

    metadata = {}
    for usaf_station, group in isd_history[hasGEO & isUS & hasUSAF].groupby("USAF"):
        recent = group.loc[group.END.idxmax()]
        wban_stations = list(group.WBAN)
        metadata[usaf_station] = {
            "usaf_id": usaf_station,
            "wban_ids": wban_stations,
            "recent_wban_id": recent.WBAN,
            "name": recent["STATION NAME"],
            "icao_code": recent.ICAO,
            "latitude": recent.LAT if recent.LAT not in ("+00.000",) else None,
            "longitude": recent.LON if recent.LON not in ("+000.000",) else None,
            "point": Point(float(recent.LON), float(recent.LAT)),
            "elevation": recent["ELEV(M)"]
            if not str(float(recent["ELEV(M)"])).startswith("-999")
            else None,
            "state": recent.STATE,
        }
    return metadata
Collect metadata for US isd stations.
387,748
def QA_SU_save_stock_list(client=DATABASE, ui_log=None, ui_progress=None):
    # The collection name, index column, and first log message were lost in
    # extraction; 'stock_list' and 'code' are inferred from the surrounding code.
    client.drop_collection('stock_list')
    coll = client.stock_list
    coll.create_index('code')
    try:
        QA_util_log_info(
            '##JOB08 Now Saving STOCK_LIST ====',  # message text inferred
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=5000
        )
        stock_list_from_tdx = QA_fetch_get_stock_list()
        pandas_data = QA_util_to_json_from_pandas(stock_list_from_tdx)
        coll.insert_many(pandas_data)
        QA_util_log_info(
            "完成股票列表获取",  # "finished fetching the stock list"
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=10000
        )
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_stock_list exception!")
        pass
save stock_list Keyword Arguments: client {[type]} -- [description] (default: {DATABASE})
387,749
def upsert(manager, defaults=None, updates=None, **kwargs):
    defaults = defaults or {}
    defaults.update(updates or {})
    obj, created = manager.get_or_create(defaults=defaults, **kwargs)
    if updates is not None and not created and any(getattr(obj, k) != updates[k] for k in updates):
        for k, v in updates.items():
            setattr(obj, k, v)
        obj.save(update_fields=updates)
    return obj, created
Performs an update on an object or an insert if the object does not exist.

:type defaults: dict
:param defaults: These values are set when the object is created, but are irrelevant when the object already exists. This field should only be used when values only need to be set during creation.

:type updates: dict
:param updates: These values are updated when the object is updated. They also override any values provided in the defaults when inserting the object.

:param kwargs: These values provide the arguments used when checking for the existence of the object. They are used in a similar manner to Django's get_or_create function.

:returns: A tuple of the upserted object and a Boolean that is True if it was created (False otherwise)

Examples:

.. code-block:: python

    # Upsert a test model with an int value of 1. Use default values that will be given to it when created
    model_obj, created = upsert(TestModel.objects, int_field=1, defaults={'float_field': 2.0})
    print(created)
    True
    print(model_obj.int_field, model_obj.float_field)
    1, 2.0

    # Do an upsert on that same model with different default fields. Since it already exists, the defaults
    # are not used
    model_obj, created = upsert(TestModel.objects, int_field=1, defaults={'float_field': 3.0})
    print(created)
    False
    print(model_obj.int_field, model_obj.float_field)
    1, 2.0

    # In order to update the float field in an existing object, use the updates dictionary
    model_obj, created = upsert(TestModel.objects, int_field=1, updates={'float_field': 3.0})
    print(created)
    False
    print(model_obj.int_field, model_obj.float_field)
    1, 3.0

    # You can use updates on a newly created object that will also be used as initial values.
    model_obj, created = upsert(TestModel.objects, int_field=2, updates={'float_field': 4.0})
    print(created)
    True
    print(model_obj.int_field, model_obj.float_field)
    2, 4.0
387,750
def printPre(self, *args):
    if len(args):
        self.b_printPre = args[0]
    else:
        return self.b_printPre
Get/set whether the str_pre string is printed (the ``b_printPre`` flag).
387,751
def to_volume(self):
    if hasattr(self.header.definitions, "Lattice"):
        X, Y, Z = self.header.definitions.Lattice
    else:
        raise ValueError("Unable to determine data size")
    volume = self.decoded_data.reshape(Z, Y, X)
    return volume
Return a 3D volume of the data
387,752
def get(self, key, value):
    for element in self.neoindex[key][value]:
        if self.indexClass == "vertex":
            yield Vertex(element)
        elif self.indexClass == "edge":
            yield Edge(element)
        else:
            raise TypeError(self.indexClass)
Gets an element from an index under a given key-value pair @params key: Index key string @params value: Index value string @returns A generator of Vertex or Edge objects
387,753
def sized_imap(func, iterable, strict=False):
    try:
        length = len(iterable)
    except TypeError:
        if strict:
            raise
        else:
            return imap(func, iterable)
    return SizedGenerator(lambda: imap(func, iterable), length=length)
Return an iterable whose elements are the result of applying the callable `func` to each element of `iterable`. If `iterable` has a `len()`, then the iterable returned by this function will have the same `len()`. Otherwise calling `len()` on the returned iterable will raise `TypeError`. :param func: The function to apply to each element of `iterable`. :param iterable: An iterable whose objects will be mapped. :param bool strict: If `True` and `iterable` does not support `len()`, raise an exception immediately instead of returning an iterable that does not support `len()`.
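A minimal sketch of the len-preserving map described above; SizedGenerator is project-specific, so a small wrapper class stands in for it here:

class SizedMap:
    def __init__(self, func, iterable):
        self._func = func
        self._iterable = iterable

    def __len__(self):
        # Raises TypeError if the underlying iterable is unsized.
        return len(self._iterable)

    def __iter__(self):
        return (self._func(x) for x in self._iterable)

m = SizedMap(lambda x: x * 2, [1, 2, 3])
print(len(m), list(m))  # 3 [2, 4, 6]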
387,754
def ingest_user(self):
    # Action literal lost in extraction; 'ingest' matches the docstring.
    if self.audit_trail and self.audit_trail.records \
            and self.audit_trail.records[0].action == 'ingest':
        return self.audit_trail.records[0].user
Username responsible for ingesting this object into the repository, as recorded in the :attr:`audit_trail`, if available.
387,755
def XYZ100_to_CIECAM02(self, XYZ100, on_negative_A="raise"):
    XYZ100 = np.asarray(XYZ100, dtype=float)
    if XYZ100.shape[-1] != 3:
        raise ValueError("XYZ100 shape must be (..., 3)")

    RGB = broadcasting_matvec(M_CAT02, XYZ100)
    RGB_C = self.D_RGB * RGB
    RGBprime = broadcasting_matvec(M_HPE_M_CAT02_inv, RGB_C)
    RGBprime_signs = np.sign(RGBprime)
    tmp = (self.F_L * RGBprime_signs * RGBprime / 100) ** 0.42
    RGBprime_a = RGBprime_signs * 400 * (tmp / (tmp + 27.13)) + 0.1

    a = broadcasting_matvec([1, -12. / 11, 1. / 11], RGBprime_a)
    b = broadcasting_matvec([1. / 9, 1. / 9, -2. / 9], RGBprime_a)
    h_rad = np.arctan2(b, a)
    h = np.rad2deg(h_rad) % 360

    hprime = np.select([h < h_i[0], True], [h + 360, h])
    i = np.searchsorted(h_i, hprime, side="right") - 1
    tmp = (hprime - h_i[i]) / e_i[i]
    H = H_i[i] + ((100 * tmp) / (tmp + (h_i[i + 1] - hprime) / e_i[i + 1]))

    A = ((broadcasting_matvec([2, 1, 1. / 20], RGBprime_a) - 0.305) * self.N_bb)
    if on_negative_A == "raise":
        if np.any(A < 0):
            raise NegativeAError("attempted to convert a tristimulus "
                                 "value whose achromatic signal was "
                                 "negative, and on_negative_A=\"raise\"")
    elif on_negative_A == "nan":
        A = np.select([A < 0, True], [np.nan, A])
    else:
        raise ValueError("Invalid on_negative_A argument: got %r, "
                         "expected \"raise\" or \"nan\"" % (on_negative_A,))

    J = 100 * (A / self.A_w) ** (self.c * self.z)
    Q = self._J_to_Q(J)
    e = (12500. / 13) * self.N_c * self.N_cb * (np.cos(h_rad + 2) + 3.8)
    t = (e * np.sqrt(a ** 2 + b ** 2)
         / broadcasting_matvec([1, 1, 21. / 20], RGBprime_a))
    C = t**0.9 * (J / 100)**0.5 * (1.64 - 0.29**self.n)**0.73
    M = C * self.F_L**0.25
    s = 100 * (M / Q)**0.5

    return JChQMsH(J, C, h, Q, M, s, H)
Computes CIECAM02 appearance correlates for the given tristimulus value(s) XYZ (normalized to be on the 0-100 scale).

Example: ``vc.XYZ100_to_CIECAM02([30.0, 45.5, 21.0])``

:param XYZ100: An array-like of tristimulus values. These should be given on the 0-100 scale, not the 0-1 scale. The array-like should have shape ``(..., 3)``; e.g., you can use a simple 3-item list (shape = ``(3,)``), or to efficiently perform multiple computations at once, you could pass a higher-dimensional array, e.g. an image.
:arg on_negative_A: A known infelicity of the CIECAM02 model is that for some inputs, the achromatic signal :math:`A` can be negative, which makes it impossible to compute :math:`J`, :math:`C`, :math:`Q`, :math:`M`, or :math:`s` -- only :math:`h` and :math:`H` are spared. (See, e.g., section 2.6.4.1 of :cite:`Luo-CIECAM02` for discussion.) This argument allows you to specify a strategy for handling such points. Options are:

  * ``"raise"``: throws a :class:`NegativeAError` (a subclass of :class:`ValueError`)
  * ``"nan"``: return not-a-number values for the affected elements. (This may be particularly useful if converting a large number of points at once.)

:returns: A named tuple of type :class:`JChQMsH`, with attributes ``J``, ``C``, ``h``, ``Q``, ``M``, ``s``, and ``H`` containing the CIECAM02 appearance correlates.
387,756
def _iterate_uniqueness_keys(self, field):
    # Attribute name lost in extraction; 'uniqueness' matches the local
    # variable it is assigned to.
    uniqueness = getattr(field, 'uniqueness', None)
    if not uniqueness:
        return
    for keys in uniqueness:
        composed_keys = self._compose_keys(keys)
        yield composed_keys
Iterates over the keys marked as "unique" in the specified field. Arguments: field: The field whose keys to iterate over.
387,757
def get_homes(self, query=None, gps_lat=None, gps_lng=None, offset=0, items_per_grid=8):
    # Most literal keys and values of this params dict were lost in
    # extraction; only the entries tied to the function arguments are
    # reconstructed, and the endpoint path is inferred.
    params = {
        'items_offset': str(offset),
        'items_per_grid': str(items_per_grid),
        # ... roughly a dozen fixed API parameters were lost here ...
    }
    if not query and not (gps_lat and gps_lng):
        raise MissingParameterError("Missing query or gps coordinates")
    if query:
        params['query'] = query
    if gps_lat and gps_lng:
        params['gps_lat'] = gps_lat
        params['gps_lng'] = gps_lng
    r = self._session.get(API_URL + '/explore_tabs', params=params)  # endpoint inferred
    r.raise_for_status()
    return r.json()
Search listings with * Query (e.g. query="Lisbon, Portugal") or * Location (e.g. gps_lat=55.6123352&gps_lng=37.7117917)
387,758
def require_perms(view_func, required):
    from horizon.exceptions import NotAuthorized

    # The wrapper body was lost in extraction; this is a minimal
    # reconstruction matching the docstring's contract. Assumes functools
    # is imported at module level.
    @functools.wraps(view_func)
    def dec(request, *args, **kwargs):
        if request.user.has_perms(required):
            return view_func(request, *args, **kwargs)
        raise NotAuthorized()

    if required:
        return dec
    else:
        return view_func
Enforces permission-based access controls.

:param list required: A tuple of permission names, all of which the request user must possess in order access the decorated view.

Example usage::

    from horizon.decorators import require_perms

    @require_perms(['foo.admin', 'foo.member'])
    def my_view(request):
        ...

Raises a :exc:`~horizon.exceptions.NotAuthorized` exception if the requirements are not met.
387,759
def case_comments(self):
    comments = (comment for comment in self.comments
                if comment.variant_id is None)
    return comments
Return only comments made on the case.
387,760
def export_olx(self, tarball, root_path):
    # Many string literals (tag attributes, path templates, manager names)
    # were lost in extraction; the values below follow common OLX
    # conventions and are marked where they are guesses.
    def append_asset_to_soup_and_export(asset_):
        if isinstance(asset_, Item):
            try:
                unique_url = asset_.export_olx(tarball, root_path)
            except AttributeError:
                pass
            else:
                unique_name = get_file_name_without_extension(unique_url)
                asset_type = asset_.genus_type.identifier
                asset_tag = my_soup.new_tag(asset_type)
                asset_tag['url_name'] = unique_name  # attribute name inferred
                getattr(my_soup, my_tag).append(asset_tag)
        else:
            try:
                unique_urls = asset_.export_olx(tarball, root_path)
            except AttributeError:
                pass
            else:
                for index, ac in enumerate(asset_.get_asset_contents()):
                    asset_type = ac.genus_type.identifier
                    unique_url = unique_urls[index]
                    unique_name = get_file_name_without_extension(unique_url)
                    asset_tag = my_soup.new_tag(asset_type)
                    asset_tag['url_name'] = unique_name
                    getattr(my_soup, my_tag).append(asset_tag)

    def get_file_name_without_extension(filepath):
        # Literals inferred: strip the directory part and the .xml extension.
        return filepath.split('/')[-1].replace('.xml', '')

    my_path = None
    if self.my_osid_object.is_sequestered():
        # If sequestered, only export the assets
        for asset in self.assets:
            try:
                asset.export_olx(tarball, root_path)
            except AttributeError:
                pass
    else:
        my_tag = self.my_osid_object.genus_type.identifier
        expected_name = self.get_unique_name(tarball, self.url, my_tag, root_path)
        my_path = '{0}{1}/{2}.xml'.format(root_path, my_tag, expected_name)  # template inferred
        my_soup = BeautifulSoup('<' + my_tag + '/>', 'xml')
        getattr(my_soup, my_tag)['display_name'] = self.my_osid_object.display_name.text
        if my_tag == 'split_test':  # tag name inferred from the attributes below
            getattr(my_soup, my_tag)['group_id_to_child'] = self.my_osid_object.group_id_to_child
            getattr(my_soup, my_tag)['user_partition_id'] = self.my_osid_object.user_partition_id.text
        rm = self.my_osid_object._get_provider_manager('REPOSITORY')  # manager name inferred
        if self.my_osid_object._proxy is None:
            cls = rm.get_composition_lookup_session()
        else:
            cls = rm.get_composition_lookup_session(proxy=self.my_osid_object._proxy)
        cls.use_federated_repository_view()
        cls.use_unsequestered_composition_view()
        for child_id in self.my_osid_object.get_child_ids():
            child = cls.get_composition(child_id)
            if child.is_sequestered():
                # Sequestered children only contribute their assets
                for asset in child.assets:
                    append_asset_to_soup_and_export(asset)
            else:
                child_type = child.genus_type.identifier
                child_tag = my_soup.new_tag(child_type)
                child_path = child.export_olx(tarball, root_path)
                if child_path is not None:
                    child_tag['url_name'] = get_file_name_without_extension(child_path)
                getattr(my_soup, my_tag).append(child_tag)
        for asset in self.assets:
            append_asset_to_soup_and_export(asset)
        self.write_to_tarfile(tarball, my_path, my_soup)
    return my_path
if sequestered, only export the assets
387,761
def getDescendants(self, all_descendants=False):
    children = self.getBackReferences("AnalysisRequestParentAnalysisRequest")
    descendants = []
    if all_descendants:
        for child in children:
            descendants.append(child)
            descendants += child.getDescendants(all_descendants=True)
    else:
        descendants = children
    return descendants
Returns the descendant Analysis Requests :param all_descendants: recursively include all descendants
387,762
def write_oplog_progress(self):
    if self.oplog_checkpoint is None:
        return None

    with self.oplog_progress as oplog_prog:
        oplog_dict = oplog_prog.get_dict()
    items = [[name, util.bson_ts_to_long(oplog_dict[name])]
             for name in oplog_dict]
    if not items:
        return

    backup_file = self.oplog_checkpoint + ".backup"
    os.rename(self.oplog_checkpoint, backup_file)

    with open(self.oplog_checkpoint, "w") as dest:
        if len(items) == 1:
            json_str = json.dumps(items[0])
        else:
            json_str = json.dumps(items)
        try:
            dest.write(json_str)
        except IOError:
            # Write failed: wipe the file and restore it from the backup.
            dest.truncate()
            with open(backup_file, "r") as backup:
                shutil.copyfileobj(backup, dest)  # copyfileobj takes file objects

    os.remove(backup_file)
Writes oplog progress to file provided by user
387,763
def execute(self, sources, target):
    fields = self._get_fields_list_from_eps(sources)
    ret_fields = fields[0]
    if self.clear_feature:
        ret_fields = list(self._remove_field_roles(
            ret_fields, set(six.iterkeys(self.field_mapping)), FieldRole.FEATURE))
    target._ml_fields = list(self._set_singleton_role(ret_fields, self.field_mapping))
:type sources: list[DFAdapter] :type target: DFAdapter
387,764
def to_excess_returns(returns, rf, nperiods=None):
    if type(rf) is float and nperiods is not None:
        _rf = deannualize(rf, nperiods)
    else:
        _rf = rf
    return returns - _rf
Given a series of returns, it will return the excess returns over rf.

Args:
    * returns (Series, DataFrame): Returns
    * rf (float, Series): `Risk-Free rate(s) <https://www.investopedia.com/terms/r/risk-freerate.asp>`_ expressed in annualized term or return series
    * nperiods (int): Optional. If provided, will convert rf to different frequency using deannualize only if rf is a float

Returns:
    * excess_returns (Series, DataFrame): Returns - rf
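A hypothetical usage with a pandas return series and an annualized risk-free rate of 2%, deannualized to a monthly figure (nperiods=12); the geometric deannualization below is one common convention and may differ from the library's deannualize:

import pandas as pd

returns = pd.Series([0.010, 0.020, -0.005])
rf_annual = 0.02
rf_monthly = (1 + rf_annual) ** (1.0 / 12) - 1  # geometric deannualization
excess = returns - rf_monthly
print(excess)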
387,765
def create_environment_vip(self): return EnvironmentVIP( self.networkapi_url, self.user, self.password, self.user_ldap)
Get an instance of environment_vip services facade.
387,766
def list(self, all_my_agents=False, limit=500, offset=0):
    logger.info("list(all_my_agents=%s, limit=%s, offset=%s)",
                all_my_agents, limit, offset)
    if all_my_agents:
        evt = self._request_entity_list_all(limit=limit, offset=offset)
    else:
        evt = self._request_entity_list(limit=limit, offset=offset)
    self._wait_and_except_if_failed(evt)
    return evt.payload['entities']  # payload key lost in extraction; inferred
List `all` the things created by this client on this or all your agents.

Returns QAPI list function payload.

Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure.

`all_my_agents` (optional) (boolean) If `False` limit search to just this agent, if `True` return list of things belonging to all agents you own.

`limit` (optional) (integer) Return this many Point details.

`offset` (optional) (integer) Return Point details starting at this offset.
387,767
def method(self, returns, **parameter_types):
    @wrapt.decorator
    def type_check_wrapper(method, instance, args, kwargs):
        if instance is not None:
            raise Exception("Instance shouldn't be set.")
        parameter_names = inspect.getargspec(method).args
        defaults = inspect.getargspec(method).defaults
        parameters = self._collect_parameters(parameter_names, args, kwargs, defaults)
        parameter_checker.check_types(parameters, parameter_types, self._strict_floats)
        result = method(*args, **kwargs)
        parameter_checker.check_return_type(result, returns, self._strict_floats)
        return result

    def register_method(method):
        parameter_names = inspect.getargspec(method).args
        parameter_checker.check_type_declaration(parameter_names, parameter_types)
        wrapped_method = type_check_wrapper(method, None, None, None)
        fully_qualified_name = "{}.{}".format(method.__module__, method.__name__)
        self.register(fully_qualified_name, wrapped_method,
                      MethodSignature.create(parameter_names, parameter_types, returns))
        return wrapped_method

    return register_method
Syntactic sugar for registering a method.

Example:

>>> registry = Registry()
>>> @registry.method(returns=int, x=int, y=int)
... def add(x, y):
...     return x + y

:param returns: The method's return type
:type returns: type
:param parameter_types: The types of the method's parameters
:type parameter_types: dict[str, type]

.. versionadded:: 0.1.0
387,768
def register_callback_reassigned(self, func, serialised=True):
    self.__client.register_callback_reassigned(
        partial(self.__callback_payload_only, func), serialised)
Register a callback for resource reassignment. This will be called when any resource is reassigned to or from your agent. If `serialised` is not set, the callbacks might arrive in a different order to they were requested.

The payload passed to your callback is an OrderedDict with the following keys

    #!python
    r    : R_ENTITY, R_FEED, etc  # the type of resource reassigned
    lid  : <name>                 # the local name of the resource
    epId : <GUID>                 # the global Id of the agent the
                                  # resource has been reassigned *to*
    id   : <GUID>                 # the global Id of the resource

`Note` resource types are defined [here](../Core/Const.m.html)

`Note` You can check whether this is an assign "in" or "out" by comparing the epId with your current agent id, using the `IOT.Client.agent_id` property. If it's the same it's a reassign to you.

`Example`

    #!python
    def reassigned_callback(args):
        print(args)
    ...
    client.register_callback_reassigned(reassigned_callback)

This would print out something like the following on assignment of an R_ENTITY to

    #!python
    OrderedDict([(u'lid', u'moved_thing'), (u'r', 1),
                 (u'epId', u'5a8d603ee757133d66d99875d0584c72'),
                 (u'id', u'4448993b44738411de5fe2a6cf32d957')])
387,769
def get_reset_data(self, data):
    error = False
    reset = None
    msg = ""
    user = self.database.users.find_one({"reset": data["reset"]})
    if user is None:
        error = True
        msg = "Invalid reset hash."
    else:
        reset = {"hash": data["reset"],
                 "username": user["username"],
                 "realname": user["realname"]}
    return msg, error, reset
Returns the user info to reset
387,770
def i2s_frameid(x):
    try:
        return PNIO_FRAME_IDS[x]
    except KeyError:
        pass
    if 0x0100 <= x < 0x1000:
        return "RT_CLASS_3 (%4x)" % x
    if 0x8000 <= x < 0xC000:
        return "RT_CLASS_1 (%4x)" % x
    if 0xC000 <= x < 0xFC00:
        return "RT_CLASS_UDP (%4x)" % x
    if 0xFF80 <= x < 0xFF90:
        return "FragmentationFrameID (%4x)" % x
    return x
Get representation name of a pnio frame ID :param x: a key of the PNIO_FRAME_IDS dictionary :returns: str
387,771
def do_work_spec(self, args):
    work_spec_name = self._get_work_spec_name(args)
    spec = self.task_master.get_work_spec(work_spec_name)
    if args.json:
        self.stdout.write(json.dumps(spec, indent=4, sort_keys=True) + '\n')
    else:
        yaml.safe_dump(spec, self.stdout)
dump the contents of an existing work spec
387,772
def start_capture(self, adapter_number, output_file):
    # This body was garbled in extraction; it is reconstructed from the
    # surviving format-string fragments, so intermediate checks from the
    # original (e.g. that the adapter is connected) may be missing.
    try:
        adapter = self._ethernet_adapters[adapter_number]
    except KeyError:
        raise VirtualBoxError("Adapter {adapter_number} doesn't exist on VirtualBox VM '{name}'".format(
            name=self.name, adapter_number=adapter_number))
    yield from self._ubridge_send('bridge start_capture {name} "{output_file}"'.format(
        name="VBOX-{}-{}".format(self._id, adapter_number),  # bridge name inferred
        output_file=output_file))
    log.info("VirtualBox VM '{name}' [{id}]: starting packet capture on adapter {adapter_number}".format(
        name=self.name, id=self.id, adapter_number=adapter_number))
Starts a packet capture. :param adapter_number: adapter number :param output_file: PCAP destination file for the capture
387,773
def _set_get_stp_brief_info(self, v, load=False):
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Several generated string literals (extension dict, namespace,
        # defining module, YANG type) were lost in extraction; the values
        # below are inferred from the module and may not match exactly.
        t = YANGDynClass(v, base=get_stp_brief_info.get_stp_brief_info,
                         is_leaf=True, yang_name="get-stp-brief-info",
                         rest_name="get-stp-brief-info", parent=self,
                         path_helper=self._path_helper,
                         extmethods=self._extmethods, register_paths=False,
                         namespace='urn:brocade.com:mgmt:brocade-xstp-ext',
                         defining_module='brocade-xstp-ext',
                         yang_type='rpc', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': 'get_stp_brief_info must be of a type compatible with rpc',
            'defined-type': 'rpc',
            'generated-type': 'YANGDynClass(...)',
        })
    self.__get_stp_brief_info = t
    if hasattr(self, '_set'):
        self._set()
Setter method for get_stp_brief_info, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_brief_info (rpc).

If this variable is read-only (config: false) in the source YANG file, then _set_get_stp_brief_info is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_get_stp_brief_info() directly.

YANG Description: RPC to return spanning tree information similar to the CLI 'show spanning-tree'.
387,774
def getScriptLocation():
    location = os.path.abspath("./")
    if __file__.rfind("/") != -1:
        location = __file__[:__file__.rfind("/")]
    return location
Helper function to get the location of a Python file.
387,775
def _expand_prefix_spec(self, spec, prefix=''):
    # Several key-name literals were lost in extraction; the names below
    # are inferred from the surrounding code and marked where uncertain.
    if type(spec) is not dict:
        raise NipapInputError('invalid prefix specification')

    for key in spec.keys():
        if key not in _prefix_spec:
            raise NipapExtraneousInputError("Key '%s' not allowed in prefix spec." % key)

    where = ""
    params = {}

    # if we have an id, no other input is needed
    if 'id' in spec:
        if spec != {'id': spec['id']}:
            raise NipapExtraneousInputError("If 'id' is specified, no other keys are allowed.")

    family = None
    if 'family' in spec:
        family = spec['family']
        del(spec['family'])

    # add the column prefix to all keys
    spec2 = {}
    for k in spec:
        spec2[prefix + k] = spec[k]
    spec = spec2

    if prefix + 'vrf_id' in spec:
        if spec[prefix + 'vrf_id'] is None:
            spec[prefix + 'vrf_id'] = 0
    # The three rename blocks below lost their key names in extraction;
    # 'vrf_rt', 'vrf_name' and 'vrf' are plausible candidates.
    if prefix + 'vrf_rt' in spec:
        spec['vrf_rt'] = spec[prefix + 'vrf_rt']
        del(spec[prefix + 'vrf_rt'])
    if prefix + 'vrf_name' in spec:
        spec['vrf_name'] = spec[prefix + 'vrf_name']
        del(spec[prefix + 'vrf_name'])
    if prefix + 'vrf' in spec:
        spec['vrf'] = spec[prefix + 'vrf']
        del(spec[prefix + 'vrf'])

    where, params = self._sql_expand_where(spec)
    self._logger.debug("_expand_prefix_spec; where: %s params: %s" % (where, unicode(params)))
    return where, params
Expand prefix specification to SQL.
387,776
def build(self, endpoint, values=None, method=None, force_external=False,
          append_unknown=True):
    self.map.update()
    if values:
        if isinstance(values, MultiDict):
            valueiter = values.iteritems(multi=True)
        else:
            valueiter = iteritems(values)
        values = dict((k, v) for k, v in valueiter if v is not None)
    else:
        values = {}

    rv = self._partial_build(endpoint, values, method, append_unknown)
    if rv is None:
        raise BuildError(endpoint, values, method)
    domain_part, path = rv

    host = self.get_host(domain_part)
    # Only build absolute URLs when forced or when the target is on a
    # different subdomain/host; the URL string literals here were lost in
    # extraction and are restored from the URL shapes in the docstring.
    if not force_external and (
            (self.map.host_matching and host == self.server_name) or
            (not self.map.host_matching and domain_part == self.subdomain)):
        return str(urljoin(self.script_name, './' + path.lstrip('/')))
    return str('%s://%s%s/%s' % (
        self.url_scheme,
        host,
        self.script_name[:-1],
        path.lstrip('/')
    ))
Building URLs works pretty much the other way round. Instead of `match` you call `build` and pass it the endpoint and a dict of arguments for the placeholders.

The `build` function also accepts an argument called `force_external` which, if you set it to `True` will force external URLs. Per default external URLs (include the server name) will only be used if the target URL is on a different subdomain.

>>> m = Map([
...     Rule('/', endpoint='index'),
...     Rule('/downloads/', endpoint='downloads/index'),
...     Rule('/downloads/<int:id>', endpoint='downloads/show')
... ])
>>> urls = m.bind("example.com", "/")
>>> urls.build("index", {})
'/'
>>> urls.build("downloads/show", {'id': 42})
'/downloads/42'
>>> urls.build("downloads/show", {'id': 42}, force_external=True)
'http://example.com/downloads/42'

Because URLs cannot contain non ASCII data you will always get bytestrings back. Non ASCII characters are urlencoded with the charset defined on the map instance.

Additional values are converted to unicode and appended to the URL as URL querystring parameters:

>>> urls.build("index", {'q': 'My Searchstring'})
'/?q=My+Searchstring'

If a rule does not exist when building a `BuildError` exception is raised.

The build method accepts an argument called `method` which allows you to specify the method you want to have an URL built for if you have different methods for the same endpoint specified.

.. versionadded:: 0.6
   the `append_unknown` parameter was added.

:param endpoint: the endpoint of the URL to build.
:param values: the values for the URL to build. Unhandled values are appended to the URL as query parameters.
:param method: the HTTP method for the rule if there are different URLs for different methods on the same endpoint.
:param force_external: enforce full canonical external URLs.
:param append_unknown: unknown parameters are appended to the generated URL as query string argument. Disable this if you want the builder to ignore those.
387,777
def notify(self, msg, color='green', notify='true', message_format='text'):
    # The dict keys and default literals were lost in extraction; the keys
    # below follow the HipChat message API, and the defaults are guesses.
    self.message_dict = {
        'message': msg,
        'color': color,
        'notify': notify,
        'message_format': message_format,
    }
    if not self.debug:
        return requests.post(
            self.notification_url,
            json.dumps(self.message_dict),
            headers=self.headers
        )
    else:
        print('Debug enabled, not sending message: {}'.format(msg))
        return []
Send notification to specified HipChat room
387,778
def possible_version_evaluation(self):
    # The yielded format-string literals were lost in extraction; the
    # templates below are plausible reconstructions of the report layout.
    only_broken = self.get_options().only_broken
    ranges = self._ranges
    yield 'Allowable JVM platform ranges:'
    for target in sorted(filter(self._is_relevant, self.jvm_targets)):
        min_version = ranges.min_allowed_version.get(target)
        max_version = ranges.max_allowed_version.get(target)
        current_valid = True
        if min_version and self.jvm_version(target) < min_version:
            current_valid = False
        if max_version and self.jvm_version(target) > max_version:
            current_valid = False
        current_text = str(self.jvm_version(target))
        if not current_valid:
            current_text = self._format_error(current_text)
        elif only_broken:
            continue
        if min_version and max_version:
            range_text = '{} to {}'.format(min_version, max_version)
            if min_version > max_version:
                range_text = self._format_error(range_text)
        elif min_version:
            range_text = '{}+'.format(min_version)
        elif max_version:
            range_text = '<={}'.format(max_version)
        else:
            range_text = '*'
        yield '{address}: {range}  (is {current})'.format(address=target.address.spec,
                                                          range=range_text,
                                                          current=current_text,)
        if self.get_options().detailed or not current_valid:
            if min_version:
                min_because = [t for t in ranges.target_dependencies[target]
                               if self.jvm_version(t) == min_version]
                yield '  min={} because of dependencies:'.format(min_version)
                for dep in sorted(min_because):
                    yield '    {}'.format(dep.address.spec)
            if max_version:
                max_because = [t for t in ranges.target_dependees[target]
                               if self.jvm_version(t) == max_version]
                yield '  max={} because of dependees:'.format(max_version)
                for dep in sorted(max_because):
                    yield '    {}'.format(dep.address.spec)
            yield ''
Evaluate the possible range of versions for each target, yielding the output analysis.
387,779
def load_schema(schema):
    with repo.Repo.db:
        repo.Repo.db.executescript(schema)
Load a schema file with path +schema+ into the database. Assumes that there exists an active database connection.
387,780
def dead(self):
    if not self._weak:
        return False
    cb = self._callback()
    if cb is None:
        return True
    return False
Whether the callback no longer exists. If the callback is maintained via a weak reference, and that weak reference has been collected, this will be true instead of false.
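A sketch of the weak-reference liveness check, assuming the callback is stored as a weakref.ref when weak referencing is requested:

import gc
import weakref

class Handle:
    def __init__(self, callback, weak=True):
        self._weak = weak
        self._callback = weakref.ref(callback) if weak else (lambda: callback)

    @property
    def dead(self):
        if not self._weak:
            return False
        # The weakref returns None once the callback has been collected.
        return self._callback() is None

def cb():
    pass

h = Handle(cb)
print(h.dead)  # False while cb is still referenced
del cb
gc.collect()
print(h.dead)  # True once the callback has been collected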
387,781
async def _process_latching(self, key, latching_entry):
    if latching_entry[Constants.LATCH_CALLBACK]:
        # Auto-clear the entry and execute the callback
        if latching_entry[Constants.LATCH_CALLBACK_TYPE]:
            await latching_entry[Constants.LATCH_CALLBACK](
                [key, latching_entry[Constants.LATCHED_DATA], time.time()])
        else:
            latching_entry[Constants.LATCH_CALLBACK](
                [key, latching_entry[Constants.LATCHED_DATA], time.time()])
        self.latch_map[key] = [0, 0, 0, 0, 0, None]
    else:
        updated_latch_entry = latching_entry
        updated_latch_entry[Constants.LATCH_STATE] = Constants.LATCH_LATCHED
        updated_latch_entry[Constants.LATCHED_DATA] = \
            latching_entry[Constants.LATCHED_DATA]
        updated_latch_entry[Constants.LATCHED_TIME_STAMP] = time.time()
        self.latch_map[key] = updated_latch_entry
This is a private utility method. This method process latching events and either returns them via callback or stores them in the latch map :param key: Encoded pin :param latching_entry: a latch table entry :returns: Callback or store data in latch map
387,782
def compute_laplacian(self, lap_type='combinatorial'):
    # The body was mostly lost in extraction; this minimal reconstruction
    # follows the definitions in the docstring (the default value is
    # inferred from "Default is combinatorial").
    if lap_type != self.lap_type:
        # Invalidate cached spectral quantities when the type changes.
        self._lmax = None
    self.lap_type = lap_type
    # Directed graphs use a symmetrized adjacency, per the docstring.
    W = self.W if not self.is_directed() else (self.W + self.W.T) / 2
    n = W.shape[0]
    d = np.ravel(W.sum(axis=1))
    if lap_type == 'combinatorial':
        self.L = (sparse.diags(d) - W).tocsc()
    elif lap_type == 'normalized':
        disconnected = np.where(d == 0)[0]
        d_inv_sqrt = np.zeros_like(d)
        np.power(d, -0.5, where=d > 0, out=d_inv_sqrt)
        D = sparse.diags(d_inv_sqrt)
        L = (sparse.identity(n) - D @ W @ D).tolil()
        if disconnected.size:
            L[disconnected, disconnected] = 0  # zero diagonal for isolated nodes
        self.L = L.tocsc()
    else:
        raise ValueError('Unknown Laplacian type {}'.format(lap_type))
r"""Compute a graph Laplacian. For undirected graphs, the combinatorial Laplacian is defined as .. math:: L = D - W, where :math:`W` is the weighted adjacency matrix and :math:`D` the weighted degree matrix. The normalized Laplacian is defined as .. math:: L = I - D^{-1/2} W D^{-1/2}, where :math:`I` is the identity matrix. For directed graphs, the Laplacians are built from a symmetrized version of the weighted adjacency matrix that is the average of the weighted adjacency matrix and its transpose. As the Laplacian is defined as the divergence of the gradient, it is not affected by the orientation of the edges. For both Laplacians, the diagonal entries corresponding to disconnected nodes (i.e., nodes with degree zero) are set to zero. Once computed, the Laplacian is accessible by the attribute :attr:`L`. Parameters ---------- lap_type : {'combinatorial', 'normalized'} The kind of Laplacian to compute. Default is combinatorial. Examples -------- Combinatorial and normalized Laplacians of an undirected graph. >>> graph = graphs.Graph([ ... [0, 2, 0], ... [2, 0, 1], ... [0, 1, 0], ... ]) >>> graph.compute_laplacian('combinatorial') >>> graph.L.toarray() array([[ 2., -2., 0.], [-2., 3., -1.], [ 0., -1., 1.]]) >>> graph.compute_laplacian('normalized') >>> graph.L.toarray() array([[ 1. , -0.81649658, 0. ], [-0.81649658, 1. , -0.57735027], [ 0. , -0.57735027, 1. ]]) Combinatorial and normalized Laplacians of a directed graph. >>> graph = graphs.Graph([ ... [0, 2, 0], ... [2, 0, 1], ... [0, 0, 0], ... ]) >>> graph.compute_laplacian('combinatorial') >>> graph.L.toarray() array([[ 2. , -2. , 0. ], [-2. , 2.5, -0.5], [ 0. , -0.5, 0.5]]) >>> graph.compute_laplacian('normalized') >>> graph.L.toarray() array([[ 1. , -0.89442719, 0. ], [-0.89442719, 1. , -0.4472136 ], [ 0. , -0.4472136 , 1. ]]) The Laplacian is defined as the divergence of the gradient. See :meth:`compute_differential_operator` for details. >>> graph = graphs.Path(20) >>> graph.compute_differential_operator() >>> L = graph.D.dot(graph.D.T) >>> np.all(L.toarray() == graph.L.toarray()) True The Laplacians have a bounded spectrum. >>> G = graphs.Sensor(50) >>> G.compute_laplacian('combinatorial') >>> G.compute_fourier_basis() >>> -1e-10 < G.e[0] < 1e-10 < G.e[-1] < 2*np.max(G.dw) True >>> G.compute_laplacian('normalized') >>> G.compute_fourier_basis() >>> -1e-10 < G.e[0] < 1e-10 < G.e[-1] < 2 True
387,783
def register_languages():
    for language in set(SUPPORTED_LANGUAGES) - {"en"}:
        language_stemmer = partial(nltk_stemmer, get_language_stemmer(language))
        Pipeline.register_function(language_stemmer, "stemmer-{}".format(language))
Register all supported languages to ensure compatibility.
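For readers unfamiliar with the partial-based registration above, this standalone sketch shows the same pattern with NLTK's Snowball stemmer (an optional dependency; apply_stemmer is an illustrative stand-in for nltk_stemmer):

from functools import partial

def apply_stemmer(stemmer, token):
    # Delegate to a stemmer object bound in via functools.partial.
    return stemmer.stem(token)

try:
    from nltk.stem.snowball import SnowballStemmer
    french = partial(apply_stemmer, SnowballStemmer('french'))
    print(french('maisons'))  # e.g. 'maison'
except ImportError:
    pass  # nltk not installed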
387,784
def _process_image_msg(self, msg):
    encoding = msg.encoding
    try:
        image = self._bridge.imgmsg_to_cv2(msg, encoding)
    except CvBridgeError as e:
        rospy.logerr(e)
        raise  # propagate, as documented
    return image
Process an image message and return a numpy array with the image data.

Returns
-------
:obj:`numpy.ndarray`
    Numpy array containing the image in the image message.

Raises
------
CvBridgeError
    If the bridge is not able to convert the image.
387,785
def gen_unordered(self):
    operations = [_Run(_INSERT), _Run(_UPDATE), _Run(_DELETE)]
    for idx, (op_type, operation) in enumerate(self.ops):
        operations[op_type].add(idx, operation)
    for run in operations:
        if run.ops:
            yield run
Generate batches of operations, batched by type of operation, in arbitrary order.
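A self-contained sketch of the batching behavior; _Run here is a stand-in carrying only the fields the generator touches, not the real class from PyMongo's bulk module:

_INSERT, _UPDATE, _DELETE = 0, 1, 2

class _Run:
    # Stand-in: record the operations and their original indexes.
    def __init__(self, op_type):
        self.op_type = op_type
        self.index_map = []
        self.ops = []

    def add(self, original_index, operation):
        self.index_map.append(original_index)
        self.ops.append(operation)

ops = [(_INSERT, {'a': 1}), (_DELETE, {'a': 1}), (_INSERT, {'b': 2})]
runs = [_Run(_INSERT), _Run(_UPDATE), _Run(_DELETE)]
for idx, (op_type, operation) in enumerate(ops):
    runs[op_type].add(idx, operation)
for run in runs:
    if run.ops:
        print(run.op_type, run.index_map, run.ops)
# 0 [0, 2] [{'a': 1}, {'b': 2}]   -- both inserts batched together
# 2 [1] [{'a': 1}]                -- the delete in its own run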
387,786
def calculate_request_digest(method, partial_digest, digest_response=None,
                             uri=None, nonce=None, nonce_count=None,
                             client_nonce=None):
    if digest_response:
        if uri or nonce or nonce_count or client_nonce:
            raise Exception("Both digest_response and one or more "
                            "individual parameters were sent.")
        uri = digest_response.uri
        nonce = digest_response.nonce
        nonce_count = digest_response.nc
        client_nonce = digest_response.cnonce
    elif not (uri and nonce and (nonce_count is not None) and client_nonce):
        raise Exception("Neither digest_response nor all individual "
                        "parameters were sent.")
    # HA2 = MD5(method:uri); response = MD5(HA1:nonce:nc:cnonce:qop:HA2)
    # with qop='auth' per RFC 2617. Uses the legacy md5 module imported
    # elsewhere in this file.
    ha2 = md5.md5("%s:%s" % (method, uri)).hexdigest()
    data = "%s:%s:%s:%s:%s" % (nonce, "%08x" % nonce_count, client_nonce,
                               'auth', ha2)
    kd = md5.md5("%s:%s" % (partial_digest, data)).hexdigest()
    return kd
Calculates the 'response' value of the client authentication request. Requires the 'partial_digest' calculated from the realm, username, and password. Either call it with a digest_response to use the values from an authentication request, or pass the individual parameters (i.e. to generate an authentication request).
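The computation is standard RFC 2617 digest auth with qop='auth', so it can be checked against the RFC's own worked example (shown here with hashlib rather than the legacy md5 module):

import hashlib

def md5_hex(s):
    return hashlib.md5(s.encode('utf-8')).hexdigest()

# partial_digest (HA1) = MD5(username:realm:password)
ha1 = md5_hex('Mufasa:testrealm@host.com:Circle Of Life')
ha2 = md5_hex('GET:/dir/index.html')
data = '%s:%08x:%s:%s:%s' % ('dcd98b7102dd2f0e8b11d0f600bfb0c093',
                             1, '0a4f113b', 'auth', ha2)
print(md5_hex('%s:%s' % (ha1, data)))
# 6629fae49393a05397450978507c4ef1 -- matches RFC 2617 section 3.5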
387,787
def load(cls, path_to_file):
    import mimetypes
    mimetypes.init()
    mime = mimetypes.guess_type('%s' % path_to_file)[0]
    img_type = ImageTypeEnum.lookup_by_mime_type(mime)
    with open(path_to_file, 'rb') as f:
        data = f.read()
    return Image(data, image_type=img_type)
Loads the image data from a file on disk and tries to guess the image MIME type.

:param path_to_file: path to the source file
:type path_to_file: str
:return: a `pyowm.image.Image` instance
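A hedged usage sketch (the file path is hypothetical):

img = Image.load('/tmp/radar_tile.png')  # hypothetical file
print(img.image_type)  # looked up from the MIME type guessed off the extension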
387,788
def run_preassembly_related(preassembler, beliefengine, **kwargs):
    logger.info('Combining related on %d statements...'
                % len(preassembler.unique_stmts))
    return_toplevel = kwargs.get('return_toplevel', True)
    poolsize = kwargs.get('poolsize', None)
    size_cutoff = kwargs.get('size_cutoff', 100)
    stmts_out = preassembler.combine_related(return_toplevel=False,
                                             poolsize=poolsize,
                                             size_cutoff=size_cutoff)
    beliefengine.set_hierarchy_probs(stmts_out)
    do_flatten_evidence = kwargs.get('flatten_evidence', False)
    if do_flatten_evidence:
        flatten_evidences_collect_from = \
            kwargs.get('flatten_evidence_collect_from', 'supported_by')
        stmts_out = flatten_evidence(stmts_out, flatten_evidences_collect_from)
    stmts_top = filter_top_level(stmts_out)
    if return_toplevel:
        stmts_out = stmts_top
        logger.info('%d top-level statements' % len(stmts_out))
    else:
        logger.info('%d statements out of which %d are top-level'
                    % (len(stmts_out), len(stmts_top)))
    dump_pkl = kwargs.get('save')
    if dump_pkl:
        dump_statements(stmts_out, dump_pkl)
    return stmts_out
Run related stage of preassembly on a list of statements.

Parameters
----------
preassembler : indra.preassembler.Preassembler
    A Preassembler instance which already has a set of unique statements
    internally.
beliefengine : indra.belief.BeliefEngine
    A BeliefEngine instance.
return_toplevel : Optional[bool]
    If True, only the top-level statements are returned. If False,
    all statements are returned irrespective of level of specificity.
    Default: True
poolsize : Optional[int]
    The number of worker processes to use to parallelize the comparisons
    performed by the function. If None (default), no parallelization is
    performed. NOTE: Parallelization is only available on Python 3.4 and
    above.
size_cutoff : Optional[int]
    Groups with size_cutoff or more statements are sent to worker
    processes, while smaller groups are compared in the parent process.
    Default value is 100. Not relevant when parallelization is not used.
flatten_evidence : Optional[bool]
    If True, evidences are collected and flattened via
    supports/supported_by links. Default: False
flatten_evidence_collect_from : Optional[str]
    String indicating whether to collect and flatten evidence from the
    `supports` attribute of each statement or the `supported_by`
    attribute. If not set, defaults to 'supported_by'. Only relevant
    when flatten_evidence is True.
save : Optional[str]
    The name of a pickle file to save the results (stmts_out) into.

Returns
-------
stmts_out : list[indra.statements.Statement]
    A list of preassembled top-level statements.
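A usage sketch built only from the parameters documented above; `pa` and `be` stand for an already-constructed Preassembler (with unique statements combined) and a BeliefEngine:

stmts = run_preassembly_related(pa, be,
                                return_toplevel=True,
                                poolsize=4,
                                size_cutoff=100,
                                flatten_evidence=True,
                                flatten_evidence_collect_from='supported_by',
                                save='related_stmts.pkl')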
387,789
def visible(self, visible):
    self._visible = visible
    if visible and len(self.panel.objects) == 0:
        self.setup()
        self.select.visible = True
        self.control_panel.extend(self.controls)
        self.panel.extend(self.children)
    elif not visible and len(self.panel.objects) > 0:
        self.unwatch()
        self.select.visible = False
        self.control_panel.clear()
        self.search.visible = False
        self.add.visible = False
        self.panel.clear()
    if self.visible_callback:
        self.visible_callback(visible)
When visibility changes, run setup or unwatch as appropriate, then call visible_callback.
387,790
def eval(self, expression, args=None, *,
         timeout=-1.0, push_subscribe=False) -> _MethodRet:
    return self._db.eval(expression, args,
                         timeout=timeout,
                         push_subscribe=push_subscribe)
Eval request coroutine.

Examples:

.. code-block:: pycon

    >>> await conn.eval('return 42')
    <Response sync=3 rowcount=1 data=[42]>

    >>> await conn.eval('return box.info.version')
    <Response sync=3 rowcount=1 data=['2.1.1-7-gd381a45b6']>

:param expression: expression to execute
:param args: arguments to pass to the function, that will execute your
             expression (list object)
:param timeout: Request timeout
:param push_subscribe: Subscribe to push messages
:returns: :class:`asynctnt.Response` instance
387,791
def run(configobj, wcsmap=None):
    # (The startup banner, the input parsing that builds imgObjList,
    # outwcs and procSteps and sets clean=True, and an early-return
    # error branch are truncated in the source.)
    try:
        log.info("USER INPUT PARAMETERS common to all Processing Steps:")
        util.printParams(configobj, log=log)
        # Run the individual processing steps in order.
        staticMask.createStaticMask(imgObjList, configobj, procSteps=procSteps)
        sky.subtractSky(imgObjList, configobj, procSteps=procSteps)
        adrizzle.drizSeparate(imgObjList, outwcs, configobj, wcsmap=wcsmap,
                              procSteps=procSteps)
        createMedian.createMedian(imgObjList, configobj, procSteps=procSteps)
        ablot.runBlot(imgObjList, outwcs, configobj, wcsmap=wcsmap,
                      procSteps=procSteps)
        drizCR.rundrizCR(imgObjList, configobj, procSteps=procSteps)
        adrizzle.drizFinal(imgObjList, outwcs, configobj, wcsmap=wcsmap,
                           procSteps=procSteps)
        print()
        print("AstroDrizzle Version {:s} is finished processing at {:s}.\n"
              .format(__version__, util._ptime()[0]))
    except:
        clean = False
        print(textutil.textbox(
            "ERROR:\nAstroDrizzle Version {:s} encountered a problem! "
            "Processing terminated at {:s}."
            .format(__version__, util._ptime()[0])), file=sys.stderr)
        raise
    finally:
        procSteps.reportTimes()
        if imgObjList:
            for image in imgObjList:
                if clean:
                    image.clean()
                image.close()
            del imgObjList
            del outwcs
Initial example by Nadia ran MD with configobj EPAR using:

It can be run in one of two ways:

    from stsci.tools import teal

    1. Passing a config object to teal

    teal.teal('drizzlepac/pars/astrodrizzle.cfg')

    2. Passing a task name:

    teal.teal('astrodrizzle')

The example config files are in drizzlepac/pars
387,792
def _check_minions_directories(pki_dir):
    minions_accepted = os.path.join(pki_dir, salt.key.Key.ACC)
    minions_pre = os.path.join(pki_dir, salt.key.Key.PEND)
    minions_rejected = os.path.join(pki_dir, salt.key.Key.REJ)
    minions_denied = os.path.join(pki_dir, salt.key.Key.DEN)
    return minions_accepted, minions_pre, minions_rejected, minions_denied
Return the minion keys directory paths. This function is a copy of salt.key.Key._check_minions_directories.
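With Salt's standard directory names for accepted, pending, rejected, and denied keys, the function resolves to paths like these (the pki_dir shown is hypothetical):

import os

pki_dir = '/etc/salt/pki/master'  # hypothetical master pki_dir
for name in ('minions', 'minions_pre', 'minions_rejected', 'minions_denied'):
    print(os.path.join(pki_dir, name))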
387,793
def write(self, filename, type_='obo'):
    if os.path.exists(filename):
        # Never overwrite: bump (or add) a numeric suffix and retry.
        name, ext = filename.rsplit('.', 1)
        try:
            prefix, num = name.rsplit('-', 1)  # suffix separator assumed
            n = int(num)
            n += 1
            filename = prefix + '-' + str(n) + '.' + ext
        except ValueError:
            filename = name + '-1.' + ext
        print('file exists, writing to %s instead' % filename)
        self.write(filename, type_)
    else:
        with open(filename, 'wt', encoding='utf-8') as f:
            if type_ == 'obo':
                f.write(str(self))
            elif type_ == 'ttl':
                f.write(self.__ttl__())
            else:
                raise TypeError('No writer for type_=%s' % type_)
Write the file; will not overwrite an existing file with the same name (a numeric suffix is appended instead). Outputs to obo by default but can also output to ttl if passed type_='ttl' when called.
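The collision handling above renames rather than overwrites; this standalone helper mirrors that logic (the '-' suffix separator is an assumption carried over from the reconstruction above):

def next_filename(filename):
    name, ext = filename.rsplit('.', 1)
    try:
        prefix, num = name.rsplit('-', 1)
        return '%s-%d.%s' % (prefix, int(num) + 1, ext)
    except ValueError:
        # No numeric suffix yet: start the sequence at 1.
        return '%s-1.%s' % (name, ext)

print(next_filename('graph.ttl'))    # graph-1.ttl
print(next_filename('graph-1.ttl'))  # graph-2.ttl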
387,794
def parsebool(el):
    txt = text(el)
    up = txt.upper()
    if up == "OUI":
        return True
    if up == "NON":
        return False
    return bool(parseint(el))
Parse a ``BeautifulSoup`` element as a bool
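A self-contained check of the OUI/NON/integer-fallback behavior, reimplemented inline so it runs without the module's text/parseint helpers:

from bs4 import BeautifulSoup

def demo_parsebool(el):
    up = el.get_text().strip().upper()
    if up == 'OUI':
        return True
    if up == 'NON':
        return False
    return bool(int(up))

soup = BeautifulSoup('<v>oui</v><v>NON</v><v>0</v>', 'html.parser')
print([demo_parsebool(v) for v in soup.find_all('v')])  # [True, False, False]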
387,795
def namedb_history_save(cur, opcode, history_id, creator_address, value_hash,
                        block_id, vtxindex, txid, accepted_rec,
                        history_snapshot=False):
    assert 'op' in accepted_rec, \
        "Malformed record at ({},{}): missing op".format(block_id, vtxindex)

    op = accepted_rec['op']
    record_data = op_canonicalize(opcode, accepted_rec)
    record_txt = json.dumps(record_data, sort_keys=True)
    history_insert = {
        "txid": txid,
        "history_id": history_id,
        "creator_address": creator_address,
        "block_id": block_id,
        "vtxindex": vtxindex,
        "op": op,
        "opcode": opcode,
        "history_data": record_txt,
        "value_hash": value_hash,
    }

    try:
        query, values = namedb_insert_prepare(cur, history_insert, "history")
    except Exception as e:
        log.exception(e)
        log.error("FATAL: failed to append history record for '%s' at (%s, %s)"
                  % (history_id, block_id, vtxindex))
        os.abort()

    namedb_query_execute(cur, query, values)
    return True
Insert data into the state engine's history.
It must be for a never-before-seen (txid, block_id, vtxindex) set.

@history_id is either the name or namespace ID

Return True on success
Raise an Exception on error
387,796
def record_serializer(self):
    # Config key assumed from context; the value may be either an
    # import path string or the serializer object itself.
    imp = current_app.config['GITHUB_RECORD_SERIALIZER']
    if isinstance(imp, string_types):
        return import_string(imp)
    return imp
Record serializer, resolved from the application config (either an import path or the serializer object itself).
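The import-path-or-object pattern is easy to check with werkzeug's import_string, which the code above relies on:

from werkzeug.utils import import_string

for imp in ('json.dumps', print):  # a dotted path, or the object itself
    obj = import_string(imp) if isinstance(imp, str) else imp
    print(obj)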
387,797
def run(self):
    import salt.client
    self.parse_args()

    if self.config['log_level'] not in ('quiet', ):
        # Setup file logging!
        self.setup_logfile_logger()
        verify_log(self.config)

    try:
        # If using eauth and a token hasn't already been loaded into
        # kwargs, prompt for auth credentials. (The kwargs setup and
        # the remainder of the body are truncated in the source; the
        # 'token'/'key' keys are reconstructed from context.)
        if 'token' not in kwargs and 'key' not in kwargs and self.options.eauth:
            ...
Execute the salt command line
387,798
def fig16(): tpm = np.array([ [0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 1, 0, 0], [1, 0, 1, 0, 0, 0, 0], [1, 0, 0, 0, 1, 0, 0], [1, 0, 0, 0, 0, 0, 0], [1, 0, 1, 0, 1, 0, 0], [1, 0, 1, 0, 0, 0, 0], [1, 0, 0, 0, 1, 0, 0], [0, 0, 0, 1, 1, 0, 0], [0, 0, 1, 1, 1, 0, 0], [1, 0, 1, 1, 1, 0, 0], [1, 0, 0, 1, 1, 0, 0], [1, 0, 0, 1, 1, 0, 0], [1, 1, 1, 1, 1, 0, 0], [1, 0, 1, 1, 1, 0, 0], [1, 1, 0, 1, 1, 0, 0], [0, 0, 0, 1, 1, 0, 0], [0, 0, 1, 1, 1, 0, 0], [1, 0, 1, 1, 1, 0, 0], [1, 0, 0, 1, 1, 0, 0], [1, 0, 0, 1, 1, 0, 0], [1, 0, 1, 1, 1, 0, 0], [1, 0, 1, 1, 1, 0, 0], [1, 0, 0, 1, 1, 0, 0], [0, 0, 0, 1, 1, 0, 0], [0, 0, 1, 1, 1, 0, 0], [1, 0, 1, 1, 1, 0, 0], [1, 0, 0, 1, 1, 0, 0], [1, 0, 0, 1, 1, 0, 0], [1, 1, 1, 1, 1, 0, 0], [1, 0, 1, 1, 1, 0, 0], [1, 1, 0, 1, 1, 0, 0], [0, 0, 0, 0, 0, 1, 0], [0, 0, 1, 0, 1, 1, 0], [1, 0, 1, 0, 0, 1, 0], [1, 0, 0, 0, 1, 1, 0], [1, 0, 0, 0, 0, 1, 0], [1, 0, 1, 0, 1, 1, 0], [1, 0, 1, 0, 0, 1, 0], [1, 0, 0, 0, 1, 1, 0], [0, 0, 0, 1, 1, 1, 0], [0, 0, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 1, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 1, 0, 1, 1, 1, 0], [0, 0, 0, 1, 1, 1, 0], [0, 0, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [0, 0, 0, 1, 1, 1, 0], [0, 0, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 1, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 1, 0, 1, 1, 1, 0], [0, 0, 0, 0, 0, 1, 0], [0, 0, 1, 0, 1, 1, 0], [1, 0, 1, 0, 0, 1, 0], [1, 0, 0, 0, 1, 1, 0], [1, 0, 0, 0, 0, 1, 0], [1, 0, 1, 0, 1, 1, 0], [1, 0, 1, 0, 0, 1, 0], [1, 0, 0, 0, 1, 1, 0], [0, 0, 0, 1, 1, 1, 0], [0, 0, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 1, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 1, 0, 1, 1, 1, 0], [0, 0, 0, 1, 1, 1, 0], [0, 0, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [0, 0, 0, 1, 1, 1, 0], [0, 0, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 0, 0, 1, 1, 1, 0], [1, 1, 1, 1, 1, 1, 0], [1, 0, 1, 1, 1, 1, 0], [1, 1, 0, 1, 1, 1, 0], [0, 0, 0, 0, 0, 1, 1], [0, 0, 1, 0, 1, 1, 1], [1, 0, 1, 0, 0, 1, 1], [1, 0, 0, 0, 1, 1, 1], [1, 0, 0, 0, 0, 1, 1], [1, 0, 1, 0, 1, 1, 1], [1, 0, 1, 0, 0, 1, 1], [1, 0, 0, 0, 1, 1, 1], [0, 0, 0, 1, 1, 1, 1], [0, 0, 1, 1, 1, 1, 1], [1, 0, 1, 1, 1, 1, 1], [1, 0, 0, 1, 1, 1, 1], [1, 0, 0, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1], [1, 0, 1, 1, 1, 1, 1], [1, 1, 0, 1, 1, 1, 1], [0, 0, 0, 1, 1, 1, 1], [0, 0, 1, 1, 1, 1, 1], [1, 0, 1, 1, 1, 1, 1], [1, 0, 0, 1, 1, 1, 1], [1, 0, 0, 1, 1, 1, 1], [1, 0, 1, 1, 1, 1, 1], [1, 0, 1, 1, 1, 1, 1], [1, 0, 0, 1, 1, 1, 1], [0, 0, 0, 1, 1, 1, 1], [0, 0, 1, 1, 1, 1, 1], [1, 0, 1, 1, 1, 1, 1], [1, 0, 0, 1, 1, 1, 1], [1, 0, 0, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1], [1, 0, 1, 1, 1, 1, 1], [1, 1, 0, 1, 1, 1, 1] ]) cm = np.array([ [0, 1, 1, 0, 1, 0, 0], [1, 0, 1, 0, 0, 0, 0], [1, 1, 0, 0, 0, 0, 0], [0, 1, 0, 1, 1, 0, 0], [0, 0, 0, 1, 1, 0, 0], [0, 0, 0, 0, 0, 1, 1], [0, 0, 0, 0, 0, 1, 1] ]) return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
The network shown in Figure 5B of the 2014 IIT 3.0 paper.
387,799
def sanitize_type(raw_type):
    cleaned = get_printable(raw_type).strip()
    for bad in [
        # (nine regex patterns elided in the source)
    ]:
        cleaned = re.sub(bad, '', cleaned).strip()
    if cleaned in [...]:  # (literals elided in the source)
        cleaned = cleaned.strip()
    cleaned = cleaned.replace(...)  # (arguments elided in the source)
    return cleaned
Sanitize the raw type string.
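Since the source's pattern list was stripped, here is a self-contained analogue with made-up patterns that shows the strip-and-normalize shape of the function:

import re

def sanitize_demo(raw_type):
    cleaned = raw_type.strip()
    # Illustrative patterns only; the original list is not recoverable.
    for bad in [r'\bconst\b', r'\bvolatile\b', r'\bstruct\b']:
        cleaned = re.sub(bad, '', cleaned).strip()
    return cleaned.replace('  ', ' ')

print(sanitize_demo('const struct foo *'))  # foo *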