Dataset columns: Unnamed: 0 (int64, values 0 to 389k), code (string, lengths 26 to 79.6k), docstring (string, lengths 1 to 46.9k).
18,300
def write_wrapped(self, s, extra_room=0):
    if self.room < len(s) + extra_room:
        self.write_soft_break()
    self.write_str(s)
Add a soft line break if needed, then write s.
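A minimal self-contained sketch of the same soft-wrap pattern (the class name, width field, and buffering here are hypothetical, not from the source):

class LineWriter:
    def __init__(self, width=79):
        self.width = width
        self.room = width  # space left on the current line
        self.parts = []

    def write_str(self, s):
        self.parts.append(s)
        self.room -= len(s)

    def write_soft_break(self):
        self.parts.append("\n")
        self.room = self.width

    def write_wrapped(self, s, extra_room=0):
        # Break first if s (plus any reserved room) would not fit on this line.
        if self.room < len(s) + extra_room:
            self.write_soft_break()
        self.write_str(s)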
18,301
def _login(self, csrf_token): login_response = self.session.post( self.URLS[], data={ : self._get_provider_option() or , : self._get_provider_option() or , : , : csrf_token, } ) self._log(, login_response) assert login_response.status_code == 200, \ assert login_response.url == self.URLS[], \
Attempt to login session on easyname.
18,302
def _prep_binary_content(self): if not self.data and not self.location and not in self.resource.headers.keys(): raise Exception() elif in self.resource.headers.keys(): logger.debug() self.delivery = elif not in self.resource.headers.keys(): if self.location: self.resource.headers[] = self.location self.delivery = elif self.data: if isinstance(self.data, io.BufferedIOBase): logger.debug() self.delivery = else: logger.debug() self.delivery =
Sets delivery method of either payload or header. Favors the Content-Location header if set. Args: None Returns: None: sets attributes in self.binary and headers
18,303
def _cleanup(self): self._declare_cb = None self._bind_cb = None self._unbind_cb = None self._delete_cb = None self._purge_cb = None super(QueueClass, self)._cleanup()
Cleanup all the local data.
18,304
def choose_branch(exclude=None):
    if exclude is None:
        master = conf.get(, )
        develop = conf.get(, )
        exclude = {master, develop}

    branches = list(set(git.branches()) - exclude)

    for i, branch_name in enumerate(branches):
        shell.cprint(.format(i + 1, branch_name))

    choice = 0
    while choice < 1 or choice > len(branches):
        prompt = "Pick a base branch from the above [1-{}]".format(len(branches))
        choice = click.prompt(prompt, value_proc=int)
        if not (1 <= choice <= len(branches)):
            fmt = "Invalid choice {}, you must pick a number between {} and {}"
            log.err(fmt.format(choice, 1, len(branches)))

    return branches[choice - 1]
Show the user a menu to pick a branch from the existing ones. Args: exclude (list[str]): List of branch names to exclude from the menu. By default it will exclude master and develop branches. To show all branches pass an empty array here. Returns: str: The name of the branch chosen by the user. If the user inputs an invalid choice, they will be asked again (and again) until a valid branch is picked.
18,305
def _qteUpdateLabelWidths(self): layout = self.layout() for ii in range(layout.count()): label = layout.itemAt(ii) layout.removeItem(label) for item in self._qteModeList: label = item[2] width = label.fontMetrics().size(0, str(item[1])).width() label.setMaximumWidth(width) label.setMinimumWidth(width) layout.addWidget(label) _, _, label = self._qteModeList[-1] label.setMaximumWidth(1600000)
Ensure all but the last ``QLabel`` are only as wide as necessary. The width of the last label is manually set to a large value to ensure that it stretches as much as possible. The height of all widgets is also set appropriately. The method also takes care of rearranging the widgets in the correct order, i.e. in the order specified by ``self._qteModeList``. |Args| * **None** |Returns| * **None** |Raises| * **None**
18,306
def publish(self, name, data, userList): self.broadcast(userList, { "name": name, "data": SockJSDefaultHandler._parser.encode(data) })
Publish data
18,307
def provider(self, name, history=None):
    if history is None:
        history = []
    history.append(self)
    if name in self.definitions:
        return self
    for x in self.links:
        if x in history:
            continue
        provider = x.provider(name, history)
        if provider is not None:
            return provider
    history.remove(self)
    if len(history):
        return None
    return self
Find the provider of the property by I{name}. @param name: The property name. @type name: str @param history: A history of nodes checked to prevent circular hunting. @type history: [L{Properties},..] @return: The provider when found. Otherwise, None (when nested) and I{self} when not nested. @rtype: L{Properties}
18,308
def AsPrimitiveProto(self):
    if self.protobuf:
        result = self.protobuf()
        result.ParseFromString(self.SerializeToString())
        return result
Return an old style protocol buffer object.
18,309
def targetSigma2(self, R, log=False):
    return self._surfaceSigmaProfile.sigma2(R, log=log)
NAME: targetSigma2 PURPOSE: evaluate the target Sigma_R^2(R) INPUT: R - radius at which to evaluate (can be Quantity) log - if True, return the log (default: False) OUTPUT: target Sigma_R^2(R) HISTORY: 2010-03-28 - Written - Bovy (NYU)
18,310
def _validate_arch(self, arch=None):
    if not arch:
        arch = win32.arch
    if arch not in self.supported:
        msg = "The %s engine cannot decode %s code."
        msg = msg % (self.name, arch)
        raise NotImplementedError(msg)
    return arch
@type arch: str @param arch: Name of the processor architecture. If not provided the current processor architecture is assumed. For more details see L{win32.version._get_arch}. @rtype: str @return: Name of the processor architecture. If not provided the current processor architecture is assumed. For more details see L{win32.version._get_arch}. @raise NotImplementedError: This disassembler doesn't support the requested processor architecture.
18,311
def ver(self, value): if value == self._defaults[] and in self._values: del self._values[] else: self._values[] = value
The ver property. Args: value (int). the property value.
18,312
def create_namespaced_endpoints(self, namespace, body, **kwargs):
    # Key names restored from the generated kubernetes-client conventions;
    # the docstring below confirms the async_req flag.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_namespaced_endpoints_with_http_info(namespace, body, **kwargs)
    else:
        data = self.create_namespaced_endpoints_with_http_info(namespace, body, **kwargs)
        return data
create Endpoints This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_namespaced_endpoints(namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1Endpoints body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or equal to 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :return: V1Endpoints If the method is called asynchronously, returns the request thread.
18,313
def _call_geocoder( self, url, timeout=DEFAULT_SENTINEL, raw=False, requester=None, deserializer=json.loads, **kwargs ): if requester: req = url status_code = page.status_code else: status_code = None if status_code in ERROR_CODE_MAP: raise ERROR_CODE_MAP[page.status_code]("\n%s" % decode_page(page)) if raw: return page page = decode_page(page) if deserializer is not None: try: return deserializer(page) except ValueError: raise GeocoderParseError( "Could not deserialize using deserializer:\n%s" % page ) else: return page
For a generated query URL, get the results.
18,314
def every_other(x, name=None):
    # The default op name is stated in the docstring below.
    with tf.name_scope(name, 'every_other', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        return tf.reshape(
            tf.slice(tf.reshape(x, [-1, 2]), [0, 0], [-1, 1]),
            [-1],
            name=scope)
Drops every other value from the tensor and returns a 1D tensor. This is useful if you are running multiple inputs through a model tower before splitting them and you want to line it up with some other data. Args: x: the target tensor. name: the name for this op, defaults to every_other Returns: A tensorflow op.
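A hedged NumPy re-statement of the reshape/slice trick, handy for checking the drop-every-other behavior outside TensorFlow:

import numpy as np

x = np.array([10, 11, 20, 21, 30, 31])
# Pair consecutive values, keep the first of each pair, flatten back to 1-D.
print(x.reshape(-1, 2)[:, 0])  # [10 20 30]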
18,315
def forum_topic_undelete(self, topic_id): return self._get(.format(topic_id), method=, auth=True)
Undelete a topic (login required) (Moderator+) (UNTESTED). Parameters: topic_id (int): The topic id.
18,316
def textwidth(self, text, config): surface = cairo.SVGSurface(None, 1280, 200) ctx = cairo.Context(surface) ctx.select_font_face(config[], cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_BOLD) ctx.set_font_size(int(config[])) return ctx.text_extents(text)[2] + 2
Calculates the width of the specified text.
18,317
def match_status_code(self, entry, status_code, regex=True):
    # Keys assume the standard HAR layout entry['response']['status'];
    # the original literals were lost in extraction.
    if regex:
        return re.search(status_code, str(entry['response']['status'])) is not None
    else:
        return str(entry['response']['status']) == status_code
Helper function that returns entries with a status code matching the given `status_code` argument. NOTE: This is doing a STRING comparison NOT NUMERICAL :param entry: entry object to analyze :param status_code: ``str`` of status code to search for :param regex: if ``True`` (the default), match ``status_code`` as a regular expression
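A short illustration of the string-vs-regex distinction, assuming the standard HAR layout entry['response']['status']:

import re

entry = {'response': {'status': 404}}
print(re.search('4..', str(entry['response']['status'])) is not None)  # True (regex)
print(str(entry['response']['status']) == '404')  # True (exact string)
print(str(entry['response']['status']) == '4..')  # False (no regex applied)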
18,318
def map(self, options=None):
    for path, data in self.paths.items():
        references = data.get("references", [])
        for item in data["items"]:
            for obj in self.create_class(item, options, references=references):
                self.add_object(obj)
    self.organize_objects()
Trigger find of serialized sources and build objects
18,319
def build_loss(model_logits, sparse_targets):
    time_major_shape = [FLAGS.unroll_steps, FLAGS.batch_size]
    flat_batch_shape = [FLAGS.unroll_steps * FLAGS.batch_size, -1]
    xent = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=tf.reshape(model_logits, flat_batch_shape),
        labels=tf.reshape(sparse_targets, flat_batch_shape[:-1]))
    xent = tf.reshape(xent, time_major_shape)
    sequence_neg_log_prob = tf.reduce_sum(xent, axis=0)
    return tf.reduce_mean(sequence_neg_log_prob, axis=0)
Compute the log loss given predictions and targets.
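A small NumPy check of the reduction order (sum over time, then mean over batch), using hypothetical per-token losses:

import numpy as np

xent = np.array([[0.1, 0.2],
                 [0.3, 0.4],
                 [0.5, 0.6]])  # shape [unroll_steps, batch_size]
sequence_neg_log_prob = xent.sum(axis=0)  # per-sequence loss: [0.9, 1.2]
print(sequence_neg_log_prob.mean())  # batch-mean loss: ~1.05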
18,320
def find_ports(device): bus_id = device.bus dev_id = device.address for dirent in os.listdir(USB_SYS_PREFIX): matches = re.match(USB_PORTS_STR + , dirent) if matches: bus_str = readattr(dirent, ) if bus_str: busnum = float(bus_str) else: busnum = None dev_str = readattr(dirent, ) if dev_str: devnum = float(dev_str) else: devnum = None if busnum == bus_id and devnum == dev_id: return str(matches.groups()[1])
Find the port chain a device is plugged on. This is done by searching sysfs for a device that matches the device bus/address combination. Useful when the underlying usb lib does not return device.port_number for whatever reason.
18,321
def initialize_page_data(self): if self.term.is_a_tty: self.display_initialize() self.character_generator = self.character_factory(self.screen.wide) page_data = list() while True: try: page_data.append(next(self.character_generator)) except StopIteration: break if LIMIT_UCS == 0x10000: echo(self.term.center().rstrip()) flushout() self.term.inkey(timeout=None) return page_data
Initialize the page data for the given screen.
18,322
def labeled_intervals(intervals, labels, label_set=None, base=None, height=None, extend_labels=True, ax=None, tick=True, **kwargs): ax, _ = __get_axes(ax=ax) intervals = np.atleast_2d(intervals) if label_set is None: label_set = [_.get_text() for _ in ax.get_yticklabels()] if not any(label_set): label_set = [] else: label_set = list(label_set) if extend_labels: ticks = label_set + sorted(set(labels) - set(label_set)) elif label_set: ticks = label_set else: ticks = sorted(set(labels)) style = dict(linewidth=1) style.update(next(ax._get_patches_for_fill.prop_cycler)) style[] = style.pop() style.update(kwargs) if base is None: base = np.arange(len(ticks)) if height is None: height = 1 if np.isscalar(height): height = height * np.ones_like(base) seg_y = dict() for ybase, yheight, lab in zip(base, height, ticks): seg_y[lab] = (ybase, yheight) xvals = defaultdict(list) for ival, lab in zip(intervals, labels): if lab not in seg_y: continue xvals[lab].append((ival[0], ival[1] - ival[0])) for lab in seg_y: ax.add_collection(BrokenBarHCollection(xvals[lab], seg_y[lab], **style)) style.pop(, None) if label_set != ticks: ax.axhline(len(label_set), color=, alpha=0.5) if tick: ax.grid(True, axis=) ax.set_yticks([]) ax.set_yticks(base) ax.set_yticklabels(ticks, va=) ax.yaxis.set_major_formatter(IntervalFormatter(base, ticks)) if base.size: __expand_limits(ax, [base.min(), (base + height).max()], which=) if intervals.size: __expand_limits(ax, [intervals.min(), intervals.max()], which=) return ax
Plot labeled intervals with each label on its own row. Parameters ---------- intervals : np.ndarray, shape=(n, 2) segment intervals, in the format returned by :func:`mir_eval.io.load_intervals` or :func:`mir_eval.io.load_labeled_intervals`. labels : list, shape=(n,) reference segment labels, in the format returned by :func:`mir_eval.io.load_labeled_intervals`. label_set : list An (ordered) list of labels to determine the plotting order. If not provided, the labels will be inferred from ``ax.get_yticklabels()``. If no ``yticklabels`` exist, then the sorted set of unique values in ``labels`` is taken as the label set. base : np.ndarray, shape=(n,), optional Vertical positions of each label. By default, labels are positioned at integers ``np.arange(len(labels))``. height : scalar or np.ndarray, shape=(n,), optional Height for each label. If scalar, the same value is applied to all labels. By default, each label has ``height=1``. extend_labels : bool If ``False``, only values of ``labels`` that also exist in ``label_set`` will be shown. If ``True``, all labels are shown, with those in `labels` but not in `label_set` appended to the top of the plot. A horizontal line is drawn to indicate the separation between values in or out of ``label_set``. ax : matplotlib.pyplot.axes An axis handle on which to draw the intervals. If none is provided, a new set of axes is created. tick : bool If ``True``, sets tick positions and labels on the y-axis. kwargs Additional keyword arguments to pass to `matplotlib.collection.BrokenBarHCollection`. Returns ------- ax : matplotlib.pyplot.axes._subplots.AxesSubplot A handle to the (possibly constructed) plot axes
18,323
def run(addr, *commands, **kwargs): results = [] handler = VarnishHandler(addr, **kwargs) for cmd in commands: if isinstance(cmd, tuple) and len(cmd)>1: results.extend([getattr(handler, c[0].replace(,))(*c[1:]) for c in cmd]) else: results.append(getattr(handler, cmd.replace(,))(*commands[1:])) break handler.close() return results
Non-threaded batch command runner returning output results
18,324
def getWidget(self,**kwargs): from .widget import Widget from ipywidgets import DOMWidget from IPython.display import display, HTML if not hasattr(self, ): self._widgets = [] def display_heartbeat(simp): for w in self._widgets: w.refresh(simp,isauto=1) self.visualization = VISUALIZATIONS["webgl"] clibrebound.reb_display_init_data(byref(self)); self._dhbf = AFF(display_heartbeat) self._display_heartbeat = self._dhbf display(HTML(Widget.getClientCode())) newWidget = Widget(self,**kwargs) self._widgets.append(newWidget) newWidget.refresh(isauto=0) return newWidget
Wrapper function that returns a new widget attached to this simulation. Widgets provide real-time 3D visualizations from within a Jupyter notebook. See the Widget class for more details on the possible arguments. Arguments --------- All arguments passed to this wrapper function will be passed to the Widget class. Returns ------- A rebound.Widget object. Examples -------- >>> sim = rebound.Simulation() >>> sim.add(m=1.) >>> sim.add(m=1.e-3,x=1.,vy=1.) >>> sim.getWidget()
18,325
def find_warnings(content): keywords = [k.lower() for k in [ "WARNING", "Couldnt read", "marked missing", "Attempt to close device", "Ignoring supplied major", "not match metadata" ]] for l in content: lower = l.strip().lower() if not lower.startswith(): if any(k in lower for k in keywords): yield l
Look for lines containing warning/error/info strings instead of data.
18,326
def add(self, value):
    if not isinstance(value, float):
        value = float(value)
    return self._do_add(value)
Add a value to the reservoir. The value will be cast to a floating-point number, so a TypeError or a ValueError may be raised.
18,327
def __create(self, options, collation, session):
    cmd = SON([("create", self.__name)])
    if options:
        if "size" in options:
            options["size"] = float(options["size"])
        cmd.update(options)
    with self._socket_for_writes(session) as sock_info:
        self._command(
            sock_info,
            cmd,
            read_preference=ReadPreference.PRIMARY,
            write_concern=self._write_concern_for(session),
            collation=collation,
            session=session)
Sends a create command with the given options.
18,328
def hangup_all_calls(self): path = + self.api_version + method = return self.request(path, method)
REST Hangup All Live Calls Helper
18,329
def argsort(self, axis=-1, kind="quicksort", order=None):
    return self.view(np.ndarray).argsort(axis, kind, order)
Returns the indices that would sort the array. See the documentation of ndarray.argsort for details about the keyword arguments. Example ------- >>> from unyt import km >>> data = [3, 8, 7]*km >>> print(np.argsort(data)) [0 2 1] >>> print(data.argsort()) [0 2 1]
18,330
def retweets(self, tweet_id):
    log.info("retrieving retweets of %s", tweet_id)
    url = "https://api.twitter.com/1.1/statuses/retweets/{}.json".format(tweet_id)
    resp = self.get(url, params={"count": 100})
    for tweet in resp.json():
        yield tweet
Retrieves up to the last 100 retweets for the provided tweet.
18,331
def all(self, command, params=None): dr = self.query(command, params) return dr[]
Returns the rows of the response obtained via query > db.query('SELECT * FROM users WHERE id=:id', {"id": MY_USER_ID}) :param command: SQL query :param params: Parameters for prepared statements :rtype: list of dict
18,332
def try_handle_route(self, route_uri, method, request, uri, headers): uri_path = route_uri if in uri: logger.debug( .format(self.__id, self.name)) uri_path, uri_qs = uri.split() logger.debug( .format(self.__id, self.name, uri_path, uri_qs)) for k, v in six.iteritems(self.routes): logger.debug( .format(self.__id, self.name, v[])) logger.debug( .format(self.__id, self.name, v[].pattern, uri_path)) if v[].match(uri_path): logger.debug( .format(self.__id, self.name, v[], method)) return v[](method, request, uri, headers) return (595, headers, .format(uri))
Try to handle the supplied request on the specified routing URI. :param route_uri: string - URI of the request :param method: string - HTTP Verb :param request: request object describing the HTTP request :param uri: URI of the request :param headers: case-insensitive headers dict :returns: tuple - (int, dict, string) containing: int - the http response status code dict - the headers for the http response string - http string response
18,333
def decorate_client(api_client, func, name):
    client_attr = getattr(api_client, name)
    if not callable(client_attr):
        return client_attr
    return OperationDecorator(client_attr, functools.partial(func, name))
A helper for decorating :class:`bravado.client.SwaggerClient`. :class:`bravado.client.SwaggerClient` can be extended by creating a class which wraps all calls to it. This helper is used in a :func:`__getattr__` to check if the attr exists on the api_client. If the attr does not exist raise :class:`AttributeError`, if it exists and is not callable return it, and if it is callable return a partial function calling `func` with `name`. Example usage: .. code-block:: python class SomeClientDecorator(object): def __init__(self, api_client, ...): self.api_client = api_client # First arg should be suffiently unique to not conflict with any of # the kwargs def wrap_call(self, client_call_name, *args, **kwargs): ... def __getattr__(self, name): return decorate_client(self.api_client, self.wrap_call, name) :param api_client: the client which is being decorated :type api_client: :class:`bravado.client.SwaggerClient` :param func: a callable which accepts `name`, `*args`, `**kwargs` :type func: callable :param name: the attribute being accessed :type name: string :returns: the attribute from the `api_client` or a partial of `func` :raises: :class:`AttributeError`
18,334
def set_state(self, state=None, **kwargs): D = state or {} D.update(kwargs) for key, val in D.items(): if key not in self._state_props: raise KeyError( % key) setattr(self, key, val)
Set the view state of the camera. Should be a dict (or kwargs) as returned by get_state. It can be an incomplete dict, in which case only the specified properties are set. Parameters ---------- state : dict The camera state. **kwargs : dict Camera state properties given as keyword arguments.
18,335
def change_svc_snapshot_command(self, service, snapshot_command):
    service.modified_attributes |= DICT_MODATTR["MODATTR_EVENT_HANDLER_COMMAND"].value
    data = {"commands": self.commands, "call": snapshot_command}
    service.change_snapshot_command(data)
    self.send_an_element(service.get_update_status_brok())
Modify service snapshot command Format of the line that triggers function call:: CHANGE_SVC_SNAPSHOT_COMMAND;<host_name>;<service_description>;<snapshot_command> :param service: service to modify snapshot command :type service: alignak.objects.service.Service :param snapshot_command: snapshot command command line :type snapshot_command: str :return: None
18,336
def blastparse(self): logging.info() for sample in self.runmetadata.samples: if sample.general.bestassemblyfile != : dbrecords = SeqIO.to_dict(SeqIO.parse(sample[self.analysistype].baitfile, )) if os.path.isfile(sample[self.analysistype].blastreport): sample[self.analysistype].frequency = dict() blastdict = DictReader(open(sample[self.analysistype].blastreport), fieldnames=self.fieldnames, dialect=) recorddict = dict() for record in blastdict: subject = record[] genus = dbrecords[subject].description.split()[-1].split()[0] try: sample[self.analysistype].frequency[genus] += 1 except KeyError: sample[self.analysistype].frequency[genus] = 1 try: recorddict[dbrecords[subject].description] += 1 except KeyError: recorddict[dbrecords[subject].description] = 1 sample[self.analysistype].sortedgenera = sorted(sample[self.analysistype].frequency.items(), key=operator.itemgetter(1), reverse=True) try: sample[self.analysistype].genus = sample[self.analysistype].sortedgenera[0][0] except IndexError: sample[self.analysistype].sortedgenera = sample[self.analysistype].genus = else: sample[self.analysistype].sortedgenera = sample[self.analysistype].genus = else: sample[self.analysistype].sortedgenera = sample[self.analysistype].genus =
Parse the blast results, and store necessary data in dictionaries in sample object
18,337
def _get_ctx(self):
    if self._WEB_CTX_KEY not in web.ctx:
        web.ctx[self._WEB_CTX_KEY] = {
            "javascript": {"footer": [], "header": []},
            "css": []}
    return web.ctx.get(self._WEB_CTX_KEY)
Get web.ctx object for the Template helper
18,338
def get_resource(self, resource_key, **variables):
    handle = self.make_resource_handle(resource_key, **variables)
    return self.get_resource_from_handle(handle, verify_repo=False)
Get a resource. Attempts to get and return a cached version of the resource if available, otherwise a new resource object is created and returned. Args: resource_key (`str`): Name of the type of `Resources` to find variables: data to identify / store on the resource Returns: `PackageRepositoryResource` instance.
18,339
def sph2cart(r, az, elev):
    x = r * np.cos(az) * np.sin(elev)
    y = r * np.sin(az) * np.sin(elev)
    z = r * np.cos(elev)
    return x, y, z
Convert spherical to cartesian coordinates. Attributes ---------- r : float radius az : float aziumth (angle about z axis) elev : float elevation from xy plane Returns ------- float x-coordinate float y-coordinate float z-coordinate
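A quick usage check; note that with these formulas elev=0 maps to the +z pole (z = r), so elev behaves as an inclination measured from the z axis rather than an elevation from the xy plane:

import numpy as np

x, y, z = sph2cart(r=1.0, az=0.0, elev=np.pi / 2)
print(round(x, 6), round(y, 6), round(z, 6))  # 1.0 0.0 0.0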
18,340
def dataset_publication_finished(self, ignore_exception=False): self.__check_if_dataset_publication_allowed_right_now() self.__check_data_consistency(ignore_exception) self.__coupler.start_rabbit_business() self.__create_and_send_dataset_publication_message_to_queue() self.__send_existing_file_messages_to_queue() self.__coupler.done_with_rabbit_business() self.__set_machine_state_to_finished() loginfo(LOGGER, , self.__drs_id, self.__version_number, self.__data_node, self.__dataset_handle)
This is the "commit". It triggers the creation/update of handles. * Check if the set of files corresponds to the previously published set (if applicable, and if solr url given, and if solr replied) * The dataset publication message is created and sent to the queue. * All file publication messages are sent to the queue.
18,341
def prepare_payload(op, method, uri, data):
    query = [] if op is None else [("op", op)]

    def slurp(opener):
        with opener() as fd:
            return fd.read()

    if method == "GET":
        headers, body = [], None
        query.extend(
            (name, slurp(value) if callable(value) else value)
            for name, value in data)
    else:
        message = build_multipart_message(data)
        headers, body = encode_multipart_message(message)

    uri = urlparse(uri)._replace(query=urlencode(query)).geturl()
    return uri, body, headers
Return the URI (modified perhaps) and body and headers. - For GET requests, encode parameters in the query string. - Otherwise always encode parameters in the request body. - Except op; this can always go in the query string. :param method: The HTTP method. :param uri: The URI of the action. :param data: An iterable of ``name, value`` or ``name, opener`` tuples (see `name_value_pair`) to pack into the body or query, depending on the type of request.
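The query-string handling relies only on the standard library; a hedged sketch of the same urlparse/urlencode round trip (the endpoint and parameters below are made up):

from urllib.parse import urlencode, urlparse

uri = 'http://example.com/api/2.0/machines/'
query = [('op', 'allocate'), ('hostname', 'node1')]
print(urlparse(uri)._replace(query=urlencode(query)).geturl())
# http://example.com/api/2.0/machines/?op=allocate&hostname=node1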
18,342
def boxed(msg, ch="=", pad=5):
    if pad > 0:
        msg = pad * ch + " " + msg.strip() + " " + pad * ch
    return "\n".join([len(msg) * ch, msg, len(msg) * ch])
Returns a string in a box Args: msg: Input string. ch: Character used to form the box. pad: Number of characters ch added before and after msg. >>> print(boxed("hello", ch="*", pad=2)) *********** ** hello ** ***********
18,343
def get_results(self, metadata=False): results_data = [] self.process_har() self.process_from_splash() for rt in sorted(self._results.get_results()): rdict = {: rt.name} if rt.version: rdict[] = rt.version if metadata: rdict[] = rt.homepage rdict[] = rt.type rdict[] = rt.from_url rdict[] = rt.plugin results_data.append(rdict) return results_data
Return results of the analysis.
18,344
def _get_json(endpoint, params, referer=): h = dict(HEADERS) h[] = .format(ref=referer) _get = get(BASE_URL.format(endpoint=endpoint), params=params, headers=h) _get.raise_for_status() return _get.json()
Internal method to streamline our requests / json getting Args: endpoint (str): endpoint to be called from the API params (dict): parameters to be passed to the API Raises: HTTPError: if requests hits a status code != 200 Returns: json (json): json object for selected API call
18,345
def group_dict(self, group: str) -> Dict[str, Any]:
    return dict(
        (opt.name, opt.value())
        for name, opt in self._options.items()
        if not group or group == opt.group_name
    )
The names and values of options in a group. Useful for copying options into Application settings:: from tornado.options import define, parse_command_line, options define('template_path', group='application') define('static_path', group='application') parse_command_line() application = Application( handlers, **options.group_dict('application')) .. versionadded:: 3.1
18,346
def apply_connectivity_changes(self, request):
    if request is None or request == "":
        raise Exception(self.__class__.__name__, "request is None or empty")

    holder = JsonRequestDeserializer(jsonpickle.decode(request))
    if not holder or not hasattr(holder, "driverRequest"):
        raise Exception(self.__class__.__name__, "Deserialized request is None or empty")

    driver_response = DriverResponse()
    add_vlan_thread_list = []
    remove_vlan_thread_list = []
    driver_response_root = DriverResponseRoot()

    for action in holder.driverRequest.actions:
        self._logger.info("Action: ", action.__dict__)
        self._validate_request_action(action)

        action_id = action.actionId
        full_name = action.actionTarget.fullName
        port_mode = action.connectionParams.mode.lower()

        if action.type == "setVlan":
            qnq = False
            ctag = ""
            for attribute in action.connectionParams.vlanServiceAttributes:
                if attribute.attributeName.lower() == "qnq" and attribute.attributeValue.lower() == "true":
                    qnq = True
                if attribute.attributeName.lower() == "ctag":
                    ctag = attribute.attributeValue
            for vlan_id in self._get_vlan_list(action.connectionParams.vlanId):
                add_vlan_thread = Thread(target=self.add_vlan, name=action_id,
                                         args=(vlan_id, full_name, port_mode, qnq, ctag))
                add_vlan_thread_list.append(add_vlan_thread)
        elif action.type == "removeVlan":
            for vlan_id in self._get_vlan_list(action.connectionParams.vlanId):
                remove_vlan_thread = Thread(target=self.remove_vlan, name=action_id,
                                            args=(vlan_id, full_name, port_mode,))
                remove_vlan_thread_list.append(remove_vlan_thread)
        else:
            self._logger.warning("Undefined action type determined : {}".format(action.type, action.__dict__))
            continue

    for thread in remove_vlan_thread_list:
        thread.start()
    for thread in remove_vlan_thread_list:
        thread.join()
    for thread in add_vlan_thread_list:
        thread.start()
    for thread in add_vlan_thread_list:
        thread.join()

    request_result = []
    for action in holder.driverRequest.actions:
        result_statuses, message = zip(*self.result.get(action.actionId))
        if all(result_statuses):
            action_result = ConnectivitySuccessResponse(
                action,
                "Add Vlan {vlan} configuration successfully completed".format(
                    vlan=action.connectionParams.vlanId))
        else:
            message_details = "\n\t".join(message)
            action_result = ConnectivityErrorResponse(
                action,
                "Add Vlan {vlan} configuration failed."
                "\nAdd Vlan configuration details:\n{message_details}".format(
                    vlan=action.connectionParams.vlanId,
                    message_details=message_details))
        request_result.append(action_result)

    driver_response.actionResults = request_result
    driver_response_root.driverResponse = driver_response
    return serialize_to_json(driver_response_root)
Handle an apply-connectivity-changes request JSON, trigger the add or remove VLAN methods, collect their responses and build a JSON response :param request: json with all required actions to configure or remove vlans from certain port :return Serialized DriverResponseRoot to json :rtype json
18,347
def get_client(client=None):
    global _client

    tmp_client = client is not None
    if not tmp_client:
        config = getattr(django_settings, "ELASTIC_APM", {})
        client = config.get("CLIENT", default_client_class)

    if _client[0] != client:
        client_class = import_string(client)
        instance = client_class()
        if not tmp_client:
            _client = (client, instance)
        return instance
    return _client[1]
Get an ElasticAPM client. :param client: :return: :rtype: elasticapm.base.Client
18,348
def generate_password(self) -> list:
    characterset = self._get_password_characters()
    if self.passwordlen is None or not characterset:
        raise ValueError("Cant set")
    password = []
    for _ in range(0, self.passwordlen):
        password.append(randchoice(characterset))
    self.last_result = password
    return password
Generate a list of random characters.
18,349
def _add_cloned_sers(self, plotArea, count):
    def clone_ser(ser):
        new_ser = deepcopy(ser)
        new_ser.idx.val = plotArea.next_idx
        new_ser.order.val = plotArea.next_order
        ser.addnext(new_ser)
        return new_ser

    last_ser = plotArea.last_ser
    for _ in range(count):
        last_ser = clone_ser(last_ser)
Add `c:ser` elements to the last xChart element in *plotArea*, cloned from the last `c:ser` child of that last xChart.
18,350
def connectAlt(cls, redisConnectionParams): if not isinstance(redisConnectionParams, dict): raise ValueError() hashVal = hashDictOneLevel(redisConnectionParams) modelDictCopy = copy.deepcopy(dict(cls.__dict__)) modelDictCopy[] = redisConnectionParams ConnectedIndexedRedisModel = type( + cls.__name__ + str(hashVal), cls.__bases__, modelDictCopy) return ConnectedIndexedRedisModel
connectAlt - Create a class of this model which will use an alternate connection than the one specified by REDIS_CONNECTION_PARAMS on this model. @param redisConnectionParams <dict> - Dictionary of arguments to redis.Redis, same as REDIS_CONNECTION_PARAMS. @return - A class that can be used in all the same ways as the existing IndexedRedisModel, but that connects to a different instance. The fields and key will be the same here, but the connection will be different. use #copyModel if you want an independent class for the model
18,351
def copy(self):
    s = self.to_json()
    cpy = parameter_from_json(s)
    cpy.set_uniqueid(_uniqueid())
    return cpy
Deepcopy the parameter (with a new uniqueid). All other tags will remain the same... so some other tag should be changed before attaching back to a ParameterSet or Bundle. :return: the copied :class:`Parameter` object
18,352
def get_mode_group(self, group):
    hmodegroup = self._libinput.libinput_device_tablet_pad_get_mode_group(
        self._handle, group)
    if hmodegroup:
        return TabletPadModeGroup(hmodegroup, self._libinput)
    return None
While a reference is kept by the caller, the returned mode group will compare equal with mode group returned by each subsequent call of this method with the same index and mode group returned from :attr:`~libinput.event.TabletPadEvent.mode_group`, provided the event was generated by this mode group. Args: group (int): A mode group index. Returns: ~libinput.define.TabletPadModeGroup: The mode group with the given index or :obj:`None` if an invalid index is given.
18,353
def load_metadata_for_topics(self, *topics): topics = [kafka_bytestring(t) for t in topics] if topics: for topic in topics: self.reset_topic_metadata(topic) else: self.reset_all_metadata() resp = self.send_metadata_request(topics) log.debug(, resp.brokers) log.debug(, resp.topics) self.brokers = dict([(broker.nodeId, broker) for broker in resp.brokers]) for topic_metadata in resp.topics: topic = topic_metadata.topic partitions = topic_metadata.partitions try: kafka_common.check_error(topic_metadata) except (UnknownTopicOrPartitionError, LeaderNotAvailableError) as e: if topic in topics: raise log.error(, topic, type(e)) continue self.topic_partitions[topic] = {} for partition_metadata in partitions: partition = partition_metadata.partition leader = partition_metadata.leader self.topic_partitions[topic][partition] = partition_metadata topic_part = TopicAndPartition(topic, partition) try: kafka_common.check_error(partition_metadata) except LeaderNotAvailableError: log.error(, topic, partition) self.topics_to_brokers[topic_part] = None continue except ReplicaNotAvailableError: log.debug(, topic, partition) if leader in self.brokers: self.topics_to_brokers[topic_part] = self.brokers[leader] else: self.topics_to_brokers[topic_part] = BrokerMetadata( leader, None, None )
Fetch broker and topic-partition metadata from the server, and update internal data: broker list, topic/partition list, and topic/parition -> broker map This method should be called after receiving any error Arguments: *topics (optional): If a list of topics is provided, the metadata refresh will be limited to the specified topics only. Exceptions: ---------- If the broker is configured to not auto-create topics, expect UnknownTopicOrPartitionError for topics that don't exist If the broker is configured to auto-create topics, expect LeaderNotAvailableError for new topics until partitions have been initialized. Exceptions *will not* be raised in a full refresh (i.e. no topic list) In this case, error codes will be logged as errors Partition-level errors will also not be raised here (a single partition w/o a leader, for example)
18,354
def ldirectory(inpath, outpath, args, scope): yacctab = if args.debug else None if not outpath: sys.exit("Compile directory option needs -o ...") else: if not os.path.isdir(outpath): if args.verbose: print("Creating " % outpath, file=sys.stderr) if not args.dry_run: os.mkdir(outpath) less = glob.glob(os.path.join(inpath, )) f = formatter.Formatter(args) for lf in less: outf = os.path.splitext(os.path.basename(lf)) minx = if args.min_ending else outf = "%s/%s%s.css" % (outpath, outf[0], minx) if not args.force and os.path.exists(outf): recompile = os.path.getmtime(outf) < os.path.getmtime(lf) else: recompile = True if recompile: print( % (lf, outf)) p = parser.LessParser( yacc_debug=(args.debug), lex_optimize=True, yacc_optimize=(not args.debug), scope=scope, tabfile=yacctab, verbose=args.verbose) p.parse(filename=lf, debuglevel=0) css = f.format(p) if not args.dry_run: with open(outf, ) as outfile: outfile.write(css) elif args.verbose: print( % lf, file=sys.stderr) sys.stdout.flush() if args.recurse: [ ldirectory( os.path.join(inpath, name), os.path.join(outpath, name), args, scope) for name in os.listdir(inpath) if os.path.isdir(os.path.join(inpath, name)) and not name.startswith() and not name == outpath ]
Compile all *.less files in directory Args: inpath (str): Path to compile outpath (str): Output directory args (object): Argparse Object scope (Scope): Scope object or None
18,355
def close(self):
    if not (yield from super().close()):
        return False

    adapters = self._ethernet_adapters + self._serial_adapters
    for adapter in adapters:
        if adapter is not None:
            for nio in adapter.ports.values():
                if nio and isinstance(nio, NIOUDP):
                    self.manager.port_manager.release_udp_port(nio.lport, self._project)

    yield from self.stop()
Closes this IOU VM.
18,356
def waitPuppetCatalogToBeApplied(self, key, sleepTime=5): loop_stop = False while not loop_stop: status = self[key].getStatus() if status == or status == : self.__printProgression__(True, key + ) loop_stop = True elif status == : self.__printProgression__(False, key + ) loop_stop = True return False else: self.__printProgression__(, key + .format(status), eol=) time.sleep(sleepTime)
Function waitPuppetCatalogToBeApplied: wait for the puppet catalog to be applied @param key: The host name or ID @param sleepTime: Seconds to wait between polls @return: None
18,357
def get_resource(request): hash = request.matchdict[] with db_connect() as db_connection: with db_connection.cursor() as cursor: args = dict(hash=hash) cursor.execute(SQL[], args) try: mimetype, file = cursor.fetchone() except TypeError: raise httpexceptions.HTTPNotFound() resp = request.response resp.status = "200 OK" resp.content_type = mimetype resp.body = file[:] return resp
Retrieve a file's data.
18,358
def watched_file_handler(name, logname, filename, mode=, encoding=None, delay=False): return wrap_log_handler(logging.handlers.WatchedFileHandler( filename, mode=mode, encoding=encoding, delay=delay))
A Bark logging handler logging output to a named file. If the file has changed since the last log message was written, it will be closed and reopened. Similar to logging.handlers.WatchedFileHandler.
18,359
def unregister(self, cleanup_mode):
    if not isinstance(cleanup_mode, CleanupMode):
        raise TypeError("cleanup_mode can only be an instance of type CleanupMode")
    media = self._call("unregister", in_p=[cleanup_mode])
    media = [IMedium(a) for a in media]
    return media
Unregisters a machine previously registered with :py:func:`IVirtualBox.register_machine` and optionally do additional cleanup before the machine is unregistered. This method does not delete any files. It only changes the machine configuration and the list of registered machines in the VirtualBox object. To delete the files which belonged to the machine, including the XML file of the machine itself, call :py:func:`delete_config` , optionally with the array of IMedium objects which was returned from this method. How thoroughly this method cleans up the machine configuration before unregistering the machine depends on the @a cleanupMode argument. With "UnregisterOnly", the machine will only be unregistered, but no additional cleanup will be performed. The call will fail if the machine is in "Saved" state or has any snapshots or any media attached (see :py:class:`IMediumAttachment` ). It is the responsibility of the caller to delete all such configuration in this mode. In this mode, the API behaves like the former @c IVirtualBox::unregisterMachine() API which it replaces. With "DetachAllReturnNone", the call will succeed even if the machine is in "Saved" state or if it has snapshots or media attached. All media attached to the current machine state or in snapshots will be detached. No medium objects will be returned; all of the machine's media will remain open. With "DetachAllReturnHardDisksOnly", the call will behave like with "DetachAllReturnNone", except that all the hard disk medium objects which were detached from the machine will be returned as an array. This allows for quickly passing them to the :py:func:`delete_config` API for closing and deletion. With "Full", the call will behave like with "DetachAllReturnHardDisksOnly", except that all media will be returned in the array, including removable media like DVDs and floppies. This might be useful if the user wants to inspect in detail which media were attached to the machine. Be careful when passing the media array to :py:func:`delete_config` in that case because users will typically want to preserve ISO and RAW image files. A typical implementation will use "DetachAllReturnHardDisksOnly" and then pass the resulting IMedium array to :py:func:`delete_config` . This way, the machine is completely deleted with all its saved states and hard disk images, but images for removable drives (such as ISO and RAW files) will remain on disk. This API does not verify whether the media files returned in the array are still attached to other machines (i.e. shared between several machines). If such a shared image is passed to :py:func:`delete_config` however, closing the image will fail there and the image will be silently skipped. This API may, however, move media from this machine's media registry to other media registries (see :py:class:`IMedium` for details on media registries). For machines created with VirtualBox 4.0 or later, if media from this machine's media registry are also attached to another machine (shared attachments), each such medium will be moved to another machine's registry. This is because without this machine's media registry, the other machine cannot find its media any more and would become inaccessible. This API implicitly calls :py:func:`save_settings` to save all current machine settings before unregistering it. It may also silently call :py:func:`save_settings` on other machines if media are moved to other machines' media registries. After successful method invocation, the :py:class:`IMachineRegisteredEvent` event is fired. 
The call will fail if the machine is currently locked (see :py:class:`ISession` ). If the given machine is inaccessible (see :py:func:`accessible` ), it will be unregistered and fully uninitialized right afterwards. As a result, the returned machine object will be unusable and an attempt to call **any** method will return the "Object not ready" error. in cleanup_mode of type :class:`CleanupMode` How to clean up after the machine has been unregistered. return media of type :class:`IMedium` List of media detached from the machine, depending on the @a cleanupMode parameter. raises :class:`VBoxErrorInvalidObjectState` Machine is currently locked for a session.
18,360
def errcat(self): post_recs = MPost.query_random(limit=1000) outrecs = [] errrecs = [] idx = 0 for postinfo in post_recs: if idx > 16: break cat = MPost2Catalog.get_first_category(postinfo.uid) if cat: if in postinfo.extinfo: if postinfo.extinfo[] == cat.tag_id: pass else: errrecs.append(postinfo) idx += 1 else: errrecs.append(postinfo) idx += 1 else: outrecs.append(postinfo) idx += 1 self.render(, kwd={}, norecs=outrecs, errrecs=errrecs, userinfo=self.userinfo)
List the posts to be modified.
18,361
def draw_variable(loc, scale, shape, skewness, nsims):
    return ss.cauchy.rvs(loc, scale, nsims)
Draws random variables from this distribution Parameters ---------- loc : float location parameter for the distribution scale : float scale parameter for the distribution shape : float tail thickness parameter for the distribution skewness : float skewness parameter for the distribution nsims : int or list number of draws to take from the distribution Returns ---------- - Random draws from the distribution
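Note that the body forwards only loc and scale: the Cauchy distribution in scipy has no shape or skewness parameter, so those arguments exist for interface compatibility and are ignored. A minimal equivalent call:

import scipy.stats as ss

draws = ss.cauchy.rvs(loc=0.0, scale=1.0, size=5)  # five standard-Cauchy draws
print(draws.shape)  # (5,)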
18,362
def object_patch_rm_link(self, root, link, **kwargs): args = ((root, link),) return self._client.request(, args, decoder=, **kwargs)
Creates a new merkledag object based on an existing one. The new object will lack a link to the specified object. .. code-block:: python >>> c.object_patch_rm_link( ... 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k', ... 'Johnny' ... ) {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'} Parameters ---------- root : str IPFS hash of the object to modify link : str name of the link to remove Returns ------- dict : Hash of new object
18,363
def twilight(self, direction=SUN_RISING, date=None, local=True, use_elevation=True):
    if local and self.timezone is None:
        raise ValueError("Local time requested but Location has no timezone set.")

    if date is None:
        date = datetime.date.today()

    elevation = self.elevation if use_elevation else 0

    start, end = self.astral.twilight_utc(
        direction, date, self.latitude, self.longitude, elevation
    )

    if local:
        return start.astimezone(self.tz), end.astimezone(self.tz)
    else:
        return start, end
Returns the start and end times of Twilight in the UTC timezone when the sun is traversing in the specified direction. This method defines twilight as being between the time when the sun is at -6 degrees and sunrise/sunset. :param direction: Determines whether the time is for the sun rising or setting. Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. :type direction: int :param date: The date for which to calculate the times. :type date: :class:`datetime.date` :param local: True = Time to be returned in location's time zone; False = Time to be returned in UTC. If not specified then the time will be returned in local time :type local: bool :param use_elevation: True = Return times that allow for the location's elevation; False = Return times that don't use elevation. If not specified then times will take elevation into account. :type use_elevation: bool :return: A tuple of the UTC date and time at which twilight starts and ends. :rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`)
18,364
def hash_sha256(self):
    fp_plain = hashlib.sha256(self._decoded_key).digest()
    return (b"SHA256:" + base64.b64encode(fp_plain).replace(b"=", b"")).decode("utf-8")
Calculate sha256 fingerprint.
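The format matches OpenSSH's SHA256 fingerprint style (base64 with the padding stripped); a self-contained sketch over a hypothetical decoded key blob:

import base64
import hashlib

decoded_key = b'\x00\x00\x00\x07ssh-rsa...'  # hypothetical key bytes
digest = hashlib.sha256(decoded_key).digest()
print((b'SHA256:' + base64.b64encode(digest).replace(b'=', b'')).decode('utf-8'))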
18,365
def dumps(*args, **kwargs): import json from django.conf import settings from argonauts.serializers import JSONArgonautsEncoder kwargs.setdefault(, JSONArgonautsEncoder) if settings.DEBUG: kwargs.setdefault(, 4) kwargs.setdefault(, (, )) else: kwargs.setdefault(, (, )) return json.dumps(*args, **kwargs)
Wrapper for json.dumps that uses the JSONArgonautsEncoder.
18,366
def _highlightBracket(self, bracket, qpart, block, columnIndex):
    try:
        matchedBlock, matchedColumnIndex = self._findMatchingBracket(bracket, qpart, block, columnIndex)
    except _TimeoutException:
        return []

    if matchedBlock is not None:
        self.currentMatchedBrackets = ((block, columnIndex), (matchedBlock, matchedColumnIndex))
        return [self._makeMatchSelection(block, columnIndex, True),
                self._makeMatchSelection(matchedBlock, matchedColumnIndex, True)]
    else:
        self.currentMatchedBrackets = None
        return [self._makeMatchSelection(block, columnIndex, False)]
Highlight a bracket and its matching bracket. Return a list of QTextEdit.ExtraSelection objects.
18,367
def exclude_from(l, containing=[], equal_to=[]):
    cont = lambda li: any(c in li for c in containing)
    eq = lambda li: any(e == li for e in equal_to)
    return [li for li in l if not (cont(li) or eq(li))]
Exclude elements in list l containing any elements from list ex. Example: >>> l = ['bob', 'r', 'rob\r', '\r\nrobert'] >>> containing = ['\n', '\r'] >>> equal_to = ['r'] >>> exclude_from(l, containing, equal_to) ['bob']
18,368
def do_gate(self, gate: Gate):
    gate_matrix, qubit_inds = _get_gate_tensor_and_qubits(gate=gate)
    self.wf = targeted_tensordot(gate=gate_matrix, wf=self.wf,
                                 wf_target_inds=qubit_inds)
    return self
Perform a gate. :return: ``self`` to support method chaining.
18,369
def parent_org_sdo_ids(self):
    return [sdo.get_owner()._narrow(SDOPackage.SDO).get_sdo_id()
            for sdo in self._obj.get_organizations() if sdo]
The SDO IDs of the compositions this RTC belongs to.
18,370
def star(self): warnings.warn( "Deprecated! Update Taiga and use .like() instead", DeprecationWarning ) self.requester.post( , endpoint=self.endpoint, id=self.id ) return self
Stars the project .. deprecated:: 0.8.5 Update Taiga and use like instead
18,371
def _post_read_flds(flds, header): if flds.shape[0] >= 3 and header[] > 0: header[] = np.roll( np.arctan2(header[], header[]), -1, 1) for ibk in range(header[]): flds[..., ibk] = _to_spherical(flds[..., ibk], header) header[] = np.roll( np.arctan2(header[], -header[]) + np.pi, -1, 1) return flds
Process flds to handle sphericity.
18,372
def shutdown(self):
    if self.lifecycle.is_live:
        self.lifecycle.fire_lifecycle_event(LIFECYCLE_STATE_SHUTTING_DOWN)
    self.near_cache_manager.destroy_all_near_caches()
    self.statistics.shutdown()
    self.partition_service.shutdown()
    self.heartbeat.shutdown()
    self.cluster.shutdown()
    self.reactor.shutdown()
    self.lifecycle.fire_lifecycle_event(LIFECYCLE_STATE_SHUTDOWN)
    self.logger.info("Client shutdown.", extra=self._logger_extras)
Shuts down this HazelcastClient.
18,373
def wrap_args_with_process_isolation(self, args): cwd = os.path.realpath(self.cwd) pi_temp_dir = self.build_process_isolation_temp_dir() new_args = [self.process_isolation_executable or , , , , , , ] for path in sorted(set(self.process_isolation_hide_paths or [])): if not os.path.exists(path): logger.debug(.format(path)) continue path = os.path.realpath(path) if os.path.isdir(path): new_path = tempfile.mkdtemp(dir=pi_temp_dir) os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) else: handle, new_path = tempfile.mkstemp(dir=pi_temp_dir) os.close(handle) os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR) new_args.extend([, .format(new_path), .format(path)]) if self.private_data_dir: show_paths = [self.private_data_dir] else: show_paths = [cwd] for path in sorted(set(self.process_isolation_ro_paths or [])): if not os.path.exists(path): logger.debug(.format(path)) continue path = os.path.realpath(path) new_args.extend([, .format(path), .format(path)]) show_paths.extend(self.process_isolation_show_paths or []) for path in sorted(set(show_paths)): if not os.path.exists(path): logger.debug(.format(path)) continue path = os.path.realpath(path) new_args.extend([, .format(path), .format(path)]) if self.execution_mode == ExecutionMode.ANSIBLE_PLAYBOOK: if self.directory_isolation_path is not None: new_args.extend([, os.path.realpath(self.directory_isolation_path)]) else: new_args.extend([, self.project_dir]) elif self.execution_mode == ExecutionMode.ANSIBLE: new_args.extend([, os.path.realpath(self.private_data_dir)]) new_args.extend(args) return new_args
Wrap existing command line with bwrap to restrict access to: - self.process_isolation_path (generally, /tmp) (except for own /tmp files)
18,374
def generate(self, src=None, identifier=None): self.src = src self.identifier = identifier im = self.manipulator_klass() im.srcfile = self.src im.set_max_image_pixels(self.max_image_pixels) im.do_first() width = im.width height = im.height scale_factors = im.scale_factors(self.tilesize) self.setup_destination() for (region, size) in static_partial_tile_sizes(width, height, self.tilesize, scale_factors): self.generate_tile(region, size) sizes = [] for size in static_full_sizes(width, height, self.tilesize): sizes.append({: size[0], : size[1]}) self.generate_tile(, size) for request in self.extras: request.identifier = self.identifier if (request.is_scaled_full_image()): sizes.append({: request.size_wh[0], : request.size_wh[1]}) self.generate_file(request) qualities = [] if (self.api_version > ) else [] info = IIIFInfo(level=0, server_and_prefix=self.prefix, identifier=self.identifier, width=width, height=height, scale_factors=scale_factors, tile_width=self.tilesize, tile_height=self.tilesize, formats=[], qualities=qualities, sizes=sizes, api_version=self.api_version) json_file = os.path.join(self.dst, self.identifier, ) if (self.dryrun): self.logger.warning( "dryrun mode, would write the following files:") self.logger.warning("%s / %s/%s" % (self.dst, self.identifier, )) else: with open(json_file, ) as f: f.write(info.as_json()) f.close() self.logger.info("%s / %s/%s" % (self.dst, self.identifier, )) self.logger.debug("Written %s" % (json_file))
Generate static files for one source image.
18,375
def add_tokens_for_group(self, with_pass=False):
    kls = self.groups.super_kls
    name = self.groups.kls_name

    self.reset_indentation()
    self.result.extend(self.tokens.make_describe(kls, name))

    if with_pass:
        self.add_tokens_for_pass()

    self.groups.finish_signature()
Add the tokens for the group signature
18,376
async def get_entity(self):
    if not self.entity and await self.get_input_entity():
        try:
            self._entity = await self._client.get_entity(self._input_entity)
        except ValueError:
            pass

    return self._entity
Returns `entity` but will make an API call if necessary.
18,377
def save(self, *args, **kwargs):
    # The 'writer' key is named in the docstring below; the literal was
    # lost in extraction.
    writer = None
    if 'writer' in kwargs.keys():
        writer = kwargs.pop('writer')
    else:
        writer = default_writer
    super().save(*args, **kwargs, writer=writer)
Save animation into a movie file. [NOTE] If 'writer' is not specified, default writer defined in this module will be used to generate the movie file. [TODO] Implement docstring inheritance.
18,378
def Draw(self, *args, **kwargs):
    self.reset()
    output = None
    while self._rollover():
        if output is None:
            output = self._tree.Draw(*args, **kwargs)
            if output is not None:
                output = output.Clone()
                # Attribute name inferred from the call on the next line.
                if hasattr(output, 'SetDirectory'):
                    output.SetDirectory(0)
        else:
            newoutput = self._tree.Draw(*args, **kwargs)
            if newoutput is not None:
                if isinstance(output, _GraphBase):
                    output.Append(newoutput)
                else:
                    output += newoutput
    return output
Loop over subfiles, draw each, and sum the output into a single histogram.
18,379
def _map_tril_1d_on_2d(indices, dims):
    N = (dims * dims - dims) / 2
    m = np.ceil(np.sqrt(2 * N))
    c = m - np.round(np.sqrt(2 * (N - indices))) - 1
    r = np.mod(indices + (c + 1) * (c + 2) / 2 - 1, m) + 1
    return np.array([r, c], dtype=np.int64)
Map 1d indices on lower triangular matrix in 2d.
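A worked check for dims=4: the six strictly-lower-triangular entries of a 4x4 matrix come out enumerated column by column:

import numpy as np

rc = _map_tril_1d_on_2d(np.arange(6), dims=4)
print(rc.T.tolist())  # [[1, 0], [2, 0], [3, 0], [2, 1], [3, 1], [3, 2]]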
18,380
def _set_ghost_ios(self, vm): if not vm.mmap: raise DynamipsError("mmap support is required to enable ghost IOS support") if vm.platform == "c7200" and vm.npe == "npe-g2": log.warning("Ghost IOS is not supported for c7200 with NPE-G2") return ghost_file = vm.formatted_ghost_file() module_workdir = vm.project.module_working_directory(self.module_name.lower()) ghost_file_path = os.path.join(module_workdir, ghost_file) if ghost_file_path not in self._ghost_files: ghost_id = str(uuid4()) ghost = Router("ghost-" + ghost_file, ghost_id, vm.project, vm.manager, platform=vm.platform, hypervisor=vm.hypervisor, ghost_flag=True) try: yield from ghost.create() yield from ghost.set_image(vm.image) yield from ghost.set_ghost_status(1) yield from ghost.set_ghost_file(ghost_file_path) yield from ghost.set_ram(vm.ram) try: yield from ghost.start() yield from ghost.stop() self._ghost_files.add(ghost_file_path) except DynamipsError: raise finally: yield from ghost.clean_delete() except DynamipsError as e: log.warn("Could not create ghost instance: {}".format(e)) if vm.ghost_file != ghost_file and os.path.isfile(ghost_file_path): yield from vm.set_ghost_status(2) yield from vm.set_ghost_file(ghost_file_path)
Manages Ghost IOS support. :param vm: VM instance
18,381
def request_verification(self, user, identity):
    return UserIdentityRequest(self).put(
        self.endpoint.request_verification, user, identity)
Sends the user a verification email with a link to verify ownership of the email address. :param user: User id or object :param identity: Identity id or object :return: requests Response object
18,382
def get_item(self, path, project=None, file_name=None, download=None, scope_path=None, recursion_level=None, version_descriptor=None, include_content=None): route_values = {} if project is not None: route_values[] = self._serialize.url(, project, ) query_parameters = {} if path is not None: query_parameters[] = self._serialize.query(, path, ) if file_name is not None: query_parameters[] = self._serialize.query(, file_name, ) if download is not None: query_parameters[] = self._serialize.query(, download, ) if scope_path is not None: query_parameters[] = self._serialize.query(, scope_path, ) if recursion_level is not None: query_parameters[] = self._serialize.query(, recursion_level, ) if version_descriptor is not None: if version_descriptor.version_option is not None: query_parameters[] = version_descriptor.version_option if version_descriptor.version_type is not None: query_parameters[] = version_descriptor.version_type if version_descriptor.version is not None: query_parameters[] = version_descriptor.version if include_content is not None: query_parameters[] = self._serialize.query(, include_content, ) response = self._send(http_method=, location_id=, version=, route_values=route_values, query_parameters=query_parameters) return self._deserialize(, response)
GetItem. Get Item Metadata and/or Content for a single item. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download. :param str path: Version control path of an individual item to return. :param str project: Project ID or project name :param str file_name: file name of item returned. :param bool download: If true, create a downloadable attachment. :param str scope_path: Version control path of a folder to return multiple items. :param str recursion_level: None (just the item), or OneLevel (contents of a folder). :param :class:`<TfvcVersionDescriptor> <azure.devops.v5_0.tfvc.models.TfvcVersionDescriptor>` version_descriptor: Version descriptor. Default is null. :param bool include_content: Set to true to include item content when requesting json. Default is false. :rtype: :class:`<TfvcItem> <azure.devops.v5_0.tfvc.models.TfvcItem>`
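A hypothetical usage sketch with the azure-devops Python SDK; the organization URL, personal access token, project, and path are illustrative:

from azure.devops.connection import Connection
from msrest.authentication import BasicAuthentication

credentials = BasicAuthentication('', 'my-personal-access-token')
connection = Connection(base_url='https://dev.azure.com/myorg', creds=credentials)
tfvc_client = connection.clients.get_tfvc_client()
# Fetch one TFVC item's metadata together with its content.
item = tfvc_client.get_item(path='$/MyProject/README.md', project='MyProject',
                            include_content=True)
print(item.path)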
18,383
def _update_geography(self, countries, regions, cities, city_country_mapping):
    # Dictionary keys below were lost during extraction; the restored names
    # are inferred from the model fields this method touches.
    existing = {
        'cities': list(City.objects.values_list('id', flat=True)),
        'regions': list(Region.objects.values('name', 'country__code')),
        'countries': Country.objects.values_list('code', flat=True),
    }
    for country_code in countries:
        if country_code not in existing['countries']:
            Country.objects.create(code=country_code,
                                   name=ISO_CODES.get(country_code, country_code))
    for entry in regions:
        if entry not in existing['regions']:
            Region.objects.create(name=entry['name'], country_id=entry['country__code'])
    for entry in cities:
        if int(entry['city_id']) not in existing['cities']:
            code = city_country_mapping.get(entry['city_id'])
            if code:
                region = Region.objects.get(name=entry['region_name'], country__code=code)
                City.objects.create(id=entry['city_id'], name=entry['city_name'],
                                    region=region,
                                    latitude=entry.get('latitude'),
                                    longitude=entry.get('longitude'))
Update database with new countries, regions and cities
18,384
def thorium(opts, functions, runners):
    # Restored string literals are inferred from Salt's loader conventions
    # (pack keys follow the usual __salt__/__runner__/__context__ pattern).
    pack = {'__salt__': functions, '__runner__': runners, '__context__': {}}
    ret = LazyLoader(_module_dirs(opts, 'thorium'),
                     opts,
                     tag='thorium',
                     pack=pack)
    ret.pack['__thorium__'] = ret
    return ret
Load the thorium runtime modules
18,385
def accept(self):
    # Header name restored by inference from the docstring below.
    accept = self.headers.get('Accept')
    if not accept or accept == _content_types.ANY:
        return self._default_accept
    else:
        return accept
The content-type for the response to the client. Returns: (str): The value of the header 'Accept' or the user-supplied SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT environment variable.
18,386
def sample_indexes(segyfile, t0=0.0, dt_override=None): if dt_override is None: dt_override = dt(segyfile) return [t0 + t * dt_override for t in range(len(segyfile.samples))]
Creates a list of values representing the samples in a trace at depth or time. The list starts at *t0* and is incremented with *dt* for the number of samples. If a *dt_override* is not provided it will try to find a *dt* in the file. Parameters ---------- segyfile : segyio.SegyFile t0 : float initial sample, or delay-recording-time dt_override : float or None Returns ------- samples : array_like of float Notes ----- .. versionadded:: 1.1
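An illustrative sketch; 'survey.sgy' is a placeholder file name. segyio expresses dt in microseconds, so 4000.0 below means a 4 ms sampling interval:

import segyio

with segyio.open('survey.sgy') as f:
    times = sample_indexes(f, t0=0.0, dt_override=4000.0)
    # times == [0.0, 4000.0, 8000.0, ...], one entry per trace sample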
18,387
def distributive(self): dual = self.dual args = list(self.args) for i, arg in enumerate(args): if isinstance(arg, dual): args[i] = arg.args else: args[i] = (arg,) prod = itertools.product(*args) args = tuple(self.__class__(*arg).simplify() for arg in prod) if len(args) == 1: return args[0] else: return dual(*args)
Return a term where the leading AND or OR terms are switched. This is done by applying the distributive laws: A & (B|C) = (A&B) | (A&C) A | (B&C) = (A|B) & (A|C)
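A worked example, assuming the boolean.py algebra this method comes from; the expression is illustrative:

from boolean import BooleanAlgebra

algebra = BooleanAlgebra()
expr = algebra.parse('a&(b|c)')
# Applying the distributive law yields an OR of AND terms, e.g. (a&b)|(a&c).
print(expr.distributive())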
18,388
def get_location(self, location_id: int, timeout: int=None):
    url = self.api.LOCATIONS + '/' + str(location_id)  # path separator restored (inferred)
    return self._get_model(url, timeout=timeout)
Get a location information Parameters ---------- location_id: int A location ID See https://github.com/RoyaleAPI/cr-api-data/blob/master/json/regions.json for a list of acceptable location IDs timeout: Optional[int] = None Custom timeout that overwrites Client.timeout
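A hypothetical usage sketch, assuming a clashroyale.OfficialAPI-style client; the token and location id are illustrative:

import clashroyale

client = clashroyale.OfficialAPI('my-api-token')
location = client.get_location(57000006)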
18,389
def exec_command( client, container, command, interactive=True, stdout=None, stderr=None, stdin=None): exec_id = exec_create(client, container, command, interactive=interactive) operation = ExecOperation(client, exec_id, interactive=interactive, stdout=stdout, stderr=stderr, stdin=stdin) PseudoTerminal(client, operation).start()
Run provided command via exec API in provided container. This is just a wrapper for PseudoTerminal(client, container).exec_command()
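A hypothetical usage sketch with docker-py's low-level APIClient; the image and commands are illustrative:

import docker
import dockerpty

client = docker.APIClient()
# Keep the container alive long enough to exec into it.
container = client.create_container('busybox:latest', stdin_open=True, tty=True,
                                    command='sleep 300')
client.start(container)
dockerpty.exec_command(client, container, '/bin/sh')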
18,390
import re
from configparser import ConfigParser
from os.path import expandvars, isfile
from typing import Sequence, Union


def inis2dict(ini_paths: Union[str, Sequence[str]]) -> dict:
    # Regex restored by inference: it must match bash-style ${VAR:-DEFAULT}
    # tokens, capturing the variable name and the default.
    var_dflt = r'\$\{([^}:]+):-([^}]*)\}'

    def _interpolate(content):
        rv = expandvars(content)
        while True:
            match = re.search(var_dflt, rv)
            if match is None:
                break
            bash_var = '${{{}}}'.format(match.group(1))
            value = expandvars(bash_var)
            # If the variable is unset, expandvars leaves ${VAR} untouched,
            # so the default (group 2) is substituted instead.
            rv = re.sub(var_dflt,
                        match.group(2) if value == bash_var else value,
                        rv,
                        count=1)
        return rv

    parser = ConfigParser()
    for ini in [ini_paths] if isinstance(ini_paths, str) else ini_paths:
        if not isfile(ini):
            raise FileNotFoundError('ini file {} not found'.format(ini))  # message inferred
        with open(ini, 'r') as ini_fh:
            ini_text = _interpolate(ini_fh.read())
        parser.read_string(ini_text)
    return {s: dict(parser[s].items()) for s in parser.sections()}
Take one or more ini files and return a dict with configuration from all, interpolating bash-style variables ${VAR} or ${VAR:-DEFAULT}. :param ini_paths: path or paths to .ini files
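An illustrative usage example for the function above, assuming a file config.ini on disk:

# Given config.ini containing:
#
#   [db]
#   host = ${DB_HOST:-localhost}
#   port = 5432
#
# and DB_HOST unset in the environment, the default kicks in:
cfg = inis2dict('config.ini')
print(cfg['db']['host'])  # 'localhost'
print(cfg['db']['port'])  # '5432' (ConfigParser values stay strings)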
18,391
def get_terminal_size():
    def read_terminal_size_by_ioctl(fd):
        try:
            import struct, fcntl, termios
            # Buffer and format restored by inference: TIOCGWINSZ fills two
            # shorts (rows, cols). The original also queried fd 1 regardless
            # of the argument, which looked like a bug; 'fd' is used here.
            raw = fcntl.ioctl(fd, termios.TIOCGWINSZ, struct.pack('hh', 0, 0))
            cr = struct.unpack('hh', raw)
        except ImportError:
            return None
        except IOError:
            return None
        return cr[1], cr[0]

    cr = read_terminal_size_by_ioctl(0) or \
        read_terminal_size_by_ioctl(1) or \
        read_terminal_size_by_ioctl(2)
    if not cr:
        try:
            import os
            fd = os.open(os.ctermid(), os.O_RDONLY)
            cr = read_terminal_size_by_ioctl(fd)
            os.close(fd)
        except Exception:
            pass
    if not cr:
        import os
        cr = [80, 25]
        # Environment variable names inferred from the [cols, rows] layout.
        if os.getenv('LINES'):
            cr[1] = int(os.getenv('LINES'))
        if os.getenv('COLUMNS'):
            cr[0] = int(os.getenv('COLUMNS'))
    return cr[1], cr[0]
Finds the width of the terminal, or returns a suitable default value.
18,392
def authenticate(self, email=None, password=None): if self.session.authenticate(email, password): return True
Attempt to authenticate the user. Parameters ---------- email : string The email of a user on Lending Club password : string The user's password, for authentication. Returns ------- boolean True if the user authenticated or raises an exception if not Raises ------ session.AuthenticationError If authentication failed session.NetworkError If a network error occurred
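A hypothetical usage sketch with the lendingclub client; the credentials are illustrative:

from lendingclub import LendingClub

lc = LendingClub()
# Raises session.AuthenticationError on bad credentials.
if lc.authenticate(email='me@example.com', password='secret'):
    print('Authenticated')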
18,393
import numpy as np


def input_from_history(a, n, bias=False):
    if not type(n) == int:
        raise ValueError('The argument n must be int.')  # message inferred
    if not n > 0:
        raise ValueError('The argument n must be greater than 0.')  # message inferred
    try:
        a = np.array(a, dtype="float64")
    except Exception:
        raise ValueError('The argument a is not a numpy array or similar.')  # message inferred
    x = np.array([a[i:i+n] for i in range(len(a)-n+1)])
    if bias:
        # Append a column of ones for the bias term.
        x = np.vstack((x.T, np.ones(len(x)))).T
    return x
This is a function for creation of input matrix. **Args:** * `a` : series (1 dimensional array) * `n` : size of input matrix row (int). It means how many samples \ of previous history you want to use \ as the filter input. It also represents the filter length. **Kwargs:** * `bias` : decides if the bias is used (Boolean). If True, \ array of all ones is appended as a last column to matrix `x`. \ So matrix `x` has `n`+1 columns. **Returns:** * `x` : input matrix (2 dimensional array) \ constructed from an array `a`. The length of `x` \ is calculated as length of `a` - `n` + 1. \ If the `bias` is used, the amount of columns is `n`+1; if not, the amount of columns is `n`.
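A short worked example of the function above:

import numpy as np

# With a = [1, 2, 3, 4, 5] and n = 3 the matrix has 5 - 3 + 1 = 3 rows:
x = input_from_history([1, 2, 3, 4, 5], 3)
# [[1. 2. 3.]
#  [2. 3. 4.]
#  [3. 4. 5.]]
x_b = input_from_history([1, 2, 3, 4, 5], 3, bias=True)
# same matrix with a fourth column of ones appended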
18,394
def template_scheduler_yaml(cl_args, masters): single_master = masters[0] scheduler_config_actual = "%s/standalone/scheduler.yaml" % cl_args["config_path"] scheduler_config_template = "%s/standalone/templates/scheduler.template.yaml" \ % cl_args["config_path"] template_file(scheduler_config_template, scheduler_config_actual, {"<scheduler_uri>": "http://%s:4646" % single_master})
Template scheduler.yaml
18,395
def _addConfig(instance, config, parent_section):
    try:
        section_name = "{p}/{n}".format(p=parent_section, n=instance.NAME.lower())
        config.add_section(section_name)
        for k in instance.CONFIG.keys():
            config.set(section_name, k, instance.CONFIG[k])
    except Exception as e:
        print("[!] %s" % e)
Writes a section for a plugin. Args: instance (object): Class instance for plugin config (object): Object (ConfigParser) which holds the current config parent_section (str): Parent section for plugin. Usually 'checkers' or 'reports'
18,396
def process_line(self, idx, line):
    # Most string literals in this routine were lost during extraction. The
    # values marked "inferred" are educated guesses from context (the routine
    # converts C-header '#define' constants into C#-style enum members);
    # literals that could not be recovered are left empty and flagged.
    py_line = ''
    if '///' in line:  # inferred: documentation comment marker
        py_line = '# ' + line[3:]  # leading literal inferred
        if py_line.find('@') > 0:  # original search token lost; '@' is a guess
            # Map an enum name found in the comment to its comment text.
            key = py_line[py_line.find('@'):py_line.find(' ')]  # slice tokens lost
            self.enum_comment[key] = '%s' % py_line[py_line.find('@') + 3:-1]  # format string lost
        else:
            self.tmp_comment = '{}'.format(line[3:-1])  # format string lost
    elif '#define' in line:  # inferred
        content = line.split()
        constant = content[1]
        if len(content) > 2:
            value = content[-1][:-1]
            py_line = '%s = %s' % (constant, value)  # format string lost
        else:
            py_line = ''  # literal lost
        if py_line:
            if len(value) > 3:
                self.define.append("{2}{0} = {1},".format(constant, value[1:-1], self.tmp_comment))
            else:
                self.define.append("{2}{0} = (byte){1},".format(constant, value, self.tmp_comment))
    elif 'typedef' in line:  # inferred
        py_line = self.process_typedef(line)
        if line.find('enum') > 0 and line.find('{') < 0:  # search tokens are guesses
            key = line.split(' ')[2][6:-2]  # split token lost
            enum_line = self.enum_comment[key]
            enum_line += '%s' % key  # format string lost
            for l in self.define:
                enum_line += '%s' % l  # format string lost
            enum_line += ''  # closing literal lost
            if enum_line.find("(byte)") < 0:
                enum_line = enum_line.replace('', '')  # replacement literals lost; no-op here
            self.fenum.write(enum_line)
            self.define.clear()
    elif line == '':  # literal lost; likely a blank-line check
        py_line = line
    else:
        py_line = ''
    return py_line
Process each line.
18,397
def _cast_field(self, cast_to, value):
    if cast_to in (int, long, str):
        return cast_to(value)
    elif cast_to == unicode:
        try:
            value = value.decode(self.charset, self.errors)
        except UnicodeDecodeError as e:
            # The original caught UnicodeEncodeError (which decode() never
            # raises) and passed two values to a single '%s' placeholder;
            # both were bugs.
            raise InvalidData("Error decoding unicode value %s: %s" % (repr(value), e))
        return value
    elif cast_to in (any, bytes):
        return value
    else:
        raise TypeError("Invalid field type %s" % (cast_to))
Convert field type from raw bytes to native python type :param cast_to: native python type to cast to :type cast_to: a type object (one of bytes, int, unicode (str for py3k)) :param value: raw value from the database :type value: bytes :return: converted value :rtype: value of native python type (one of bytes, int, unicode (str for py3k))
18,398
def _periodically_flush_profile_events(self): while True: self.threads_stopped.wait(timeout=1) if self.threads_stopped.is_set(): return self.flush_profile_data()
Drivers run this as a thread to flush profile data in the background.
18,399
import calendar
from datetime import date


def _add_months(p_sourcedate, p_months):
    month = p_sourcedate.month - 1 + p_months
    year = p_sourcedate.year + month // 12
    month = month % 12 + 1
    # Clamp the day so that e.g. Jan 31 + 1 month lands on the last day of February.
    day = min(p_sourcedate.day, calendar.monthrange(year, month)[1])

    return date(year, month, day)
Adds a number of months to the source date. Takes into account shorter months and leap years and such. https://stackoverflow.com/questions/4130922/how-to-increment-datetime-month-in-python
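A short worked example of the month-end clamping and year rollover:

from datetime import date

print(_add_months(date(2019, 1, 31), 1))   # 2019-02-28
print(_add_months(date(2020, 1, 31), 1))   # 2020-02-29 (leap year)
print(_add_months(date(2019, 11, 15), 3))  # 2020-02-15 (year rollover)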