Dataset schema (from the dataset viewer): `Unnamed: 0` — int64 row index, 0 to 389k; `code` — string, lengths 26 to 79.6k; `docstring` — string, lengths 1 to 46.9k.
17,700
def build_kitchen_sink():
    from sendgrid.helpers.mail import (
        Mail, From, To, Cc, Bcc, Subject, PlainTextContent, HtmlContent,
        SendGridException, Substitution, Header, CustomArg, SendAt, Content,
        MimeType, Attachment, FileName, FileContent, FileType, Disposition,
        ContentId, TemplateId, Section, ReplyTo, Category, BatchId, Asm,
        GroupId, GroupsToDisplay, IpPoolName, MailSettings, BccSettings,
        BccSettingsEmail, BypassListManagement, FooterSettings, FooterText,
        FooterHtml, SandBoxMode, SpamCheck, SpamThreshold, SpamUrl,
        TrackingSettings, ClickTracking, SubscriptionTracking,
        SubscriptionText, SubscriptionHtml, SubscriptionSubstitutionTag,
        OpenTracking, OpenTrackingSubstitutionTag, Ganalytics, UtmSource,
        UtmMedium, UtmTerm, UtmContent, UtmCampaign)
    import time
    import datetime

    # NOTE: the email/name/string literals below were lost in extraction;
    # the "[email protected]"-style values are illustrative stand-ins only.
    message = Mail()

    # Personalization 0
    message.to = To("[email protected]", "Example To 0", p=0)
    message.to = [To("[email protected]", "Example To 1", p=0),
                  To("[email protected]", "Example To 2", p=0)]
    message.cc = Cc("[email protected]", "Example Cc 0", p=0)
    message.cc = [Cc("[email protected]", "Example Cc 1", p=0),
                  Cc("[email protected]", "Example Cc 2", p=0)]
    message.bcc = Bcc("[email protected]", "Example Bcc 0", p=0)
    message.bcc = [Bcc("[email protected]", "Example Bcc 1", p=0),
                   Bcc("[email protected]", "Example Bcc 2", p=0)]
    message.subject = Subject("Example subject 0", p=0)
    message.header = Header("X-Test1", "Test1", p=0)
    message.header = Header("X-Test2", "Test2", p=0)
    message.header = [Header("X-Test3", "Test3", p=0),
                      Header("X-Test4", "Test4", p=0)]
    message.substitution = Substitution("%name1%", "Example Name 1", p=0)
    message.substitution = Substitution("%city1%", "Example City 1", p=0)
    message.substitution = [Substitution("%name2%", "Example Name 2", p=0),
                            Substitution("%city2%", "Example City 2", p=0)]
    message.custom_arg = CustomArg("arg1", "value1", p=0)
    message.custom_arg = CustomArg("arg2", "value2", p=0)
    message.custom_arg = [CustomArg("arg3", "value3", p=0),
                          CustomArg("arg4", "value4", p=0)]
    message.send_at = SendAt(1461775051, p=0)

    # Personalization 1
    message.to = To("[email protected]", "Example To 3", p=1)
    message.to = [To("[email protected]", "Example To 4", p=1),
                  To("[email protected]", "Example To 5", p=1)]
    message.cc = Cc("[email protected]", "Example Cc 3", p=1)
    message.cc = [Cc("[email protected]", "Example Cc 4", p=1),
                  Cc("[email protected]", "Example Cc 5", p=1)]
    message.bcc = Bcc("[email protected]", "Example Bcc 3", p=1)
    message.bcc = [Bcc("[email protected]", "Example Bcc 4", p=1),
                   Bcc("[email protected]", "Example Bcc 5", p=1)]
    message.header = Header("X-Test5", "Test5", p=1)
    message.header = Header("X-Test6", "Test6", p=1)
    message.header = [Header("X-Test7", "Test7", p=1),
                      Header("X-Test8", "Test8", p=1)]
    message.substitution = Substitution("%name3%", "Example Name 3", p=1)
    message.substitution = Substitution("%city3%", "Example City 3", p=1)
    message.substitution = [Substitution("%name4%", "Example Name 4", p=1),
                            Substitution("%city4%", "Example City 4", p=1)]
    message.custom_arg = CustomArg("arg5", "value5", p=1)
    message.custom_arg = CustomArg("arg6", "value6", p=1)
    message.custom_arg = [CustomArg("arg7", "value7", p=1),
                          CustomArg("arg8", "value8", p=1)]
    message.send_at = SendAt(1461775052, p=1)
    message.subject = Subject("Example subject 1", p=1)

    # Message-level settings
    message.from_email = From("[email protected]", "Example From")
    message.reply_to = ReplyTo("[email protected]", "Example Reply To")
    message.subject = Subject("Example global subject")
    message.content = Content(MimeType.text, "Example plain text body")
    message.content = Content(MimeType.html, "<strong>Example HTML body</strong>")
    message.content = [Content(MimeType.text, "Example plain text body 2"),
                       Content(MimeType.html, "<strong>Example HTML body 2</strong>")]
    message.attachment = Attachment(FileContent("base64-encoded content 1"),
                                    FileType("application/pdf"),
                                    FileName("example_1.pdf"),
                                    Disposition("attachment"),
                                    ContentId("Content ID 1"))
    message.attachment = [Attachment(FileContent("base64-encoded content 2"),
                                     FileType("image/png"),
                                     FileName("example_2.png"),
                                     Disposition("inline"),
                                     ContentId("Content ID 2")),
                          Attachment(FileContent("base64-encoded content 3"),
                                     FileType("image/png"),
                                     FileName("example_3.png"),
                                     Disposition("inline"),
                                     ContentId("Content ID 3"))]
    message.template_id = TemplateId("example-template-id")
    message.section = Section("%section1%", "Example Section 1")
    message.section = [Section("%section2%", "Example Section 2"),
                       Section("%section3%", "Example Section 3")]
    message.header = Header("X-Test9", "Test9")
    message.header = Header("X-Test10", "Test10")
    message.header = [Header("X-Test11", "Test11"), Header("X-Test12", "Test12")]
    message.category = Category("Category 1")
    message.category = Category("Category 2")
    message.category = [Category("Category 3"), Category("Category 4")]
    message.custom_arg = CustomArg("arg9", "value9")
    message.custom_arg = CustomArg("arg10", "value10")
    message.custom_arg = [CustomArg("arg11", "value11"), CustomArg("arg12", "value12")]
    message.send_at = SendAt(1461775053)
    message.batch_id = BatchId("HkJ5yLYULb7Rj8GKSx7u025ouWVlMgAi")
    message.asm = Asm(GroupId(1), GroupsToDisplay([1, 2, 3, 4]))
    message.ip_pool_name = IpPoolName("IP Pool Name")

    mail_settings = MailSettings()
    mail_settings.bcc_settings = BccSettings(False, BccSettingsEmail("[email protected]"))
    mail_settings.bypass_list_management = BypassListManagement(False)
    mail_settings.footer_settings = FooterSettings(True, FooterText("w00t"),
                                                   FooterHtml("<strong>w00t!</strong>"))
    mail_settings.sandbox_mode = SandBoxMode(True)
    mail_settings.spam_check = SpamCheck(True, SpamThreshold(5), SpamUrl("https://example.com"))
    message.mail_settings = mail_settings

    tracking_settings = TrackingSettings()
    tracking_settings.click_tracking = ClickTracking(True, False)
    tracking_settings.open_tracking = OpenTracking(True, OpenTrackingSubstitutionTag("open_tracking"))
    tracking_settings.subscription_tracking = SubscriptionTracking(
        True, SubscriptionText("Goodbye"), SubscriptionHtml("<strong>Goodbye!</strong>"),
        SubscriptionSubstitutionTag("unsubscribe"))
    tracking_settings.ganalytics = Ganalytics(
        True, UtmSource("utm_source"), UtmMedium("utm_medium"), UtmTerm("utm_term"),
        UtmContent("utm_content"), UtmCampaign("utm_campaign"))
    message.tracking_settings = tracking_settings
    return message.get()
All settings set
17,701
def ageostrophic_wind(heights, f, dx, dy, u, v, dim_order='yx'):  # default dim_order assumed; original literal elided
    u_geostrophic, v_geostrophic = geostrophic_wind(heights, f, dx, dy,
                                                    dim_order=dim_order)
    return u - u_geostrophic, v - v_geostrophic
r"""Calculate the ageostrophic wind given from the heights or geopotential. Parameters ---------- heights : (M, N) ndarray The height field. f : array_like The coriolis parameter. This can be a scalar to be applied everywhere or an array of values. dx : float or ndarray The grid spacing(s) in the x-direction. If an array, there should be one item less than the size of `heights` along the applicable axis. dy : float or ndarray The grid spacing(s) in the y-direction. If an array, there should be one item less than the size of `heights` along the applicable axis. u : (M, N) ndarray The u wind field. v : (M, N) ndarray The u wind field. Returns ------- A 2-item tuple of arrays A tuple of the u-component and v-component of the ageostrophic wind. Notes ----- If inputs have more than two dimensions, they are assumed to have either leading dimensions of (x, y) or trailing dimensions of (y, x), depending on the value of ``dim_order``.
17,702
def build_class(name, basenames=(), doc=None):
    node = nodes.ClassDef(name, doc)
    for base in basenames:
        basenode = nodes.Name()
        basenode.name = base
        node.bases.append(basenode)
        basenode.parent = node
    return node
create and initialize an astroid ClassDef node
17,703
def emitCurrentRecordChanged(self, item):
    if self.signalsBlocked():
        return
    if isinstance(item, XOrbRecordItem):
        self.currentRecordChanged.emit(item.record())
    else:
        self.currentRecordChanged.emit(None)
Emits the record changed signal for the given item, provided the signals are not currently blocked. :param item | <QTreeWidgetItem>
17,704
def create_like(self, repository_id, pull_request_id, thread_id, comment_id, project=None):
    # Route-value keys follow the standard Azure DevOps client pattern; the
    # location_id GUID and API version literals were lost in extraction.
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    if repository_id is not None:
        route_values['repositoryId'] = self._serialize.url('repository_id', repository_id, 'str')
    if pull_request_id is not None:
        route_values['pullRequestId'] = self._serialize.url('pull_request_id', pull_request_id, 'int')
    if thread_id is not None:
        route_values['threadId'] = self._serialize.url('thread_id', thread_id, 'int')
    if comment_id is not None:
        route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
    self._send(http_method='POST',
               location_id=...,  # endpoint GUID elided in source
               version=...,      # API version elided in source
               route_values=route_values)
CreateLike. [Preview API] Add a like on a comment. :param str repository_id: The repository ID of the pull request's target branch. :param int pull_request_id: ID of the pull request. :param int thread_id: The ID of the thread that contains the comment. :param int comment_id: The ID of the comment. :param str project: Project ID or project name
17,705
def compile_dependencies(self, sourcepath, include_self=False):
    items = self.inspector.parents(sourcepath)
    if include_self:
        items.add(sourcepath)
    return filter(None, [self.compile_source(item) for item in items])
Apply compile on all dependencies Args: sourcepath (string): Sass source path to compile to its destination using project settings. Keyword Arguments: include_self (bool): If ``True`` the given sourcepath is added to the items to compile, else only its dependencies are compiled.
17,706
def _get_sts_token(self):
    logger.debug("Connecting to STS in region %s", self.region)
    sts = boto3.client('sts', region_name=self.region)
    arn = "arn:aws:iam::%s:role/%s" % (self.account_id, self.account_role)
    logger.debug("STS assume role for %s", arn)
    # Kwarg keys are the documented assume_role parameters; the session-name
    # literal was elided in the source.
    assume_kwargs = {
        'RoleArn': arn,
        'RoleSessionName': ...,
    }
    if self.external_id is not None:
        assume_kwargs['ExternalId'] = self.external_id
    if self.mfa_serial_number is not None:
        assume_kwargs['SerialNumber'] = self.mfa_serial_number
    if self.mfa_token is not None:
        assume_kwargs['TokenCode'] = self.mfa_token
    role = sts.assume_role(**assume_kwargs)
    creds = ConnectableCredentials(role)
    creds.account_id = self.account_id
    logger.debug("Got STS credentials for role; access_key_id=%s "
                 "(account_id=%s)", creds.access_key, creds.account_id)
    return creds
Assume a role via STS and return the credentials. First connect to STS via :py:func:`boto3.client`, then assume a role using `boto3.STS.Client.assume_role <https://boto3.readthedocs.org/en/latest/reference/services/sts.html#STS.Client.assume_role>`_ using ``self.account_id`` and ``self.account_role`` (and optionally ``self.external_id``, ``self.mfa_serial_number``, ``self.mfa_token``). Return the resulting :py:class:`~.ConnectableCredentials` object. :returns: STS assumed role credentials :rtype: :py:class:`~.ConnectableCredentials`
17,707
def check_existing_vr_tag(self):
    primary_images = get_primary_images(self.workflow)
    if not primary_images:
        return
    vr_image = None
    for image in primary_images:
        if '-' in image.tag:  # version-release tags contain a dash; literal assumed
            vr_image = image
            break
    if not vr_image:
        return
    should_fail = False
    for registry_name, registry in self.registries.items():
        pullspec = vr_image.copy()
        pullspec.registry = registry_name
        insecure = registry.get('insecure', False)
        secret = registry.get('secret', None)
        manifest_list = get_manifest_list(pullspec, registry_name, insecure, secret)
        if manifest_list:
            self.log.error("Primary tag already exists in registry: %s", pullspec)
            should_fail = True
    if should_fail:
        raise RuntimeError("Primary tag already exists in registry")
Checks if version-release tag (primary not floating tag) exists already, and fails plugin if it does.
17,708
def validate_instance(cls, opts):
    # Error-message literals approximated; originals elided in source.
    if opts.loop and (not opts.v2 or opts.v1):
        raise OptionsError('The --loop option requires --v2 and --no-v1.')
    if opts.loop and not opts.enable_pantsd:
        raise OptionsError('The --loop option requires --enable-pantsd.')
    if opts.v2_ui and not opts.v2:
        raise OptionsError('The --v2-ui option requires --v2.')
Validates an instance of global options for cases that are not prohibited via registration. For example: mutually exclusive options may be registered by passing a `mutually_exclusive_group`, but when multiple flags must be specified together, it can be necessary to specify post-parse checks. Raises pants.option.errors.OptionsError on validation failure.
17,709
def addStencilBranch(self, disp, weight):
    self.stencil[tuple(disp)] = weight
    self.__setPartionLogic(disp)
Set or overwrite the stencil weight for the given direction @param disp displacement vector @param weight stencil weight
17,710
def set_ontime(self, ontime):
    try:
        ontime = float(ontime)
    except Exception as err:
        LOG.debug("SwitchPowermeter.set_ontime: Exception %s" % (err,))
        return False
    self.actionNodeData("ON_TIME", ontime)
Set the duration the switch stays on when toggled.
17,711
def close(self):
    self.cancel()
    self.backend.close()
    self._closed = True
Close the channel to the queue.
17,712
def delete_local_file(file_name):
    try:
        os.remove(file_name)
        log.info(f"Deletion for {file_name} has finished")
        return file_name
    except OSError:
        pass
Deletes the file associated with the file_name passed from local storage. :param str file_name: Filename of the file to be deleted :return str: Filename of the file that was just deleted
17,713
def integrate_data(xdata, ydata, xmin=None, xmax=None, autozero=0):
    [xdata, ydata] = sort_matrix([xdata, ydata], 0)
    xdata = _n.array(xdata)
    ydata = _n.array(ydata)
    if xmin is None:
        xmin = min(xdata)
    if xmax is None:
        xmax = max(xdata)
    imin = xdata.searchsorted(xmin)
    imax = xdata.searchsorted(xmax)
    xint = [xdata[imin]]
    yint = [0]
    if autozero >= 1:
        zero = _n.average(ydata[imin:imin + int(autozero)])
        ydata = ydata - zero
    for n in range(imin + 1, imax):
        if len(yint):
            xint.append(xdata[n])
            yint.append(yint[-1] + 0.5*(xdata[n]-xdata[n-1])*(ydata[n]+ydata[n-1]))
        else:
            xint.append(xdata[n])
            yint.append(0.5*(xdata[n]-xdata[n-1])*(ydata[n]+ydata[n-1]))
    return _n.array(xint), _n.array(yint)
Numerically integrates the ydata using the trapezoid approximation. Returns (xdata, integrated ydata). autozero is the number of data points to use as an estimate of the background (which is then subtracted before integrating).
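A minimal usage sketch (synthetic data; assumes the module's own dependencies such as sort_matrix and numpy-as-_n are importable):

import numpy as np

x = np.linspace(0.0, np.pi, 1001)
y = np.sin(x)
xint, yint = integrate_data(x, y)
print(yint[-1])  # running trapezoid sum at the right edge; ~2.0, the exact integral of sin on [0, pi]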
17,714
def delete(self, ids):
    url = build_uri_with_ids(..., ids)  # URI-template literal elided in source
    return super(ApiV4IPv6, self).delete(url)
Method to delete ipv6's by their ids :param ids: Identifiers of ipv6's :return: None
17,715
def query(self, query=None):
    if query is None:
        return self.url.query
    self.url.query = query
If query is given, modify the URL correspondingly, return the current query otherwise.
17,716
def sitemap(self):
    if self.sitemap_name is not None:
        return self.sitemap_name
    return self.sitemap_uri(self.resource_list_name)
Return the sitemap URI based on maps or explicit settings.
17,717
def Append(self, component=None, **kwarg):
    if component is None:
        component = self.__class__(**kwarg)
    if self.HasField("pathtype"):
        self.last.nested_path = component
    else:
        for k, v in iteritems(kwarg):
            setattr(self, k, v)
        self.SetRawData(component.GetRawData())
    return self
Append a new pathspec component to this pathspec.
17,718
def getPlayAreaRect(self):
    fn = self.function_table.getPlayAreaRect
    rect = HmdQuad_t()
    result = fn(byref(rect))
    return result, rect
Returns the 4 corner positions of the Play Area (formerly named Soft Bounds). Corners are in counter-clockwise order. Standing center (0,0,0) is the center of the Play Area. It's a rectangle. 2 sides are parallel to the X axis and 2 sides are parallel to the Z axis. Height of every corner is 0Y (on the floor).
17,719
def list_vpnservices(retrieve_all=True, profile=None, **kwargs):
    conn = _auth(profile)
    return conn.list_vpnservices(retrieve_all, **kwargs)
Fetches a list of all configured VPN services for a tenant CLI Example: .. code-block:: bash salt '*' neutron.list_vpnservices :param retrieve_all: True or False, default: True (Optional) :param profile: Profile to build on (Optional) :return: List of VPN service
17,720
def join_tags(tags):
    names = []
    delimiter = settings.TAGGIT_SELECTIZE['DELIMITER']  # settings key assumed; literal elided in source
    for tag in tags:
        name = tag.name
        if delimiter in name or ' ' in name:
            names.append('"%s"' % name)
        else:
            names.append(name)
    return delimiter.join(sorted(names))
Given list of ``Tag`` instances, creates a string representation of the list suitable for editing by the user, such that submitting the given string representation back without changing it will give the same list of tags. Tag names which contain DELIMITER will be double quoted. Adapted from Taggit's _edit_string_for_tags() Ported from Jonathan Buchanan's `django-tagging <http://django-tagging.googlecode.com/>`_
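A sketch of the intended round-trip behavior, assuming a comma delimiter (the namedtuple below is a hypothetical stand-in; any object with a .name attribute works):

from collections import namedtuple

Tag = namedtuple('Tag', 'name')  # stand-in for taggit's Tag model
# Names containing the delimiter or a space get double quoted, and the result
# is sorted: join_tags([Tag('python'), Tag('web dev'), Tag('a,b')])
# would yield '"a,b","web dev",python'.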
17,721
def variations(word): if len(word) == 1: return [[word[0]]] elif word == : return [[]] elif word == : return [[]] elif word == : return [[]] elif word in [, ]: return [[]] elif word == : return [[], [, ]] elif word in [, , , , , ]: return [[word]] elif word in ["ei"]: return [["oo", "u"]] elif word in ["a", "o", "u"]: return [[word[0] + "a", "o", "u", "" + word[1]]] elif len(word) == 2 and word[0] == word[1]: return [[word[0]]] if word[:2] == : return [[] + i for i in variations(word[2:])] elif word[:2] == : return [[] + i for i in variations(word[2:])] elif word[:2] in [, ]: return [[] + i for i in variations(word[2:])] elif word[:3] == : return \ [[] + i for i in variations(word[3:])] + \ [[, ] + i for i in variations(word[3:])] + \ [[, , ] + i for i in variations(word[3:])] elif word[:2] in [, , , , , ]: return \ [[word[:2]] + i for i in variations(word[2:])] + \ [[word[0]] + i for i in variations(word[1:])] elif word[:2] in ["a", "o", "u"]: return [[word[:2]] + i for i in variations(word[2:])] elif word[:3] in ["ei"]: return [["oo", "u"] + i for i in variations(word[3:])] elif word[:2] in ["e", "i", "A"]: return [[word[:2]] + i for i in variations(word[2:])] elif len(word) >= 2 and word[0] == word[1]: return [[word[0]] + i for i in variations(word[2:])] else: return [[word[0]] + i for i in variations(word[1:])]
Create variations of the word based on letter combinations like oo, sh, etc.
17,722
def get_canvas_image(self):
    if self._imgobj is not None:
        return self._imgobj
    try:
        self._imgobj = self.canvas.get_object_by_tag(self._canvas_img_tag)
        # callback name approximated; the original literal was elided
        self._imgobj.add_callback('image-set', self._image_set_cb)
    except KeyError:
        pass
    return self._imgobj
Get canvas image object. Returns ------- imgobj : `~ginga.canvas.types.image.NormImage` Normalized image sitting on the canvas.
17,723
def returns(self) -> T.Optional[DocstringReturns]:
    try:
        return next(
            DocstringReturns.from_meta(meta)
            for meta in self.meta
            if meta.args[0] in {"return", "returns", "yield", "yields"}
        )
    except StopIteration:
        return None
Return return information indicated in docstring.
17,724
async def get_next_opponent(self):
    next_match = await self.get_next_match()
    if next_match is not None:
        opponent_id = (next_match.player1_id
                       if next_match.player2_id == self._id
                       else next_match.player2_id)
        return await self._tournament.get_participant(opponent_id)
    return None
Get the opponent of the potential next match. See :func:`get_next_match` |methcoro| Raises: APIException
17,725
def print_fields(bf, *args, **kwargs):
    vals = {k: hex(v) for k, v in bf.items()}
    print(bf.base, vals, *args, **kwargs)
Print all the fields of a Bitfield object to stdout. This is primarily a diagnostic aid during debugging.
17,726
def ensure_ndarray(ndarray_or_adjusted_array):
    if isinstance(ndarray_or_adjusted_array, ndarray):
        return ndarray_or_adjusted_array
    elif isinstance(ndarray_or_adjusted_array, AdjustedArray):
        return ndarray_or_adjusted_array.data
    else:
        raise TypeError(
            "Can't convert %s to ndarray" %
            type(ndarray_or_adjusted_array).__name__
        )
Return the input as a numpy ndarray. This is a no-op if the input is already an ndarray. If the input is an adjusted_array, this extracts a read-only view of its internal data buffer. Parameters ---------- ndarray_or_adjusted_array : numpy.ndarray | zipline.data.adjusted_array Returns ------- out : The input, converted to an ndarray.
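A small illustration of the pass-through branch (plain numpy; the AdjustedArray branch analogously hands back the read-only .data buffer):

import numpy as np

arr = np.arange(5)
assert ensure_ndarray(arr) is arr  # an ndarray is returned unchanged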
17,727
def get_darker_image(self):
    icon_pressed = self.icon.copy()
    for x in range(self.w):
        for y in range(self.h):
            r, g, b, *_ = tuple(self.icon.get_at((x, y)))
            const = 0.8
            r = int(const * r)
            g = int(const * g)
            b = int(const * b)
            icon_pressed.set_at((x, y), (r, g, b))
    return icon_pressed
Return a copy of the icon darkened to 80% of its original brightness.
17,728
def get_ISI_ratio(sorting, sampling_frequency, unit_ids=None, save_as_property=True):
    ISI_ratios = []
    if unit_ids is None:
        unit_ids = sorting.get_unit_ids()
    for unit_id in unit_ids:
        unit_spike_train = sorting.get_unit_spike_train(unit_id)
        ref_frame_period = sampling_frequency * 0.002
        long_interval = sampling_frequency * 0.02
        ISIs = np.diff(unit_spike_train)
        num_ref_violations = float(sum(ISIs < ref_frame_period))
        num_longer_interval = float(sum(ISIs < long_interval))
        ISI_ratio = num_ref_violations / num_longer_interval
        if save_as_property:
            sorting.set_unit_property(unit_id, 'ISI_ratio', ISI_ratio)  # property name from the docstring
        ISI_ratios.append(ISI_ratio)
    return ISI_ratios
This function calculates the ratio between the frequency of spikes present within 0- to 2-ms (refractory period) interspike interval (ISI) and those at 0- to 20-ms interval. It then returns the ratios and also adds a property, ISI_ratio, for the passed in sorting extractor. Taken from: "Large-scale, high-density (up to 512 channels) recording of local circuits in behaving animals" - Antal Berényi, et al. Parameters ---------- unit_ids: list List of unit ids for which to get ISIratios sorting: SortingExtractor SortingExtractor for the results file being analyzed sampling_frequency: float The sampling frequency of recording save_as_property: boolean If True, this will save the ISI_ratio as a property in the given sorting extractor. Returns ---------- ISI_ratios: list of floats A list of ratios for each unit passed into this function. Each ratio is the ratio between the frequency of spikes present within 0- to 2-ms ISI and those at 0- to 20-ms interval for the corresponding spike train.
17,729
def execution_context(self):
    if self._execution_context is None:
        self._execution_context = ExecutionContextList(
            self._version,
            flow_sid=self._solution['flow_sid'],
            execution_sid=self._solution['sid'],
        )
    return self._execution_context
Access the execution_context :returns: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextList :rtype: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextList
17,730
def audio(audio, sample_rate, name=None, out=None, subdir='audios', timeout=5,
          **kwargs):
    # The subdir default and the dict-key literals are approximations; the
    # originals were elided in the source.
    from chainerui.report.audio_report import check_available
    if not check_available():
        return
    from chainerui.report.audio_report import report as _audio
    out_root = _chainerui_asset_observer.get_outpath(out)
    out_path = os.path.join(out_root, subdir)
    if not os.path.isdir(out_path):
        os.makedirs(out_path)
    col_name = name
    if col_name is None:
        col_name = 'audio'  # default column name per the docstring
    filename, created_at = _audio(audio, sample_rate, out_path, col_name)
    value = kwargs
    value['timestamp'] = created_at.isoformat()
    value['audios'] = {col_name: os.path.join(subdir, filename)}
    _chainerui_asset_observer.add(value)
    _chainerui_asset_observer.save(out_root, timeout)
summary audio files to listen to on a browser. A sampled array is converted to a WAV audio file, saved to the output directory, and reported to the ChainerUI server. The audio file is saved every time this function is called. The audio files can be listened to on the `assets` endpoint vertically. If you need to aggregate audio files in a row, use :func:`~chainerui.summary.reporter`. Example of how to set arguments:: >>> from chainerui import summary >>> summary.set_out('/path/to/output') >>> rate = 44100 >>> >>> summary.audio(sampled_array, rate, name='test') >>> # sampled_array can be listened to on a browser. Add a description of the audio file:: >>> summary.audio( >>> sampled_array, rate, name='test', epoch=1, iteration=100) >>> # 'epoch' and 'iteration' columns will be shown. Args: audio (:class:`numpy.ndarray` or :class:`cupy.ndarray` or :class:`chainer.Variable`): sampled wave array. sample_rate (int): sampling rate. name (str): name of the audio; set as the column name. When not set, ``'audio'`` is assigned. out (str): directory path of output. subdir (str): sub-directory path of output. **kwargs (dict): key-value pairs to show as description; regardless of empty or not, a timestamp for when the audio was created is added.
17,731
def size():
    try:
        assert os != 'nt' and sys.stdout.isatty()  # 'nt' literal assumed; skips the stty path on Windows
        rows, columns = os.popen('stty size', 'r').read().split()
    except (AssertionError, AttributeError, ValueError):
        rows, columns = DEFAULT_HEIGHT, DEFAULT_WIDTH
    return int(rows), int(columns)
Determines the height and width of the console window Returns: tuple of int: The height in lines, then width in characters
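For comparison, the standard library now covers the same need portably; a minimal sketch:

import shutil

columns, rows = shutil.get_terminal_size(fallback=(80, 24))
print(rows, columns)  # note size() above returns (rows, columns), the reverse order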
17,732
def start_element(self, name, attrs):
    self.in_tag = (name == self.tag)
    self.url = u""
Set tag status for start element.
17,733
def iter_commit_activity(self, number=-1, etag=None):
    url = self._build_url('stats', 'commit_activity', base_url=self._api)
    return self._iter(int(number), url, dict, etag=etag)
Iterate over last year of commit activity by week. See: http://developer.github.com/v3/repos/statistics/ :param int number: (optional), number of weeks to return. Default -1 will return all of the weeks. :param str etag: (optional), ETag from a previous request to the same endpoint :returns: generator of dictionaries .. note:: All statistics methods may return a 202. On those occasions, you will not receive any objects. You should store your iterator and check the new ``last_status`` attribute. If it is a 202 you should wait before re-requesting. .. versionadded:: 0.7
17,734
def find_closest_match(target_track, tracks):
    track = None
    tracks_with_match_ratio = [(
        track,
        get_similarity(target_track.artist, track.artist),
        get_similarity(target_track.name, track.name),
    ) for track in tracks]
    sorted_tracks = sorted(
        tracks_with_match_ratio,
        key=lambda t: (t[1], t[2]),
        reverse=True
    )
    if sorted_tracks:
        track = sorted_tracks[0][0]
    return track
Return closest match to target track
17,735
def tap_hold(self, x, y, duration=1.0):
    data = {'x': x, 'y': y, 'duration': duration}
    return self.http.post('/wda/touchAndHold', data=data)
Tap and hold for a moment Args: - x, y(int): position - duration(float): seconds of hold time [[FBRoute POST:@"/wda/touchAndHold"] respondWithTarget:self action:@selector(handleTouchAndHoldCoordinate:)],
17,736
def save():
    # Log-message and key literals approximated; originals elided in source.
    if request.method == 'POST':
        exp_id = session.get('exp_id')
        app.logger.debug('Saving data for %s' % exp_id)
        fields = get_post_fields(request)
        result_file = app.save_data(session=session, content=fields, exp_id=exp_id)
        experiments = app.finish_experiment(session, exp_id)
        app.logger.info('Finished %s, %s experiments remaining' % (exp_id, len(experiments)))
save is a view to save data. We might want to adjust this to allow for updating saved data, but given a single file it is just one POST for now.
17,737
def write(self) -> None:
    if self.folders:
        init = hydpy.pub.timegrids.init
        timeunits = init.firstdate.to_cfunits()
        timepoints = init.to_timepoints()
        for folder in self.folders.values():
            for file_ in folder.values():
                file_.write(timeunits, timepoints)
Call method |NetCDFFile.write| of all handled |NetCDFFile| objects.
17,738
def getAnalogType(self, num):
    listidx = self.An.index(num)
    unit = self.uu[listidx]
    # Unit literals approximated from the docstring (voltage vs. current channels).
    if unit == 'kV' or unit == 'V':
        return 'V'
    elif unit == 'kA' or unit == 'A':
        return 'I'
    else:
        print('Unknown unit %s for channel %s' % (unit, num))  # message approximated
        return 0
Returns the type of the channel 'num' based on its unit stored in the Comtrade header file. Returns 'V' for a voltage channel and 'I' for a current channel.
17,739
def create_helper_trans_node(op_name, input_node, node_name):
    node_name = op_name + "_" + node_name
    trans_node = onnx.helper.make_node(
        'Transpose',
        inputs=[input_node],
        outputs=[node_name],
        name=node_name
    )
    return trans_node
create extra transpose node for dot operator
17,740
def normalize(seq):
    s = float(sum(seq))
    return [v / s for v in seq]
Scales each number in the sequence so that the sum of all numbers equals 1.
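A quick worked illustration of the arithmetic:

>>> normalize([2, 3, 5])
[0.2, 0.3, 0.5]
>>> sum(normalize([1, 1, 2]))
1.0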
17,741
def iter_links(self, file, encoding=None, context=False):
    if context:
        return [item for item in self.iter_text(file, encoding) if item[1]]
    else:
        return [item[0] for item in self.iter_text(file, encoding) if item[1]]
Return the links. This function is a convenience function for calling :meth:`iter_text` and returning only the links.
17,742
def get(self, sid):
    return DocumentContext(
        self._version,
        service_sid=self._solution['service_sid'],  # key name assumed; literal elided in source
        sid=sid,
    )
Constructs a DocumentContext :param sid: The sid :returns: twilio.rest.preview.sync.service.document.DocumentContext :rtype: twilio.rest.preview.sync.service.document.DocumentContext
17,743
def make_phase_space_list(): with open() as f: data_str = .join(line for line in f.readlines()) data_str_array = data_str.split()[1:] data_str_matrix = [[j.strip().split() for j in i] for i in[chunk.split()[1:8] for chunk in data_str_array]] return [PhaseSpace(data) for data in data_str_matrix]
Extract all the phase space information (due to ``EMIT`` commands in the input file), and create a list of PhaseSpace objects. The primary purpose of this is for interactive explorations of the data produced during Pynac simulations.
17,744
def Bier(P, Pc, Te=None, q=None):
    Pr = P/Pc
    if Te:
        return (0.00417*(Pc/1000.)**0.69*Te**0.7*(0.7 + 2.*Pr*(4. + 1./(1. - Pr))))**(1./0.3)
    elif q:
        return 0.00417*(Pc/1000.)**0.69*q**0.7*(0.7 + 2.*Pr*(4. + 1./(1. - Pr)))
    else:
        raise Exception('Either q or Te is needed for this correlation')  # message approximated; original elided
r'''Calculates heat transfer coefficient for an evaporator operating in the nucleate boiling regime according to [1]_ . Either heat flux or excess temperature is required. With `Te` specified: .. math:: h = \left(0.00417P_c^{0.69} \Delta Te^{0.7}\left[0.7 + 2P_r\left(4 + \frac{1}{1-P_r}\right) \right]\right)^{1/0.3} With `q` specified: .. math:: h = 0.00417P_c^{0.69} q^{0.7}\left[0.7 + 2P_r\left(4 + \frac{1}{1-P_r}\right) \right] Parameters ---------- P : float Saturation pressure of fluid, [Pa] Pc : float Critical pressure of fluid, [Pa] Te : float, optional Excess wall temperature, [K] q : float, optional Heat flux, [W/m^2] Returns ------- h : float Heat transfer coefficient [W/m^2/K] Notes ----- No examples of this are known. Seems to give very different results than other correlations. Examples -------- Water boiling at 1 atm, with excess temperature of 4.3 K from [1]_. >>> Bier(101325., 22048321.0, Te=4.3) 1290.5349471503353 References ---------- .. [1] Rohsenow, Warren and James Hartnett and Young Cho. Handbook of Heat Transfer, 3E. New York: McGraw-Hill, 1998.
17,745
def report_exception(self, filename, exc):
    event = AbinitError(src_file="Unknown", src_line=0, message=str(exc))
    return EventReport(filename, events=[event])
This method is used when self.parser raises an Exception so that we can report a customized :class:`EventReport` object with info the exception.
17,746
def set_global_permissions(self, global_permissions):
    content = self._serialize.body(global_permissions, '[GlobalPermission]')
    response = self._send(http_method='POST',
                          location_id=...,  # endpoint GUID elided in source
                          version=...,      # API version elided in source
                          content=content)
    return self._deserialize('[GlobalPermission]', self._unwrap_collection(response))
SetGlobalPermissions. [Preview API] Set service-wide permissions that govern feed creation. :param [GlobalPermission] global_permissions: New permissions for the organization. :rtype: [GlobalPermission]
17,747
def _run_services(self, pants_services):
    # Log/error message literals approximated; originals elided in source.
    if not pants_services.services:
        self._logger.critical('no services to run, bailing!')
        return
    service_thread_map = {service: self._make_thread(service)
                          for service in pants_services.services}
    for service, service_thread in service_thread_map.items():
        self._logger.info('starting service {}'.format(service))
        try:
            service_thread.start()
        except (RuntimeError, FSEventService.ServiceError):
            self.shutdown(service_thread_map)
            raise PantsDaemon.StartupFailure('service {} failed to start'.format(service))
    self.write_pid()
    self.write_metadata_by_name('pantsd', self.FINGERPRINT_KEY,
                                ensure_text(self.options_fingerprint))
    while not self.is_killed:
        for service, service_thread in service_thread_map.items():
            if not service_thread.is_alive():
                self.shutdown(service_thread_map)
                raise PantsDaemon.RuntimeFailure('service {} died unexpectedly'.format(service))
            else:
                service_thread.join(self.JOIN_TIMEOUT_SECONDS)
Service runner main loop.
17,748
def fit_points_in_bounding_box(df_points, bounding_box, padding_fraction=0):
    df_scaled_points = df_points.copy()
    offset, padded_scale = fit_points_in_bounding_box_params(df_points, bounding_box,
                                                             padding_fraction)
    df_scaled_points[['x', 'y']] *= padded_scale
    df_scaled_points[['x', 'y']] += offset
    return df_scaled_points
Return data frame with ``x``, ``y`` columns scaled to fit points from :data:`df_points` to fill :data:`bounding_box` while maintaining aspect ratio. Arguments --------- df_points : pandas.DataFrame A frame with at least the columns ``x`` and ``y``, containing one row per point. bounding_box: pandas.Series A `pandas.Series` containing numeric `width` and `height` values. padding_fraction : float Fraction of padding to add around points. Returns ------- pandas.DataFrame Input frame with the points with ``x`` and ``y`` values scaled to fill :data:`bounding_box` while maintaining aspect ratio.
17,749
def _load_file(self):
    self.todolist.erase()
    self.todolist.add_list(self.todofile.read())
    self.completer = PromptCompleter(self.todolist)
Reads the configured todo.txt file and loads it into the todo list instance.
17,750
def execute_cmd(self, *args, **kwargs):
    slot = self.properties['Slot']  # property key assumed; literal elided in source
    base_cmd = ("controller", "slot=%s" % slot)
    cmd = base_cmd + args
    return _ssacli(*cmd, **kwargs)
Execute a given hpssacli/ssacli command on the controller. This method executes a given command on the controller. :params args: a tuple consisting of sub-commands to be appended after specifying the controller in hpssacli/ssacli command. :param kwargs: kwargs to be passed to execute() in processutils :raises: HPSSAOperationError, if hpssacli/ssacli operation failed.
17,751
def _to_viewitem(self, prog_var):
    return DDGViewItem(self._ddg, prog_var, simplified=self._simplified)
Convert a ProgramVariable instance to a DDGViewItem object. :param ProgramVariable prog_var: The ProgramVariable object to convert. :return: The converted DDGViewItem object. :rtype: DDGViewItem
17,752
def get_InsideConvexPoly(self, RelOff=_def.TorRelOff, ZLim=..., Spline=True,
                         Splprms=_def.TorSplprms, NP=_def.TorInsideNP,
                         Plot=False, Test=True):
    # The ZLim default and the three dgeom key literals were elided in the
    # source; Ellipsis placeholders mark the gaps.
    return _comp._Ves_get_InsideConvexPoly(self.Poly_closed,
                                           self.dgeom[...], self.dgeom[...],
                                           self.dgeom[...],
                                           RelOff=RelOff, ZLim=ZLim,
                                           Spline=Spline, Splprms=Splprms,
                                           NP=NP, Plot=Plot, Test=Test)
Return a polygon that is a smaller and smoothed approximation of Ves.Poly, useful for excluding the divertor region in a Tokamak For some uses, it can be practical to approximate the polygon defining the Ves object (which can be non-convex, like with a divertor), by a simpler, slightly smaller and convex polygon. This method provides a fast solution for computing such a proxy. Parameters ---------- RelOff : float Fraction by which an homothetic polygon should be reduced (1.-RelOff)*(Poly-BaryS) ZLim : None / str / tuple Flag indicating what limits shall be put to the height of the polygon (used for excluding divertor) Spline : bool Flag indicating whether the reduced and truncated polygon shall be smoothed by 2D b-spline curves Splprms : list List of 3 parameters to be used for the smoothing [weights,smoothness,b-spline order], fed to scipy.interpolate.splprep() NP : int Number of points to be used to define the smoothed polygon Plot : bool Flag indicating whether the result shall be plotted for visual inspection Test : bool Flag indicating whether the inputs should be tested for conformity Returns ------- Poly : np.ndarray (2,N) polygon resulting from homothetic transform, truncating and optional smoothing
17,753
def get_system_uptime_output_cmd_error(self, **kwargs):
    config = ET.Element("config")
    get_system_uptime = ET.Element("get_system_uptime")
    config = get_system_uptime
    output = ET.SubElement(get_system_uptime, "output")
    cmd_error = ET.SubElement(output, "cmd-error")
    cmd_error.text = kwargs.pop('cmd_error')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
17,754
def status(self):
    message = self.status_message
    if message is None:
        message = STATUS[self.status_code]
    return '%s %s' % (self.status_code, message)  # format string approximated; original elided
Allow custom status messages
17,755
def _getDocstringLineno(self, node_type, node):
    docstringStriped = node.as_string().strip()
    linenoDocstring = (node.lineno + docstringStriped
                       .count("\n", 0, docstringStriped.index('"""')))  # '"""' assumed; literal elided
    if node_type == "module":
        linenoDocstring += 1
    return linenoDocstring
Get line number of the docstring. @param node_type: type of the node being checked @param node: the node currently being checked @return: line number
17,756
def reset_generation(self):
    with self._lock:
        self._generation = Generation.NO_GENERATION
        self.rejoin_needed = True
        self.state = MemberState.UNJOINED
Reset the generation and memberId because we have fallen out of the group.
17,757
def handleEvent(self, eventObj):
    if eventObj.type not in (MOUSEMOTION, MOUSEBUTTONUP, MOUSEBUTTONDOWN) or not self.visible:
        return False
    if not self.isEnabled:
        return False
    clicked = False
    if (not self.mouseOverButton) and self.rect.collidepoint(eventObj.pos):
        self.mouseOverButton = True
    elif self.mouseOverButton and (not self.rect.collidepoint(eventObj.pos)):
        self.mouseOverButton = False
    if self.rect.collidepoint(eventObj.pos):
        if eventObj.type == MOUSEBUTTONDOWN:
            self.buttonDown = True
            self.lastMouseDownOverButton = True
    else:
        if eventObj.type in (MOUSEBUTTONUP, MOUSEBUTTONDOWN):
            self.lastMouseDownOverButton = False
    if eventObj.type == MOUSEBUTTONDOWN:
        self.mouseIsDown = True
    doMouseClick = False
    if eventObj.type == MOUSEBUTTONUP:
        self.mouseIsDown = False
        if self.lastMouseDownOverButton:
            doMouseClick = True
        self.lastMouseDownOverButton = False
        if self.buttonDown:
            self.buttonDown = False
        if doMouseClick:
            self.buttonDown = False
            clicked = True
            if self.playSoundOnClick:
                self.soundOnClick.play()
            self.value = not self.value
    return clicked
This method should be called every time through the main loop. It handles showing the up, over, and down states of the button. Parameters: | eventObj - the event object obtained by calling pygame.event.get() Returns: | False most of the time | True when the user has toggled the checkbox.
17,758
def create(vm_, call=None): prov = get_configured_provider(vm_) if not prov: return profile = vm_.get( , vm_.get(, None)) event_data = vm_.copy() event_data[] = profile __utils__[]( , , .format(vm_[]), args=__utils__[](, event_data, [, , , ]), sock_dir=__opts__[], transport=__opts__[] ) ret = {: vm_[], : {}, : True, : } if not in vm_ and not in vm_: log.debug(, vm_[]) vm_[], vm_[] = salt.utils.cloud.gen_keys( salt.config.get_cloud_config_value( , vm_, __opts__)) kwarg = copy.deepcopy(vm_) kwarg[] = prov[] kwarg[] = profile __utils__[]( , , .format(vm_[]), args=__utils__[](, vm_, [, , , ]), sock_dir=__opts__[], transport=__opts__[] ) cret = _runner().cmd(, [vm_[]], kwarg=kwarg) ret[] = cret ret[] = cret[] if not ret[]: ret[] = .format(vm_[]) else: ret[][] = if in __opts__: __opts__[] = __opts__[] del __opts__[] __utils__[]( , , .format(vm_[]), args=__utils__[](, vm_, [, , , ]), sock_dir=__opts__[], transport=__opts__[] ) return ret
Create an lxc Container. This function is idempotent and will try to either provision or finish the provision of an lxc container. NOTE: Most of the initialization code has been moved and merged with the lxc runner and lxc.init functions
17,759
def check_stat(self, path):
    statinfo = os.stat(path)
    st_mtime = datetime.fromtimestamp(statinfo.st_mtime)
    if platform.system() == 'Linux':  # 'Linux' assumed from the docstring; literal elided
        check = st_mtime >= self.start_dt
    else:
        st_ctime = datetime.fromtimestamp(statinfo.st_ctime)
        check = st_mtime >= self.start_dt and st_ctime <= self.end_dt
    if not check:
        logger.info("file %r not in datetime period!", path)
    return check
Checks logfile stat information to exclude files outside the datetime period. On Linux only the modification time can be checked, because file creation info is not available, so only older files can be excluded. On BSD Unix systems and Windows, information about file creation dates and times is available, so newer files can be excluded too.
17,760
def create_object(self, name, experiment_id, model_id, argument_defs,
                  arguments=None, properties=None):
    identifier = str(uuid.uuid4()).replace('-', '')
    directory = os.path.join(self.directory, identifier)
    if not os.access(directory, os.F_OK):
        os.makedirs(directory)
    state = ModelRunIdle()
    run_properties = {
        datastore.PROPERTY_NAME: name,
        datastore.PROPERTY_STATE: str(state),
        datastore.PROPERTY_MODEL: model_id
    }
    if properties is not None:
        for prop in properties:
            if prop not in run_properties:
                run_properties[prop] = properties[prop]
    run_arguments = {}
    if arguments is not None:
        run_arguments = attribute.to_dict(arguments, argument_defs)
    obj = ModelRunHandle(
        identifier,
        run_properties,
        directory,
        state,
        experiment_id,
        model_id,
        run_arguments
    )
    self.insert_object(obj)
    return obj
Create a model run object with the given list of arguments. The initial state of the object is RUNNING. Raises ValueError if given arguments are invalid. Parameters ---------- name : string User-provided name for the model run experiment_id : string Unique identifier of associated experiment object model_id : string Unique model identifier argument_defs : list(attribute.AttributeDefinition) Definition of valid arguments for the given model arguments : list(dict('name':...,'value:...')), optional List of attribute instances properties : Dictionary, optional Set of model run properties. Returns ------- PredictionHandle Object handle for created model run
17,761
def RegexLookup(fieldVal, db, fieldName, lookupType, histObj={}):
    # Lookup-type names are taken from the docstring; other elided literals
    # are approximated in comments below.
    if lookupType == 'genericRegex':
        lookup_dict = {}
    elif lookupType in ['fieldSpecificRegex', 'normRegex']:
        lookup_dict = {"fieldName": fieldName}
    else:
        raise ValueError("Invalid type")
    field_val_new = fieldVal
    pattern = None  # initial value elided in source
    coll = db[lookupType]
    re_val = coll.find(lookup_dict, ['pattern', 'replace'])
    for row in re_val:
        try:
            match = re.match(row['pattern'], _DataClean_(field_val_new), flags=re.IGNORECASE)
            if match:
                if 'replace' in row:
                    field_val_new = re.sub(row['pattern'], row['replace'],
                                           _DataClean_(field_val_new), flags=re.IGNORECASE)
                else:
                    field_val_new = re.sub(row['pattern'], '',
                                           _DataClean_(field_val_new), flags=re.IGNORECASE)
                pattern = row['pattern']
                break
        except KeyError as Key_error_obj:
            warnings.warn('lookup row is missing an expected key', Key_error_obj)  # message approximated
    if re_val:
        re_val.close()
    change = _CollectHistory_(lookupType=lookupType, fromVal=fieldVal,
                              toVal=field_val_new, pattern=pattern)
    histObjUpd = _CollectHistoryAgg_(contactHist=histObj, fieldHistObj=change,
                                     fieldName=fieldName)
    return field_val_new, histObjUpd
Return a new field value based on match against regex queried from MongoDB :param string fieldVal: input value to lookup :param MongoClient db: MongoClient instance connected to MongoDB :param string lookupType: Type of lookup to perform/MongoDB collection name. One of 'genericRegex', 'fieldSpecificRegex', 'normRegex' :param string fieldName: Field name to query against :param dict histObj: History object to which changes should be appended
17,762
def _cim_keybinding(key, value):
    if key is not None and isinstance(value, CIMProperty):
        if value.name.lower() != key.lower():
            raise ValueError(
                _format("Invalid keybinding name: CIMProperty.name must be "
                        "dictionary key {0!A}, but is {1!A}",
                        key, value.name))
        return copy_.copy(value.value)
    if value is None:
        return None
    if isinstance(value, six.text_type):
        return value
    if isinstance(value, six.binary_type):
        return _to_unicode(value)
    if isinstance(value, (bool, CIMInstanceName, CIMType)):
        return value
    if builtin_type(value) in number_types:
        return value
    if isinstance(value, (CIMClass, CIMInstance)):
        raise TypeError(
            _format("Value of keybinding {0!A} cannot be an embedded object: "
                    "{1}", key, type(value)))
    if isinstance(value, list):
        raise TypeError(
            _format("Value of keybinding {0!A} cannot be a list", key))
    raise TypeError(
        _format("Value of keybinding {0!A} has an invalid type: {1}",
                key, type(value)))
Return a keybinding value, from dict item input (key+value). Key may be None (for unnamed keys). The returned value will be a CIM-typed value, except if it was provided as Python number type (in which case it will remain that type). Invalid types or values cause TypeError or ValueError to be raised.
17,763
def set_frameworkcontroller_config(experiment_config, port, config_file_name):
    # Dict-key, file-mode and separator literals approximated; originals elided.
    frameworkcontroller_config_data = dict()
    frameworkcontroller_config_data['frameworkcontroller_config'] = experiment_config['frameworkcontrollerConfig']
    response = rest_put(cluster_metadata_url(port), json.dumps(frameworkcontroller_config_data), REST_TIME_OUT)
    err_message = None
    if not response or not response.status_code == 200:
        if response is not None:
            err_message = response.text
        _, stderr_full_path = get_log_path(config_file_name)
        with open(stderr_full_path, 'a+') as fout:
            fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    result, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not result:
        return result, message
    return set_trial_config(experiment_config, port, config_file_name), err_message
set frameworkcontroller configuration
17,764
def from_file(cls, fname, form=None):
    try:
        tg = TableGroup.from_file(fname)
        opfname = None
    except JSONDecodeError:
        tg = TableGroup.fromvalue(cls.MD)
        opfname = fname
    if len(tg.tables) != 1:
        raise ValueError()
    metadata = tg.common_props
    metadata.update(fname=Path(fname), form=form)
    return cls(
        *[{k: None if (k != cls.GRAPHEME_COL and v == cls.NULL) else v
           for k, v in d.items()}
          for d in tg.tables[0].iterdicts(fname=opfname)],
        **metadata)
Read an orthography profile from a metadata file or a default tab-separated profile file.
17,765
def evaluatePotentials(Pot, R, z, phi=None, t=0., dR=0, dphi=0):
    return _evaluatePotentials(Pot, R, z, phi=phi, t=t, dR=dR, dphi=dphi)
NAME: evaluatePotentials PURPOSE: convenience function to evaluate a possible sum of potentials INPUT: Pot - potential or list of potentials (dissipative forces in such a list are ignored) R - cylindrical Galactocentric distance (can be Quantity) z - distance above the plane (can be Quantity) phi - azimuth (can be Quantity) t - time (can be Quantity) dR=, dphi= - if set to non-zero integers, return the dR-th, dphi-th derivative instead OUTPUT: Phi(R,z) HISTORY: 2010-04-16 - Written - Bovy (NYU)
17,766
def _get_future_devices(self, context):
    monitor = Monitor.from_netlink(context)
    monitor.filter_by("hidraw")
    monitor.start()
    self._scanning_log_message()
    for device in iter(monitor.poll, None):
        if device.action == "add":
            sleep(1)
            yield device
            self._scanning_log_message()
Return a generator yielding new devices.
17,767
def yearly(self):
    if self._yearly is None:
        self._yearly = YearlyList(self._version, account_sid=self._solution['account_sid'], )
    return self._yearly
Access the yearly :returns: twilio.rest.api.v2010.account.usage.record.yearly.YearlyList :rtype: twilio.rest.api.v2010.account.usage.record.yearly.YearlyList
17,768
def apply_driver_hacks(self, app, info, options):
    options.update(dict(
        json_serializer=lambda data: json.dumps(data, default=encode_node),
        json_deserializer=lambda data: json.loads(data, object_hook=decode_node),
        pool_pre_ping=True,
    ))
    super(QuiltSQLAlchemy, self).apply_driver_hacks(app, info, options)
Set custom SQLAlchemy engine options: - Teach it to encode and decode our node objects - Enable pre-ping (i.e., test the DB connection before trying to use it)
17,769
def decrypt_report(self, device_id, root, data, **kwargs):
    report_key = self._verify_derive_key(device_id, root, **kwargs)
    try:
        from Crypto.Cipher import AES
        import Crypto.Util.Counter
    except ImportError:
        raise NotFoundError
    ctr = Crypto.Util.Counter.new(128)
    encryptor = AES.new(bytes(report_key[:16]), AES.MODE_CTR, counter=ctr)
    decrypted = encryptor.decrypt(bytes(data))
    return {'data': decrypted}
Decrypt a buffer of report data on behalf of a device. Args: device_id (int): The id of the device that we should encrypt for root (int): The root key type that should be used to generate the report data (bytearray): The data that we should decrypt **kwargs: There are additional specific keyword args that are required depending on the root key used. Typically, you must specify - report_id (int): The report id - sent_timestamp (int): The sent timestamp of the report These two bits of information are used to construct the per report signing and encryption key from the specific root key type. Returns: dict: The decrypted data and any associated metadata about the data. The data itself must always be a bytearray stored under the 'data' key, however additional keys may be present depending on the encryption method used. Raises: NotFoundError: If the auth provider is not able to decrypt the data.
17,770
def prompt(text, default=None, hide_input=False, confirmation_prompt=False,
           type=None, value_proc=None, prompt_suffix=': ', show_default=True,
           err=False):
    # Control flow and string literals restored following click's well-known
    # prompt() implementation; treat the details as an approximation.
    result = None

    def prompt_func(text):
        f = hide_input and hidden_prompt_func or visible_prompt_func
        try:
            echo(text, nl=False, err=err)
            return f('')
        except (KeyboardInterrupt, EOFError):
            if hide_input:
                echo(None, err=err)
            raise Abort()

    if value_proc is None:
        value_proc = convert_type(type, default)

    prompt = _build_prompt(text, prompt_suffix, show_default, default)

    while 1:
        while 1:
            value = prompt_func(prompt)
            if value:
                break
            elif default is not None:
                return default
        try:
            result = value_proc(value)
        except UsageError as e:
            echo('Error: %s' % e.message, err=err)
            continue
        if not confirmation_prompt:
            return result
        while 1:
            value2 = prompt_func('Repeat for confirmation: ')
            if value2:
                break
        if value == value2:
            return result
        echo('Error: the two entered values do not match', err=err)
Prompts a user for input. This is a convenience function that can be used to prompt a user for input later. If the user aborts the input by sending an interrupt signal, this function will catch it and raise a :exc:`Abort` exception. .. versionadded:: 6.0 Added unicode support for cmd.exe on Windows. .. versionadded:: 4.0 Added the `err` parameter. :param text: the text to show for the prompt. :param default: the default value to use if no input happens. If this is not given it will prompt until it's aborted. :param hide_input: if this is set to true then the input value will be hidden. :param confirmation_prompt: asks for confirmation for the value. :param type: the type to use to check the value against. :param value_proc: if this parameter is provided it's a function that is invoked instead of the type conversion to convert a value. :param prompt_suffix: a suffix that should be added to the prompt. :param show_default: shows or hides the default value in the prompt. :param err: if set to true the file defaults to ``stderr`` instead of ``stdout``, the same as with echo.
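Typical calls, per the documented parameters above (a usage sketch, not part of the source):

name = prompt('Your name', default='World')  # empty input falls back to the default
password = prompt('Password', hide_input=True, confirmation_prompt=True)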
17,771
def grid_edges(shape, inds=None, return_directions=True):
    if inds is None:
        inds = np.arange(np.prod(shape)).reshape(shape)
    if len(shape) == 2:
        edgx = np.c_[inds[:, :-1].ravel(), inds[:, 1:].ravel()]
        edgy = np.c_[inds[:-1, :].ravel(), inds[1:, :].ravel()]
        edges = [edgx, edgy]
        directions = [
            np.ones([edgx.shape[0]], dtype=np.int8) * 0,
            np.ones([edgy.shape[0]], dtype=np.int8) * 1,
        ]
    elif len(shape) == 3:
        edgx = np.c_[inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel()]
        edgy = np.c_[inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel()]
        edgz = np.c_[inds[:-1, :, :].ravel(), inds[1:, :, :].ravel()]
        edges = [edgx, edgy, edgz]
    else:
        logger.error("Expected 2D or 3D data")
    if return_directions:
        directions = []
        for idirection in range(len(shape)):
            directions.append(
                np.ones([edges[idirection].shape[0]], dtype=np.int8) * idirection
            )
    edges = np.concatenate(edges)
    if return_directions:
        edge_dir = np.concatenate(directions)
        return edges, edge_dir
    else:
        return edges
Get list of grid edges :param shape: :param inds: :param return_directions: :return:
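A small worked example for the 2D case (the edge counts follow from the slicing above):

import numpy as np

edges, directions = grid_edges((2, 3), return_directions=True)
# a 2x3 grid has 2*(3-1)=4 x-direction edges and (2-1)*3=3 y-direction edges
print(edges.shape)  # (7, 2)
print(directions)   # [0 0 0 0 1 1 1]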
17,772
def change_event_params(self, handler, **kwargs):
    if not isinstance(handler, Handler):
        raise TypeError("given object must be of type Handler.")
    if not self.remove_handler(handler):
        raise ValueError("You must pass in a valid handler that already exists.")
    self.add_handler(handler.type, handler.actions, **kwargs)
    self.event = handler.event
This allows the client to change the parameters for an event, in the case that there is a desire for slightly different behavior, such as reassigning keys. handler - the handler object that the desired changes are made to. kwargs - the variable number of keyword arguments for the parameters that must match the properties of the corresponding event.
17,773
def geohash(self, key, member, *members, **kwargs):
    return self.execute(
        b'GEOHASH', key, member, *members, **kwargs
    )
Returns members of a geospatial index as standard geohash strings. :rtype: list[str or bytes or None]
17,774
def _value_encode(cls, member, value):
    try:
        field_validator = cls.fields[member]
    except KeyError:
        return cls.valueparse.encode(value)
    return field_validator.encode(value)
Internal method used to encode values into the hash. :param member: str :param value: multi :return: bytes
17,775
def replace_event_annotations(event, newanns):
    _humilis = event.get("_humilis", {})
    if not _humilis:
        event["_humilis"] = {"annotation": newanns}
    else:
        event["_humilis"]["annotation"] = newanns
Replace event annotations with the provided ones.
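The function mutates the event in place; a minimal demonstration:

event = {"id": 1, "_humilis": {"annotation": ["old"]}}
replace_event_annotations(event, ["new"])
assert event["_humilis"]["annotation"] == ["new"]

bare = {"id": 2}
replace_event_annotations(bare, ["first"])
assert bare["_humilis"] == {"annotation": ["first"]}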
17,776
def get_client(self, service, region, public=True, cached=True,
               client_class=None):
    if not self.authenticated:
        raise exc.NotAuthenticated("You must authenticate before trying "
                                   "to create clients.")
    clt = ep = None
    mapped_service = self.service_mapping.get(service) or service
    svc = self.services.get(mapped_service)
    if svc:
        ep = svc.endpoints.get(region)
    if ep:
        clt = ep._get_client(public=public, cached=cached,
                             client_class=client_class)
    if not clt:
        raise exc.NoSuchClient("There is no client available for the "
                               "service '%s' in the region '%s'." % (service, region))
    return clt
Returns the client object for the specified service and region. By default the public endpoint is used. If you wish to work with a service's internal endpoints, specify `public=False`. By default, if a client has already been created for the given service, region, and public values, that will be returned. To force a new client to be created, pass 'cached=False'.
17,777
def extract_alzip(archive, compression, cmd, verbosity, interactive, outdir):
    return [cmd, '-d', outdir, archive]  # '-d' (destination dir) flag assumed; literal elided in source
Extract an ALZIP archive.
17,778
def node_type(node: astroid.node_classes.NodeNG) -> Optional[type]:
    types = set()
    try:
        for var_type in node.infer():
            if var_type == astroid.Uninferable or is_none(var_type):
                continue
            types.add(var_type)
            if len(types) > 1:
                return None
    except astroid.InferenceError:
        return None
    return types.pop() if types else None
Return the inferred type for `node` If there is more than one possible type, or if inferred type is Uninferable or None, return None
17,779
def maybe_download_and_extract(data_root: str, url: str) -> None:
    # Log-message, header and file-mode literals approximated; originals elided.
    os.makedirs(data_root, exist_ok=True)
    filename = sanitize_url(url)
    filepath = os.path.join(data_root, filename)
    if os.path.exists(filepath):
        logging.info('File %s already exists, skipping download', filepath)
        return
    try:
        logging.info('Downloading %s', filepath)
        req = requests.get(url, stream=True)
        req.raise_for_status()
    except requests.exceptions.RequestException as ex:
        logging.error('Failed to download %s (%s)', filepath, ex)
        return
    expected_size = int(req.headers.get('Content-Length'))
    chunk_size = 1024
    with open(filepath, 'wb') as f_out,\
            click.progressbar(req.iter_content(chunk_size=chunk_size),
                              length=expected_size/chunk_size) as bar:
        for chunk in bar:
            if chunk:
                f_out.write(chunk)
                f_out.flush()
    try:
        shutil.unpack_archive(filepath, data_root)
    except (shutil.ReadError, ValueError):
        logging.info('%s is not a recognized archive; leaving it as-is', filepath)
Maybe download the specified file to ``data_root`` and try to unpack it with ``shutil.unpack_archive``. :param data_root: data root to download the files to :param url: url to download from
17,780
def _get_domain(conn, *vms, **kwargs):
    # kwargs keys and message literals approximated; originals elided in source.
    ret = list()
    lookup_vms = list()
    all_vms = []
    if kwargs.get('active', True):
        for id_ in conn.listDomainsID():
            all_vms.append(conn.lookupByID(id_).name())
    if kwargs.get('inactive', True):
        for id_ in conn.listDefinedDomains():
            all_vms.append(id_)
    if not all_vms:
        raise CommandExecutionError('No virtual machines found.')
    if vms:
        for name in vms:
            if name not in all_vms:
                raise CommandExecutionError('The VM "{name}" is not present'.format(name=name))
            else:
                lookup_vms.append(name)
    else:
        lookup_vms = list(all_vms)
    for name in lookup_vms:
        ret.append(conn.lookupByName(name))
    return len(ret) == 1 and not kwargs.get('iterable') and ret[0] or ret
Return a domain object for the named VM or return domain object for all VMs. :params conn: libvirt connection object :param vms: list of domain names to look for :param iterable: True to return an array in all cases
17,781
def _get_envelopes_centroid(envelopes):
    xmin, xmax, ymin, ymax = _get_envelopes_min_maxes(envelopes)
    return np.mean([xmin, xmax]), np.mean([ymin, ymax])  # wrapped in lists: np.mean(a, b) would treat b as an axis
Returns the centroid of an inputted geometry column. Not currently in use, as this is now handled by this library's CRS wrapper directly. Light wrapper over ``_get_envelopes_min_maxes``. Parameters ---------- envelopes : GeoSeries The envelopes of the given geometries, as would be returned by e.g. ``data.geometry.envelope``. Returns ------- (mean_x, mean_y) : tuple The data centroid.
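The list-wrapping fix matters because np.mean(a, b) treats b as an axis argument; the centroid is simply the mean of the extremes:

import numpy as np

# with xmin, xmax, ymin, ymax = 0, 4, 1, 3 the centroid is (2.0, 2.0)
print(np.mean([0, 4]), np.mean([1, 3]))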
17,782
def _determine_slot(self, *args):
    if len(args) <= 1:
        raise RedisClusterException("No way to dispatch this command to Redis Cluster. Missing key.")
    command = args[0]
    if command in ['EVAL', 'EVALSHA']:  # command names assumed; literals elided in source
        numkeys = args[2]
        keys = args[3: 3 + numkeys]
        slots = {self.connection_pool.nodes.keyslot(key) for key in keys}
        if len(slots) != 1:
            raise RedisClusterException("{0} - all keys must map to the same key slot".format(command))
        return slots.pop()
    key = args[1]
    return self.connection_pool.nodes.keyslot(key)
figure out what slot based on command and args
17,783
def process_byte(self, tag):
    tag.set_address(self.normal_register.current_address)
    self.normal_register.move_to_next_address(1)
Process byte type tags
17,784
def add_task(self, task, func=None, **kwargs):
    if not self.__tasks:
        raise Exception("Tasks subparsers is disabled")
    if 'help' not in kwargs:  # kwarg key assumed; literal elided in source
        if func.__doc__:
            kwargs['help'] = func.__doc__
    task_parser = self.__tasks.add_parser(task, **kwargs)
    if self.__add_vq:
        self.add_vq(task_parser)
    if func is not None:
        task_parser.set_defaults(func=func)
    return task_parser
Add a task parser
17,785
def get_states(self, dump_optimizer=False):
    return pickle.dumps((self.states, self.optimizer)
                        if dump_optimizer else self.states)
Gets updater states. Parameters ---------- dump_optimizer : bool, default False Whether to also save the optimizer itself. This would also save optimizer information such as learning rate and weight decay schedules.
17,786
def parse_250_row(row: list) -> BasicMeterData:
    return BasicMeterData(row[1], row[2], row[3], row[4], row[5], row[6], row[7],
                          float(row[8]), parse_datetime(row[9]), row[10], row[11],
                          row[12], float(row[13]), parse_datetime(row[14]), row[15],
                          row[16], row[17], float(row[18]), row[19], row[20],
                          parse_datetime(row[21]), parse_datetime(row[22]))
Parse basic meter data record (250)
17,787
def _thread_loop(self):
    last_check_time = time.time()
    messages = []
    while True:
        time_since_last_check = time.time() - last_check_time
        time_till_next_check = max(0, self.interval - time_since_last_check)
        try:
            message = self._queue.get(timeout=time_till_next_check)
        except queue.Empty:
            pass
        else:
            if message is None:
                break
            messages.append(message)
            should_stop = False
            while True:
                try:
                    message = self._queue.get_nowait()
                except queue.Empty:
                    break
                if message is None:
                    should_stop = True
                    break
                messages.append(message)
            if should_stop:
                break
        current_time = time.time()
        if current_time - last_check_time >= self.interval:
            last_check_time = current_time
            for i in range(0, len(messages), self.batch_size):
                batch = messages[i:i + self.batch_size]
                self.send_socket(b''.join(batch))  # empty joiner assumed: messages carry their own separators
            messages = []
    for i in range(0, len(messages), self.batch_size):
        batch = messages[i:i + self.batch_size]
        self.send_socket(b''.join(batch))
Background thread used when Sender is in asynchronous/interval mode.
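A lifecycle sketch, assuming a Sender object with _queue, interval, batch_size, and send_socket; the constructor shown is hypothetical:

import threading

sender = Sender(interval=1.0, batch_size=50)  # hypothetical constructor
worker = threading.Thread(target=sender._thread_loop, daemon=True)
worker.start()
sender._queue.put(b'metric.a:1|c')
sender._queue.put(None)   # sentinel: flush remaining messages and exit
worker.join()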
17,788
def __get_dynamic_attr(self, attname, arg, default=None):
    try:
        attr = getattr(self, attname)
    except AttributeError:
        return default
    if callable(attr):
        # Python 2 style introspection: plain functions and methods carry
        # func_code; other callables expose it via __call__.
        if hasattr(attr, 'func_code'):
            argcount = attr.func_code.co_argcount
        else:
            argcount = attr.__call__.func_code.co_argcount
        if argcount == 2:  # self + one argument
            return attr(arg)
        else:
            return attr()
    return attr
Gets "something" from self, which could be an attribute or a callable with either 0 or 1 arguments (besides self). Stolen from django.contrib.syntication.feeds.Feed.
17,789
def on_message(self, headers, message):
    with self._handover_cond:
        # Wait until the previous notification has been picked up.
        while len(self._handover_dict) > 0:
            self._handover_cond.wait(self._wait_timeout)
        self._handover_dict['headers'] = headers
        try:
            msg_obj = json.loads(message)
        except Exception:
            raise
        self._handover_dict['message'] = msg_obj
        self._handover_cond.notifyAll()
Event method that gets called when this listener has received a JMS message (representing an HMC notification). Parameters: headers (dict): JMS message headers, as described for the `headers` tuple item returned by the :meth:`~zhmcclient.NotificationReceiver.notifications` method. message (string): JMS message body as a string, which contains a serialized JSON object. The JSON object is described in the `message` tuple item returned by the :meth:`~zhmcclient.NotificationReceiver.notifications` method.
17,790
def remove_role(self, databaseName, roleName, collectionName=None):
    role = {"databaseName": databaseName, "roleName": roleName}
    if collectionName:
        role["collectionName"] = collectionName
    if role in self.roles:
        self.roles.remove(role)
Remove one role Args: databaseName (str): Database Name roleName (RoleSpecs): role Keyword Args: collectionName (str): Collection
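A usage sketch, assuming a user object of the class that defines remove_role, with roles stored as plain dicts:

user.roles = [
    {"databaseName": "reporting", "roleName": "read"},
    {"databaseName": "app", "roleName": "readWrite", "collectionName": "events"},
]
user.remove_role("app", "readWrite", collectionName="events")
# user.roles now holds only the reporting role; the dict must match exactly.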
17,791
def has_elem(elem_ref): if not is_elem_ref(elem_ref): return False elif elem_ref[0] == ElemRefObj: return hasattr(elem_ref[1], elem_ref[2]) elif elem_ref[0] == ElemRefArr: return elem_ref[2] in elem_ref[1]
Has element? Checks whether the referenced attribute or key exists. :param elem_ref: element reference triple (kind, container, key) :return: True if the referenced attribute or key is present
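A behavior sketch of the reference triples the check expects, assuming the module's ElemRefObj/ElemRefArr tags and that is_elem_ref accepts these tuples:

# An elem_ref is (kind, container, key):
#   (ElemRefObj, obj, 'attr')     -> hasattr(obj, 'attr')
#   (ElemRefArr, seq_or_map, key) -> key in seq_or_map
class Box: pass
box = Box()
box.size = 3
has_elem((ElemRefObj, box, 'size'))    # -> True
has_elem((ElemRefArr, {'a': 1}, 'a'))  # -> True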
17,792
def salm2map(salm, s, lmax, Ntheta, Nphi): if Ntheta < 2 or Nphi < 1: raise ValueError("Input values of Ntheta={0} and Nphi={1} ".format(Ntheta, Nphi) + "are not allowed; they must be greater than 1 and 0, respectively.") if lmax < 1: raise ValueError("Input value of lmax={0} ".format(lmax) + "is not allowed; it must be greater than 0 and should be greater " + "than |s|={0}.".format(abs(s))) import numpy as np salm = np.ascontiguousarray(salm, dtype=np.complex128) if salm.shape[-1] < N_lm(lmax): raise ValueError("The input `salm` array of shape {0} is too small for the stated `lmax` of {1}. ".format(salm.shape, lmax) + "Perhaps you forgot to include the (zero) modes with ell<|s|.") map = np.empty(salm.shape[:-1]+(Ntheta, Nphi), dtype=np.complex128) if salm.ndim>1: s = np.ascontiguousarray(s, dtype=np.intc) if s.ndim != salm.ndim-1 or np.product(s.shape) != np.product(salm.shape[:-1]): s = s*np.ones(salm.shape[:-1], dtype=np.intc) _multi_salm2map(salm, map, s, lmax, Ntheta, Nphi) else: _salm2map(salm, map, s, lmax, Ntheta, Nphi) return map
Convert mode weights of spin-weighted function to values on a grid Parameters ---------- salm : array_like, complex, shape (..., (lmax+1)**2) Input array representing mode weights of the spin-weighted function. This array may be multi-dimensional, where initial dimensions may represent different times, for example, or separate functions on the sphere. The final dimension should give the values of the mode weights, in the order described below in the 'Notes' section. s : int or array, int, shape (...) Spin weight of the function. If `salm` is multidimensional and this is an array, its dimensions must match the first dimensions of `salm`, and the different values are the spin weights of the different functions represented by those dimensions. Otherwise, if `salm` is multidimensional and `s` is a single integer, all functions are assumed to have the same spin weight. lmax : int The largest `ell` value present in the input array. Ntheta : int Number of points in the output grid along the polar angle. Nphi : int Number of points in the output grid along the azimuthal angle. Returns ------- map : ndarray, complex, shape (..., Ntheta, Nphi) Values of the spin-weighted function on grid points of the sphere. This array is shaped like the input `salm` array, but has one extra dimension. The final two dimensions describe the values of the function on the sphere. See also -------- spinsfast.map2salm : Roughly the inverse of this function. Notes ----- The input `salm` data should be given in increasing order of `ell` value, always starting with (ell, m) = (0, 0) even if `s` is nonzero, proceeding to (1, -1), (1, 0), (1, 1), etc. Explicitly, the ordering should match this: [f_lm(ell, m) for ell in range(lmax+1) for m in range(-ell, ell+1)] The input is converted to a contiguous complex numpy array if necessary. The output data are presented on this grid of spherical coordinates: np.array([[f(theta, phi) for phi in np.linspace(0.0, 2*np.pi, num=Nphi, endpoint=False)] for theta in np.linspace(0.0, np.pi, num=Ntheta, endpoint=True)]) Note that `map2salm` and `salm2map` are not true inverses of each other for several reasons. First, modes with `ell < |s|` should always be zero; they are simply assumed to be zero on input to `salm2map`. It is also possible to define a `map` that violates this assumption -- for example, one having a nonzero average value over the sphere even though the function has nonzero spin `s`, which is impossible to represent in mode weights. Also, it is possible to define a map of a function with so much angular dependence that it cannot be captured with the given `lmax` value. For example, a discontinuous function will never be perfectly resolved. Example ------- >>> s = -2 >>> lmax = 8 >>> Ntheta = Nphi = 2*lmax + 1 >>> modes = np.zeros(spinsfast.N_lm(lmax), dtype=np.complex128) >>> modes[spinsfast.lm_ind(2, 2, 8)] = 1.0 >>> values = spinsfast.salm2map(modes, s, lmax, Ntheta, Nphi)
17,793
def _get_streams(self):
    token = self.login(self.get_option("username"), self.get_option("password"))
    m = self._url_re.match(self.url)
    scode = m and m.group("scode") or self.get_option("station_code")

    res = self.session.http.get(self._guide_url, params=dict(token=token))

    channels = OrderedDict()
    for t in itertags(res.text, "a"):
        # Attribute names reconstructed; the original literals were lost in extraction.
        if t.attributes.get("cs"):
            channels[t.attributes.get("cs").lower()] = t.attributes.get("title").replace("Watch ", "").strip()

    if not scode:
        log.error("Station code not provided, use --ustvnow-station-code.")
        log.info("Available stations are: \n{0}".format(
            '\n'.join('    {0} ({1})'.format(c, n) for c, n in channels.items())))
        return

    if scode in channels:
        log.debug("Finding streams for: {0}", channels.get(scode))

        r = self.session.http.get(self._stream_url,
                                  params={"scode": scode,
                                          "token": token,
                                          "br_n": "Firefox",
                                          "br_v": "52",
                                          "br_d": "desktop"},
                                  headers={"User-Agent": useragents.FIREFOX})

        data = self.session.http.json(r)
        return HLSStream.parse_variant_playlist(self.session, data["stream"])
    else:
        log.error("Invalid station-code: {0}", scode)
Finds the streams from ustvnow.com.
17,794
def dependency_context(package_names, aggressively_remove=False):
    installed_packages = []
    log = logging.getLogger(__name__)
    try:
        if not package_names:
            logging.debug('No packages requested')
        if package_names:
            # Lock path and log messages reconstructed; the original string
            # literals were lost in extraction.
            lock = yg.lockfile.FileLock('/tmp/.pkg-context-lock', timeout=30 * 60)
            log.info('Acquiring lock to perform install')
            lock.acquire()
            log.info('Installing ' + ', '.join(package_names))
            output = subprocess.check_output(
                ['sudo', 'aptitude', 'install', '-y'] + package_names,
                stderr=subprocess.STDOUT,
            )
            log.debug('Aptitude output:\n%s', output)
            installed_packages = jaraco.apt.parse_new_packages(
                output, include_automatic=aggressively_remove)
            if not installed_packages:
                lock.release()
            log.info('Installed ' + ', '.join(installed_packages))
        yield installed_packages
    except subprocess.CalledProcessError:
        log.error("Error occurred installing packages")
        raise
    finally:
        if installed_packages:
            log.info('Removing ' + ', '.join(installed_packages))
            subprocess.check_call(
                ['sudo', 'aptitude', 'remove', '-y'] + installed_packages,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
            )
            lock.release()
Install the supplied packages and yield. Finally, remove all packages that were installed. Currently assumes 'aptitude' is available.
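A usage sketch; since the function yields, it must be wrapped as a context manager (the @contextmanager decorator was presumably stripped with the rest of the module header), and the package name here is hypothetical:

from contextlib import contextmanager

dependency_context = contextmanager(dependency_context)

with dependency_context(['libfoo-dev'], aggressively_remove=True) as new_pkgs:
    print('temporarily installed:', new_pkgs)
    # build something against libfoo here
# On exit, every package the install pulled in has been removed.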
17,795
def are_you_sure(msg=''):
    print(msg)
    from utool import util_arg
    from utool import util_str
    # Flag and prompt literals reconstructed; the originals were lost in extraction.
    override = util_arg.get_argflag(('--yes', '--y', '-y'))
    if override:
        print('... yes')
        return True
    valid_ans = ['yes', 'y']
    valid_prompt = util_str.conj_phrase(valid_ans, 'or')
    ans = input('Are you sure?\n Enter %s to accept\n' % valid_prompt)
    return ans.lower() in valid_ans
r""" Prompts user to accept or checks command line for -y Args: msg (str): Returns: bool: accept or not
17,796
def EnumMissingModules(): module_handle = ctypes.c_ulong() count = ctypes.c_ulong() process_handle = ctypes.windll.kernel32.OpenProcess( PROCESS_QUERY_INFORMATION | PROCESS_VM_READ, 0, os.getpid()) ctypes.windll.psapi.EnumProcessModules(process_handle, ctypes.byref(module_handle), ctypes.sizeof(module_handle), ctypes.byref(count)) if sys.maxsize > 2**32: handle_type = ctypes.c_ulonglong else: handle_type = ctypes.c_ulong module_list = (handle_type * (count.value // ctypes.sizeof(handle_type)))() ctypes.windll.psapi.EnumProcessModulesEx(process_handle, ctypes.byref(module_list), ctypes.sizeof(module_list), ctypes.byref(count), 2) for x in module_list: module_filename = win32process.GetModuleFileNameEx(process_handle, x) for pattern in MODULE_PATTERNS: if pattern.match(os.path.basename(module_filename)): yield module_filename for venv_file in FILES_FROM_VIRTUALENV: yield os.path.join(sys.prefix, venv_file)
Enumerate all modules which match the patterns MODULE_PATTERNS. PyInstaller often fails to locate all dlls which are required at runtime. Since all the client modules are imported here, we can simply introspect the modules loaded in the current running process, and every one matching the patterns is copied into the client package. Yields: a source file for a linked dll.
17,797
def move_wheel_files(
    name, req, wheeldir, user=False, home=None, root=None,
    pycompile=True, scheme=None, isolated=False, prefix=None,
    warn_script_location=True
):
    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep
    installed = {}
    changed = set()
    generated = []

    # NOTE: the original defines nested helpers (record_installed, clobber)
    # at this point; extraction flattened them, leaving this fragment of
    # clobber's per-file copy logic inline.
    if os.path.exists(destfile):
        os.unlink(destfile)
    shutil.copyfile(srcfile, destfile)
    st = os.stat(srcfile)
    if hasattr(os, "utime"):
        os.utime(destfile, (st.st_atime, st.st_mtime))
    if os.access(srcfile, os.X_OK):
        st = os.stat(srcfile)
        permissions = (
            st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        )
        os.chmod(destfile, permissions)
    changed = False
    if fixer:
        changed = fixer(destfile)
    record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points.
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools.
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])
    maker.clobber = True

    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))
        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))
        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))

        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))
        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))

        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    if len(console) > 0:
        generated_console_scripts = maker.make_multiple(
            ['%s = %s' % kv for kv in console.items()]
        )
        generated.extend(generated_console_scripts)

        if warn_script_location:
            msg = message_about_scripts_not_on_PATH(generated_console_scripts)
            if msg is not None:
                logger.warning(msg)

    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer.
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed.
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            outrows = get_csv_rows_for_installed(
                reader, installed=installed, changed=changed,
                generated=generated, lib_dir=lib_dir,
            )
            writer = csv.writer(record_out)
            for row in sorted_outrows(outrows):
                writer.writerow(row)
    shutil.move(temp_record, record)
Install a wheel
17,798
def run(self, concurrency=0, outline=False, tail=False, dump=False, *args, **kwargs):
    plan = self._generate_plan(tail=tail)
    if not plan.keys():
        logger.warn('WARNING: No stacks detected (error in config?)')
    if not outline and not dump:
        plan.outline(logging.DEBUG)
        logger.debug("Launching stacks: %s", ", ".join(plan.keys()))
        walker = build_walker(concurrency)
        plan.execute(walker)
    else:
        if outline:
            plan.outline()
        if dump:
            plan.dump(directory=dump, context=self.context, provider=self.provider)
Kicks off the build/update of the stacks in the stack_definitions. This is the main entry point for the Builder.
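A call sketch, assuming a Builder-like action with its context and provider already wired up; the Build constructor shown is hypothetical:

action = Build(context=context, provider=provider)  # hypothetical setup
action.run(concurrency=4)            # walk the graph, 4 stacks in flight
action.run(outline=True)             # just print the plan, change nothing
action.run(dump='./rendered-plan')   # write the plan out to a directory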
17,799
def enable_network(self, *hostnames): def hostname_filter(hostname, req): if isregex(hostname): return hostname.match(req.url.hostname) return req.url.hostname == hostname for hostname in hostnames: self.use_network_filter(partial(hostname_filter, hostname)) self.networking = True
Enables real networking mode, optionally passing one or multiple hostnames that would be used as filter. If at least one hostname matches with the outgoing traffic, the request will be executed via the real network. Arguments: *hostnames: optional list of host names to enable real network against them. hostname value can be a regular expression.
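A usage sketch; `engine` stands in for whatever mock engine defines this method, and the host names are hypothetical:

import re

engine.enable_network('api.internal.test', re.compile(r'.*\.cdn\.test$'))
# Requests to api.internal.test, or any host matching the regex, now
# bypass the mocks and hit the real network; everything else is still
# intercepted.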