Unnamed: 0
int64
0
389k
code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
376,300
def cancel(self): if self.OBSERVE_UPDATES: self.detach() self.ioloop.add_callback(self.cancel_timeouts)
Detach strategy from its sensor and cancel ioloop callbacks.
376,301
def pdna_network_from_bbox( lat_min=None, lng_min=None, lat_max=None, lng_max=None, bbox=None, network_type=, two_way=True, timeout=180, memory=None, max_query_area_size=50 * 1000 * 50 * 1000): nodes, edges = network_from_bbox(lat_min=lat_min, lng_min=lng_min, lat_max=lat_max, lng_max=lng_max, bbox=bbox, network_type=network_type, two_way=two_way, timeout=timeout, memory=memory, max_query_area_size=max_query_area_size) return Network( nodes[], nodes[], edges[], edges[], edges[[]])
Make a Pandana network from a bounding lat/lon box request to the Overpass API. Distance will be in the default units meters. Parameters ---------- lat_min, lng_min, lat_max, lng_max : float bbox : tuple Bounding box formatted as a 4 element tuple: (lng_max, lat_min, lng_min, lat_max) network_type : {'walk', 'drive'}, optional Specify whether the network will be used for walking or driving. A value of 'walk' attempts to exclude things like freeways, while a value of 'drive' attempts to exclude things like bike and walking paths. two_way : bool, optional Whether the routes are two-way. If True, node pairs will only occur once. timeout : int, optional the timeout interval for requests and to pass to Overpass API memory : int, optional server memory allocation size for the query, in bytes. If none, server will use its default allocation size max_query_area_size : float, optional max area for any part of the geometry, in the units the geometry is in Returns ------- network : pandana.Network
376,302
def validate_format(self, obj, pointer=None): if in self.attrs: substituted = { : , : , : , : , : , : , }.get(self.attrs[], self.attrs[]) logger.debug(, substituted) try: return self.formats[substituted](obj) except ValidationError as error: logger.error(error) self.fail(, obj, pointer) return obj
================= ============ Expected draft04 Alias of ----------------- ------------ date-time rfc3339.datetime email email hostname hostname ipv4 ipv4 ipv6 ipv6 uri uri ================= ============
376,303
def set_chat_description( self, chat_id: Union[int, str], description: str ) -> bool: peer = self.resolve_peer(chat_id) if isinstance(peer, (types.InputPeerChannel, types.InputPeerChat)): self.send( functions.messages.EditChatAbout( peer=peer, about=description ) ) else: raise ValueError("The chat_id \"{}\" belongs to a user".format(chat_id)) return True
Use this method to change the description of a supergroup or a channel. You must be an administrator in the chat for this to work and must have the appropriate admin rights. Args: chat_id (``int`` | ``str``): Unique identifier (int) or username (str) of the target chat. description (``str``): New chat description, 0-255 characters. Returns: True on success. Raises: :class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error. ``ValueError`` if a chat_id doesn't belong to a supergroup or a channel.
376,304
def from_str(text): segment_list = chat_message_parser.parse(text) return [ChatMessageSegment(segment.text, **segment.params) for segment in segment_list]
Construct :class:`ChatMessageSegment` list parsed from a string. Args: text (str): Text to parse. May contain line breaks, URLs and formatting markup (simplified Markdown and HTML) to be converted into equivalent segments. Returns: List of :class:`ChatMessageSegment` objects.
376,305
def readTempC(self): v = self._read32() if v & 0x7: return float() if v & 0x80000000: v >>= 18 v -= 16384 else: v >>= 18 return v * 0.25
Return the thermocouple temperature value in degrees celsius.
376,306
def get_none_policy_text(none_policy, verbose=False ): if none_policy is NonePolicy.SKIP: return "accept None without performing validation" if verbose else elif none_policy is NonePolicy.FAIL: return "fail on None without performing validation" if verbose else elif none_policy is NonePolicy.VALIDATE: return "validate None as any other values" if verbose else elif none_policy is NoneArgPolicy.SKIP_IF_NONABLE_ELSE_FAIL: return "accept None without validation if the argument is optional, otherwise fail on None" if verbose \ else elif none_policy is NoneArgPolicy.SKIP_IF_NONABLE_ELSE_VALIDATE: return "accept None without validation if the argument is optional, otherwise validate None as any other " \ "values" if verbose else else: raise ValueError( + str(none_policy))
Returns a user-friendly description of a NonePolicy taking into account NoneArgPolicy :param none_policy: :param verbose: :return:
376,307
def _make_2d_array(self, data): if data.shape != self.mesh_idx.shape: raise ValueError() if np.ma.is_masked(data): raise ValueError() data2d = np.zeros(self._mesh_shape).astype(data.dtype) data2d[self.mesh_yidx, self.mesh_xidx] = data if len(self.mesh_idx) == self.nboxes: return data2d else: mask2d = np.ones(data2d.shape).astype(np.bool) mask2d[self.mesh_yidx, self.mesh_xidx] = False return np.ma.masked_array(data2d, mask=mask2d)
Convert a 1D array of mesh values to a masked 2D mesh array given the 1D mesh indices ``mesh_idx``. Parameters ---------- data : 1D `~numpy.ndarray` A 1D array of mesh values. Returns ------- result : 2D `~numpy.ma.MaskedArray` A 2D masked array. Pixels not defined in ``mesh_idx`` are masked.
376,308
def _bitResponseToValue(bytestring): _checkString(bytestring, description=, minlength=1, maxlength=1) RESPONSE_ON = RESPONSE_OFF = if bytestring == RESPONSE_ON: return 1 elif bytestring == RESPONSE_OFF: return 0 else: raise ValueError(.format(bytestring))
Convert a response string to a numerical value. Args: bytestring (str): A string of length 1. Can be for example ``\\x01``. Returns: The converted value (int). Raises: TypeError, ValueError
376,309
def get_results(self, *, block=False, timeout=None): deadline = None if timeout: deadline = time.monotonic() + timeout / 1000 for message in self.messages: if deadline: timeout = max(0, int((deadline - time.monotonic()) * 1000)) yield message.get_result(block=block, timeout=timeout)
Get the results of each job in the pipeline. Parameters: block(bool): Whether or not to block until a result is set. timeout(int): The maximum amount of time, in ms, to wait for a result when block is True. Defaults to 10 seconds. Raises: ResultMissing: When block is False and the result isn't set. ResultTimeout: When waiting for a result times out. Returns: A result generator.
376,310
def generate_map_from_dataset(self, l_dataset): l_map = [] headers = l_dataset.get_header() print(headers) for row_num, col in enumerate(headers): if col != : l_map.append( + str(row_num) + + l_dataset.force_to_string(col)) for row_num, col in enumerate(headers): if col != : vals = l_dataset.get_distinct_values_from_cols([col]) l_map.append( + col + + str(len(vals[0])) ) for row_num, col in enumerate(headers): if col != : col_vals = l_dataset.count_unique_values(row_num, col, 10) for val_num, v in enumerate(col_vals): l_map.append( + col + + str(val_num) + + v ) return l_map
creates a map file (in the standard CSV format) based on columns of a dataset. 1. read column names, lookup names in list 2. read column content, get highest match of distinct values from ontology lists (eg, Years, countries, cities, ages)
376,311
def asset_save_callback(self, *args, **kwargs): tasksel = self.browser.assetbrws.selected_indexes(2) if not tasksel or not tasksel[0].isValid(): self.statusbar.showMessage() return taskitem = tasksel[0].internalPointer() task = taskitem.internal_data() rtype = djadapter.RELEASETYPES[] descriptor = self.asset_descriptor_le.text() if not self.check_selection_for_save(task, rtype, descriptor): return tfi = TaskFileInfo.get_next(task=task, releasetype=rtype, typ=self._filetype, descriptor=descriptor) self._save_tfi(tfi, asset=True)
Callback for the shot open button :returns: None :rtype: None :raises: None
376,312
def get_auth_settings(): from yacms.conf import settings try: auth_settings = (settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET, settings.TWITTER_ACCESS_TOKEN_KEY, settings.TWITTER_ACCESS_TOKEN_SECRET) except AttributeError: return None else: return auth_settings if all(auth_settings) else None
Returns all the key/secret settings for Twitter access, only if they're all defined.
376,313
def fetch(self, category=CATEGORY_ENTRY): kwargs = {} items = super().fetch(category, **kwargs) return items
Fetch the entries from the url. The method retrieves all entries from a RSS url :param category: the category of items to fetch :returns: a generator of entries
376,314
def register_views(app_name, view_filename, urlpatterns=None): app_module = __import__(app_name) view_module = getattr(app_module, view_filename) views = dir(view_module) for view_name in views: if view_name.endswith(): view = getattr(view_module, view_name) if isinstance(view, object): if urlpatterns: urlpatterns += patterns(, url(r % view_name, view.as_view(), name=view_name), ) else: urlpatterns = patterns(, url(r % view_name, view.as_view(), name=view_name), ) else: pass return urlpatterns
app_name APP名 view_filename views 所在的文件 urlpatterns url中已经存在的urlpatterns return urlpatterns 只导入View结尾的,是类的视图
376,315
def chunk(sentence, format=None): sentence = pos_tag(sentence) crf_model = CRFChunkingPredictor.Instance() result = crf_model.predict(sentence, format) return result
Vietnamese chunking Parameters ========== sentence: {unicode, str} raw sentence Returns ======= tokens: list of tuple with word, pos tag, chunking tag tagged sentence Examples -------- >>> # -*- coding: utf-8 -*- >>> from underthesea import chunk >>> sentence = "Nghi vấn 4 thi thể Triều Tiên trôi dạt bờ biển Nhật Bản" >>> chunk(sentence) [('Nghi vấn', 'N', 'B-NP'), ('4', 'M', 'B-NP'), ('thi thể', 'N', 'B-NP'), ('Triều Tiên', 'Np', 'B-NP'), ('trôi dạt', 'V', 'B-VP'), ('bờ biển', 'N', 'B-NP'), ('Nhật Bản', 'Np', 'B-NP')]
376,316
def disembowel(rest): "Disembowel some(one|thing)!" if rest: stabee = rest karma.Karma.store.change(stabee, -1) else: stabee = "someone nearby" return ( "/me takes %s, brings them down to the basement, ties them to a " "leaky pipe, and once bored of playing with them mercifully " "ritually disembowels them..." % stabee)
Disembowel some(one|thing)!
376,317
def set_resource(self, service_name, resource_name, to_cache): self.services.setdefault(service_name, {}) self.services[service_name].setdefault(, {}) self.services[service_name][].setdefault(resource_name, {}) options = self.services[service_name][][resource_name] classpath = self.build_classpath(to_cache.__bases__[0]) if classpath == : classpath = options[classpath] = to_cache
Sets the resource class within the cache. :param service_name: The service a given ``Resource`` talks to. Ex. ``sqs``, ``sns``, ``dynamodb``, etc. :type service_name: string :param resource_name: The name of the ``Resource``. Ex. ``Queue``, ``Notification``, ``Table``, etc. :type resource_name: string :param to_cache: The class to be cached for the service. :type to_cache: class
376,318
def plot_kde(data, ax, title=None, color=, fill_bt=True): if isinstance(data, list): data = np.asarray(data) e = kde.KDEUnivariate(data.astype(np.float)) e.fit() ax.plot(e.support, e.density, color=color, alpha=0.9, linewidth=2.25) if fill_bt: ax.fill_between(e.support, e.density, alpha=.35, zorder=1, antialiased=True, color=color) if title is not None: t = ax.set_title(title) t.set_y(1.05)
Plot a smoothed (by kernel density estimate) histogram. :type data: numpy array :param data: An array containing the data to be plotted :type ax: matplotlib.Axes :param ax: The Axes object to draw to :type title: str :param title: The plot title :type color: str :param color: The color of the histogram line and fill. Note that the fill will be plotted with an alpha of 0.35. :type fill_bt: bool :param fill_bt: Specify whether to fill the area beneath the histogram line
376,319
def global_instance(cls): try: return GLOBAL_BATCHER.instance except AttributeError: instance = PrioritizedBatcher( **getattr(settings, , {}) ) GLOBAL_BATCHER.instance = instance return instance
Return a per-thread global batcher instance.
376,320
def get_modules(self): if not self.project_abspath: raise TypeError("project_abspath can not be empty.") packages_abspath = self.get_package_abspath() for package_abspath in packages_abspath: self.get_module_name(package_abspath) return self._modules
Get modules by project_abspath and packages_scan. Traverse all files under folder packages_scan which set by customer. And get all modules name.
376,321
def config_hook(self, func): argspec = inspect.getargspec(func) args = [, , ] if not (argspec.args == args and argspec.varargs is None and argspec.keywords is None and argspec.defaults is None): raise ValueError( ) self.config_hooks.append(func) return self.config_hooks[-1]
Decorator to add a config hook to this ingredient. Config hooks need to be a function that takes 3 parameters and returns a dictionary: (config, command_name, logger) --> dict Config hooks are run after the configuration of this Ingredient, but before any further ingredient-configurations are run. The dictionary returned by a config hook is used to update the config updates. Note that they are not restricted to the local namespace of the ingredient.
376,322
def _get_band(self, high_res, low_res, color, ratio): if self.high_resolution_band == color: ret = high_res else: ret = low_res * ratio ret.attrs = low_res.attrs.copy() return ret
Figure out what data should represent this color.
376,323
def bare_except(logical_line, noqa): r if noqa: return regex = re.compile(r"except\s*:") match = regex.match(logical_line) if match: yield match.start(), "E722 do not use bare except'"
r"""When catching exceptions, mention specific exceptions whenever possible. Okay: except Exception: Okay: except BaseException: E722: except:
376,324
def hrule(width=None, char=None): width = width or HRWIDTH char = char or HRCHAR return echo(getline(char, width))
Outputs or returns a horizontal line of the given character and width. Returns printed string.
376,325
def _color_level(str_, level): fore_color, back_color, styles = _get_style_from_config(level) return _color(str_, fore_color, back_color, styles)
Return the string wrapped with the appropriate styling for the message level. The styling will be determined based on the rez configuration. Args: str_ (str): The string to be wrapped. level (str): The message level. Should be one of 'critical', 'error', 'warning', 'info' or 'debug'. Returns: str: The string styled with the appropriate escape sequences.
376,326
def clear(self): if len(self.list): self._LOG.debug("List cleared.") self.list.clear()
Clears the server list
376,327
def get_validators_description(view): action = getattr(view, , None) if action is None: return description = validators = getattr(view, action + , []) for validator in validators: validator_description = get_entity_description(validator) description += + validator_description if description else validator_description return + description if description else
Returns validators description in format: ### Validators: * validator1 name * validator1 docstring * validator2 name * validator2 docstring
376,328
def run_task(message): task = Task.objects.get(pk=message[]) if task.allow_overlap: task.run(message) else: if not task.running: task.running = True task.save() try: task.run(message) finally: task.running = False task.save()
Internal ``RUN_TASK`` consumer to run the task's callable
376,329
def nacm_rule_list_rule_comment(self, **kwargs): config = ET.Element("config") nacm = ET.SubElement(config, "nacm", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-acm") rule_list = ET.SubElement(nacm, "rule-list") name_key = ET.SubElement(rule_list, "name") name_key.text = kwargs.pop() rule = ET.SubElement(rule_list, "rule") name_key = ET.SubElement(rule, "name") name_key.text = kwargs.pop() comment = ET.SubElement(rule, "comment") comment.text = kwargs.pop() callback = kwargs.pop(, self._callback) return callback(config)
Auto Generated Code
376,330
def expand(doc, doc_url="param://", params=None): if doc_url.find("://") == -1: Log.error("{{url}} must have a prototcol (eg http://) declared", url=doc_url) url = URL(doc_url) url.query = set_default(url.query, params) phase1 = _replace_ref(doc, url) phase2 = _replace_locals(phase1, [phase1]) return wrap(phase2)
ASSUMING YOU ALREADY PULED THE doc FROM doc_url, YOU CAN STILL USE THE EXPANDING FEATURE USE mo_json_config.expand({}) TO ASSUME CURRENT WORKING DIRECTORY :param doc: THE DATA STRUCTURE FROM JSON SOURCE :param doc_url: THE URL THIS doc CAME FROM (DEFAULT USES params AS A DOCUMENT SOURCE) :param params: EXTRA PARAMETERS NOT FOUND IN THE doc_url PARAMETERS (WILL SUPERSEDE PARAMETERS FROM doc_url) :return: EXPANDED JSON-SERIALIZABLE STRUCTURE
376,331
def run_mash(self): self.pipeline = True mash.Mash(inputobject=self, analysistype=)
Run MASH to determine the closest refseq genomes
376,332
def has_comic(name): names = [ ("Creators/%s" % name).lower(), ("DrunkDuck/%s" % name).lower(), ("GoComics/%s" % name).lower(), ("KeenSpot/%s" % name).lower(), ("ComicGenesis/%s" % name).lower(), ("SmackJeeves/%s" % name).lower(), ] for scraperclass in get_scraperclasses(): lname = scraperclass.getName().lower() if lname in names or lname == name.lower(): return True return False
Check if comic name already exists.
376,333
def _finish(self): self.finished = True if self._callback: self._callback(self) self._finish_event.set()
Mark transition as finished and execute callback.
376,334
def __ordinal(self, num): if 10 <= num % 100 < 20: return str(num) + else: ord_info = {1: , 2: , 3: }.get(num % 10, ) return .format(num, ord_info)
Returns the ordinal number of a given integer, as a string. eg. 1 -> 1st, 2 -> 2nd, 3 -> 3rd, etc.
376,335
def profile_device_delete(name, device_name, remote_addr=None, cert=None, key=None, verify_cert=True): * profile = profile_get( name, remote_addr, cert, key, verify_cert, _raw=True ) return _delete_property_dict_item( profile, , device_name )
Delete a profile device. name : The name of the profile to delete the device. device_name : The name of the device to delete. remote_addr : An URL to a remote Server, you also have to give cert and key if you provide remote_addr and its a TCP Address! Examples: https://myserver.lan:8443 /var/lib/mysocket.sock cert : PEM Formatted SSL Certificate. Examples: ~/.config/lxc/client.crt key : PEM Formatted SSL Key. Examples: ~/.config/lxc/client.key verify_cert : True Wherever to verify the cert, this is by default True but in the most cases you want to set it off as LXD normaly uses self-signed certificates. CLI Example: .. code-block:: bash $ salt '*' lxd.profile_device_delete autostart eth1
376,336
def set(self, instance, value, **kw): return self._set(instance, value, **kw)
Set the value of the field
376,337
def key_to_dimension(self, fact_key, fact_join_col, dimension_name, dimension_join_col, dimension_key): self.sql_text += "UPDATE " + self.fact_table + " op SET op." + fact_key + " = NVL(\n" self.sql_text += " (SELECT MAX (ip." + dimension_key + ")\n" self.sql_text += " FROM " + dimension_name + " ip WHERE " self.sql_text += fact_join_col + " = \n ip." + dimension_join_col + "), -1); \n\n"
create SQL to join a fact table key based on "join_col" to a dimension The fact table is aliased as "op" and the join dimension is aliased as "ip" meaning you can pass substrings or SQL to match values. e.g. the command: aup.key_to_dimension('GENDER_KEY', 'substr(op.GENDER, 1,1)', 'tbl_GENDER', 'gender_code', 'GENDER_KEY') will generate the code: UPDATE table op SET op.gender_key = NVL ( (SELECT MAX (ip.gender_key) FROM tbl_GENDER ip WHERE ip.gender_code = SUBSTR (op.gender, 1, 1)), -1);
376,338
def ReverseComplementMembership(x, y, **kwargs): return ast.Complement( ast.Membership(y, x, **kwargs), **kwargs)
Change (x doesn't contain y) to not(y in x).
376,339
def _options(): opts = sys.argv[1:] return [click.Option((v.split()[0],)) for v in opts if v[0] == and v != ]
Collect all command line options
376,340
def is_valid_pep484_type_hint(typ_hint, allow_forward_refs: bool = False): try: if isinstance(typ_hint, type): return True except: pass try: if allow_forward_refs and is_forward_ref(typ_hint): return True except: pass try: return is_union_type(typ_hint) or is_typevar(typ_hint) except: return False
Returns True if the provided type is a valid PEP484 type hint, False otherwise. Note: string type hints (forward references) are not supported by default, since callers of this function in parsyfiles lib actually require them to be resolved already. :param typ_hint: :param allow_forward_refs: :return:
376,341
def _endpoint_from_socksport_line(reactor, socks_config): if socks_config.startswith(): return UNIXClientEndpoint(reactor, socks_config[5:]) if in socks_config: socks_config = socks_config.split()[0] if in socks_config: host, port = socks_config.split(, 1) port = int(port) else: host = port = int(socks_config) return TCP4ClientEndpoint(reactor, host, port)
Internal helper. Returns an IStreamClientEndpoint for the given config, which is of the same format expected by the SOCKSPort option in Tor.
376,342
def build_row(self, line): items = [] row = dict(items=items) fields = line.split() image_exts = [, ] if not fields: return row for field in fields: ext = os.path.splitext(field)[-1] if ext.lower() in image_exts: items.append( dict(image=field)) else: items.append( dict(text=field)) return row
Line describes an image or images to show Returns a dict with a list of dicts of image names or text items Examples: # A single image to display >>> x.build_row('foo.png') [{'image': 'foo.png'}] # Two images with text in between: >>> x.build_row('foo.png or bar.jpg') [{'image': 'foo.png'}, {'text': 'or'}, {'image': 'bar.png'}]
376,343
def filter_queryset(self, attrs, queryset): if self.instance is not None: for field_name in self.fields: if field_name not in attrs: attrs[field_name] = getattr(self.instance, field_name) filter_kwargs = { field_name: attrs[field_name] for field_name in self.fields } return queryset.filter(**filter_kwargs)
Filter the queryset to all instances matching the given attributes.
376,344
def writeFromDict(dataDict, headers, csvFile): with open(csvFile, "wb") as f: writer = csv.writer(f, delimiter=",") writer.writerow(headers) for row in sorted(dataDict.keys()): writer.writerow(dataDict[row])
Write dictionary to a CSV, where keys are row numbers and values are a list.
376,345
def get_game_for_worker(map_name, directory_id): if map_name == "v100unfriendly": games = ["chopper_command", "boxing", "asterix", "seaquest"] worker_per_game = 5 elif map_name == "human_nice": games = gym_env.ATARI_GAMES_WITH_HUMAN_SCORE_NICE worker_per_game = 5 else: raise ValueError("Unknown worker to game map name: %s" % map_name) games.sort() game_id = (directory_id - 1) // worker_per_game tf.logging.info("Getting game %d from %s." % (game_id, games)) return games[game_id]
Get game for the given worker (directory) id.
376,346
def provision_system_user(items, database_name, overwrite=False, clear=False, skip_user_check=False): from hfos.provisions.base import provisionList from hfos.database import objectmodels if overwrite is True: hfoslog(, lvl=warn, emitter=) overwrite = False system_user_count = objectmodels[].count({: }) if system_user_count == 0 or clear is False: provisionList(Users, , overwrite, clear, skip_user_check=True) hfoslog(, emitter="PROVISIONS") else: hfoslog(, lvl=warn, emitter=)
Provision a system user
376,347
def main_update(self): try: os.nice(1) except AttributeError as er: pass time.sleep(self.refresh) try: while True: timestamp=time.time() self.commit() except Exception as e: self.error=e raise
Main function called by the updater thread. Direct call is unnecessary.
376,348
def clean_year_month(year, month, month_orig): error = False error_msg = "The date given was invalid." if month_orig not in xrange(1, 13) and month_orig is not None: month = now.month error = error_msg while month > 12: month -= 12 year += 1 while month < 1: month += 12 year -= 1 year, month, error = _check_year(year, month, error, error_msg) return year, month, error
If 'month_orig', which is the month given in the url BEFORE any next/prev query strings have been applied, is out of range, sets month to the current month and returns an error message. Also Returns an error message if the year given is +/- 50 years from now. If 'month', which is the month given in the url AFTER any next/prev query strings have been applied, is out of range, adjusts it to be in range (by also adjusting the year).
376,349
def playlist_create( self, name, description=, *, make_public=False, songs=None ): share_state = if make_public else playlist = self._call( mc_calls.PlaylistsCreate, name, description, share_state ).body if songs: playlist = self.playlist_songs_add(songs, playlist) return playlist
Create a playlist. Parameters: name (str): Name to give the playlist. description (str): Description to give the playlist. make_public (bool, Optional): If ``True`` and account has a subscription, make playlist public. Default: ``False`` songs (list, Optional): A list of song dicts to add to the playlist. Returns: dict: Playlist information.
376,350
def getAssemblies(pth): if not os.path.isfile(pth): pth = check_extract_from_egg(pth)[0][0] if pth.lower().endswith(".manifest"): return [] manifestnm = pth + ".manifest" if os.path.isfile(manifestnm): fd = open(manifestnm, "rb") res = {RT_MANIFEST: {1: {0: fd.read()}}} fd.close() elif not winresource: return [] else: try: res = GetManifestResources(pth) except winresource.pywintypes.error, exc: if exc.args[0] == winresource.ERROR_BAD_EXE_FORMAT: logger.info( , pth) return [] raise rv = [] if RT_MANIFEST in res and len(res[RT_MANIFEST]): for name in res[RT_MANIFEST]: for language in res[RT_MANIFEST][name]: try: manifest = Manifest() manifest.filename = ":".join([pth, str(RT_MANIFEST), str(name), str(language)]) manifest.parse_string(res[RT_MANIFEST][name][language], False) except Exception, exc: logger.error("Can not parse manifest resource %s, %s" "from %s", name, language, pth) logger.exception(exc) else: if manifest.dependentAssemblies: logger.debug("Dependent assemblies of %s:", pth) logger.debug(", ".join([assembly.getid() for assembly in manifest.dependentAssemblies])) rv.extend(manifest.dependentAssemblies) return rv
Return the dependent assemblies of a binary.
376,351
def __geometryToGeomTemplate(self, geometry): template = {"geometryType": None, "geometry" : None} if isinstance(geometry, Polyline): template[] = "esriGeometryPolyline" elif isinstance(geometry, Polygon): template[] = "esriGeometryPolygon" elif isinstance(geometry, Point): template[] = "esriGeometryPoint" elif isinstance(geometry, MultiPoint): template[] = "esriGeometryMultipoint" elif isinstance(geometry, Envelope): template[] = "esriGeometryEnvelope" else: raise AttributeError("Invalid geometry type") template[] = geometry.asDictionary return template
Converts a single geometry object to a geometry service geometry template value. Input: geometry - ArcREST geometry object Output: python dictionary of geometry template
376,352
def bios_settings(self): return bios.BIOSSettings( self._conn, utils.get_subresource_path_by(self, ), redfish_version=self.redfish_version)
Property to provide reference to `BIOSSettings` instance It is calculated once when the first time it is queried. On refresh, this property gets reset.
376,353
def enforce_periodic_boundary_conditions( self ): for s in self.sites: for i in range(3): if s.r[i] < 0.0: s.r[i] += self.cell_lengths[i] if s.r[i] > self.cell_lengths[i]: s.r[i] -= self.cell_lengths[i]
Ensure that all lattice sites are within the central periodic image of the simulation cell. Sites that are outside the central simulation cell are mapped back into this cell. Args: None Returns: None
376,354
def xception_entry(inputs, hidden_dim): with tf.variable_scope("xception_entry"): def xnet_resblock(x, filters, res_relu, name): with tf.variable_scope(name): y = common_layers.separable_conv_block( x, filters, [((1, 1), (3, 3)), ((1, 1), (3, 3))], first_relu=True, padding="SAME", force2d=True, name="sep_conv_block") y = common_layers.pool(y, (3, 3), "MAX", "SAME", strides=(2, 2)) return y + common_layers.conv_block( x, filters, [((1, 1), (1, 1))], padding="SAME", strides=(2, 2), first_relu=res_relu, force2d=True, name="res_conv0") tf.summary.image("inputs", inputs, max_outputs=2) x = common_layers.conv_block( inputs, 32, [((1, 1), (3, 3))], first_relu=False, padding="SAME", strides=(2, 2), force2d=True, name="conv0") x = common_layers.conv_block( x, 64, [((1, 1), (3, 3))], padding="SAME", force2d=True, name="conv1") x = xnet_resblock(x, min(128, hidden_dim), True, "block0") x = xnet_resblock(x, min(256, hidden_dim), False, "block1") return xnet_resblock(x, hidden_dim, False, "block2")
Xception entry flow.
376,355
def get_deposit(self, deposit_id, **params): return self.api_client.get_deposit(self.id, deposit_id, **params)
https://developers.coinbase.com/api/v2#show-a-deposit
376,356
def nintegral(wave, indep_min=None, indep_max=None): r ret = copy.copy(wave) _bound_waveform(ret, indep_min, indep_max) return np.trapz(ret._dep_vector, ret._indep_vector)
r""" Return the numerical integral of a waveform's dependent variable vector. The method used is the `trapezoidal <https://en.wikipedia.org/wiki/Trapezoidal_rule>`_ method :param wave: Waveform :type wave: :py:class:`peng.eng.Waveform` :param indep_min: Independent vector start point of computation :type indep_min: integer or float :param indep_max: Independent vector stop point of computation :type indep_max: integer or float :rtype: float .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]] .. Auto-generated exceptions documentation for .. peng.wave_functions.nintegral :raises: * RuntimeError (Argument \`indep_max\` is not valid) * RuntimeError (Argument \`indep_min\` is not valid) * RuntimeError (Argument \`wave\` is not valid) * RuntimeError (Incongruent \`indep_min\` and \`indep_max\` arguments) .. [[[end]]]
376,357
def envCheckFilter(self, name, attr): flt = self._filters.get(name) if flt: return flt.check(attr) else: raise AttributeError("Undefined filter: %s" % name)
Check if a specific graph attribute is enabled or disabled through the use of a filter based on include_<name> and exclude_<name> environment variables. @param name: Name of the Filter. @param attr: Name of the Attribute. @return: Return True if the attribute is enabled.
376,358
def highres_imu_send(self, time_usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag, abs_pressure, diff_pressure, pressure_alt, temperature, fields_updated, force_mavlink1=False): return self.send(self.highres_imu_encode(time_usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag, abs_pressure, diff_pressure, pressure_alt, temperature, fields_updated), force_mavlink1=force_mavlink1)
The IMU readings in SI units in NED body frame time_usec : Timestamp (microseconds, synced to UNIX time or since system boot) (uint64_t) xacc : X acceleration (m/s^2) (float) yacc : Y acceleration (m/s^2) (float) zacc : Z acceleration (m/s^2) (float) xgyro : Angular speed around X axis (rad / sec) (float) ygyro : Angular speed around Y axis (rad / sec) (float) zgyro : Angular speed around Z axis (rad / sec) (float) xmag : X Magnetic field (Gauss) (float) ymag : Y Magnetic field (Gauss) (float) zmag : Z Magnetic field (Gauss) (float) abs_pressure : Absolute pressure in millibar (float) diff_pressure : Differential pressure in millibar (float) pressure_alt : Altitude calculated from pressure (float) temperature : Temperature in degrees celsius (float) fields_updated : Bitmask for fields that have updated since last message, bit 0 = xacc, bit 12: temperature (uint16_t)
376,359
def convert_html_to_text(value, preserve_urls=False):
    r"""Convert an HTML string to plain text.

    :param value: HTML markup to strip.
    :param preserve_urls: when True, anchor ``href`` targets are kept in the
        output (appended after the link text by the stripper).
    :return: the plain-text content extracted by ``MLStripper``.
    """
    # MLStripper is an HTMLParser subclass; feed/close drive the parse and
    # get_data() returns the accumulated text.
    s = MLStripper(preserve_urls=preserve_urls)
    s.feed(value)
    s.close()
    return s.get_data()
r""" >>> convert_html_to_text( ... ''' ... <html><body> ... Look &amp; click ... <a href="https://example.com">here</a> ... </body></html>''', preserve_urls=True) 'Look & click here (https://example.com)' >>> convert_html_to_text( ... ''' ... <html><body> ... Look &amp; click ... <a href="https://example.com?timestamp=1234">here</a> ... </body></html>''', preserve_urls=True) 'Look & click here (https://example.com?timestamp=1234)' >>> convert_html_to_text( ... ''' ... <html><body> ... Look &#38; click here ... </body></html>''', preserve_urls=True) 'Look & click here' >>> convert_html_to_text( ... ''' ... <html><body> ... Look &amp; click on ... <a href="https://example.com">https://example.com</a> ... </body></html>''', preserve_urls=True) 'Look & click on https://example.com' >>> convert_html_to_text( ... ''' ... <html><body> ... I'm here, <br> click ... <a href="https://example.com">me</a> ... </body></html>''', preserve_urls=True) "I'm here,\nclick me (https://example.com)" >>> convert_html_to_text( ... ''' ... <html><body> ... I'm here, <br/> click ... <a href="https://example.com">me</a> ... </body></html>''', preserve_urls=True) "I'm here,\nclick me (https://example.com)" >>> convert_html_to_text( ... ''' ... <html><body> ... I'm here, <br/> click ... <a href="https://example.com">me</a> ... </body></html>''') "I'm here,\nclick me" >>> convert_html_to_text( ... ''' ... <html><body> ... <p>I'm here!</p> ... <p>Click <a href="https://example.com">me</a></p> ... </body></html>''', preserve_urls=True) "I'm here!\nClick me (https://example.com)\n" >>> convert_html_to_text( ... ''' ... <html> ... <head> ... <title>I'm here</title> ... </head> ... <body> ... <p>I'm here!</p> ... <p>Click <a href="https://example.com">me</a></p> ... </body> ... </html>''', preserve_urls=True) "I'm here!\nClick me (https://example.com)\n"
376,360
def rename_in_module(occurrences_finder, new_name, resource=None, pymodule=None, replace_primary=False, region=None, reads=True, writes=True):
    """Rename matched occurrences inside one module.

    Returns the changed source, or `None` if there are no changes.

    Either *resource* or *pymodule* supplies the source; *region*, when
    given, restricts replacements to offsets inside ``[region[0], region[1])``.
    *reads*/*writes* select which kinds of occurrences are renamed.
    """
    if resource is not None:
        source = resource.read()
    else:
        source = pymodule.source_code
    collector = codeanalyze.ChangeCollector(source)
    for occurrence in occurrences_finder.find_occurrences(resource, pymodule):
        if replace_primary and occurrence.is_a_fixed_primary():
            continue
        if replace_primary:
            start, end = occurrence.get_primary_range()
        else:
            start, end = occurrence.get_word_range()
        written = occurrence.is_written()
        # Skip reads when only writes are requested, and vice versa.
        if (not reads and not written) or (not writes and written):
            continue
        if region is None or region[0] <= start < region[1]:
            collector.add_change(start, end, new_name)
    return collector.get_changed()
Returns the changed source or `None` if there is no changes
376,361
def change_parameters(self, params):
    """Push a flat parameter vector into the approximate distributions.

    *params* is consumed in order: for each distribution in ``self.q``,
    its ``param_no`` parameters are taken from the front of the vector.
    """
    idx = 0
    for dist in self.q:
        for approx_param in range(dist.param_no):
            dist.vi_change_param(approx_param, params[idx])
            idx += 1
Utility function for changing the approximate distribution parameters
376,362
def wrapComponent(comp):
    """Wrap a StimulusComponent with a class adding painting/editing methods.

    The returned object is the same component, but also a subclass of
    QStimulusComponent (matched by class name, falling back to
    QStimulusComponent itself).

    :param comp: component to wrap (subclass of AbstractStimulusComponent)
    :returns: subclass of AbstractStimulusComponent and QStimulusComponent
    """
    # Already-wrapped components expose the Qt painting interface; return as-is.
    # NOTE(review): the probed attribute name was lost in transit; 'paint'
    # matches the QStimulusComponent interface -- confirm against upstream.
    if hasattr(comp, 'paint'):
        return comp
    # Map Q-prefixed wrapper classes in this module by the wrapped class name,
    # e.g. 'PureTone' -> QPureTone.
    module_classes = {name[1:]: obj for name, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass) if obj.__module__ == __name__}
    stimclass = comp.__class__.__name__
    qclass = module_classes.get(stimclass, QStimulusComponent)
    return qclass(comp)
Wraps a StimulusComponent with a class containing methods for painting and editing. Class will in fact, be the same as the component provided, but will also be a subclass of QStimulusComponent :param comp: Component to wrap :type comp: subclass of AbstractStimulusComponent :returns: sublass of AbstractStimulusComponent and QStimulusComponent
376,363
def read(self, limit=-1):
    """Read up to *limit* bytes from the underlying file, bounded by this
    sub-file's extent. See ``file.read``.

    A *limit* of -1 (or anything larger than what remains) reads to the
    end of the sub-file region.
    """
    # Bytes left inside [offset, offset + len) from the parent's position.
    remaining = self.len - self.parent_fd.tell() + self.offset
    if limit == -1 or limit > remaining:
        limit = remaining
    return self.parent_fd.read(limit)
Read content. See file.read
376,364
def rows_to_columns(data, schema=None):
    """Shred rows (list of objects) into columnar form (Dremel-style).

    :param data: array of objects
    :param schema: known SchemaTree; extended in-place with any new
        properties found in the data (unless it is locked)
    :return: a Table of (values, repetition levels, definition levels)

    NOTE(review): several string literals in this block were lost in
    transit; the '.' root-path / child-key and the 'utf8' encoding below
    are reconstructions -- confirm against upstream (mo-parquet).
    """
    if not schema:
        schema = SchemaTree()
    all_schema = schema
    all_leaves = schema.leaves
    values = {full_name: [] for full_name in all_leaves}
    reps = {full_name: [] for full_name in all_leaves}
    defs = {full_name: [] for full_name in all_leaves}

    def _none_to_column(schema, path, rep_level, def_level):
        # Record a missing value for every leaf under *path*.
        for full_path in all_schema.leaves:
            if startswith_field(full_path, path):
                reps[full_path].append(rep_level)
                defs[full_path].append(def_level)

    def _value_to_column(value, schema, path, counters, def_level):
        ptype = type(value)
        ntype, dtype, ltype, jtype, itype, byte_width = python_type_to_all_types[ptype]
        if jtype is NESTED:
            # Arrays: recurse per element, bumping the repetition counters.
            if schema.element.repetition_type != REPEATED:
                Log.error("Expecting {{path|quote}} to be repeated", path=path)
            new_path = path
            if not value:
                _none_to_column(schema, new_path, get_rep_level(counters), def_level)
            else:
                try:
                    new_schema = schema.more.get('.')
                    if not new_schema:
                        if schema.locked:
                            # Schema may not grow: treat the element as required.
                            new_schema = schema
                            schema.element.repetition_type = REQUIRED
                        else:
                            # Derive element schema from the first value.
                            new_path = path
                            new_value = value[0]
                            ptype = type(new_value)
                            new_schema = schema.add(
                                new_path, OPTIONAL, ptype
                            )
                            if new_value is None or python_type_to_json_type[ptype] in PRIMITIVE:
                                # Back-fill levels for all earlier rows.
                                values[new_path] = []
                                reps[new_path] = [0] * counters[0]
                                defs[new_path] = [0] * counters[0]
                    for k, new_value in enumerate(value):
                        new_counters = counters + (k,)
                        _value_to_column(new_value, new_schema, new_path, new_counters, def_level + 1)
                finally:
                    # Restore: the list schema itself stays REPEATED.
                    schema.element.repetition_type = REPEATED
        elif jtype is OBJECT:
            if value is None:
                if schema.element.repetition_type == REQUIRED:
                    Log.error("{{path|quote}} is required", path=path)
                _none_to_column(schema, path, get_rep_level(counters), def_level)
            else:
                if schema.element.repetition_type == REPEATED:
                    Log.error("Expecting {{path|quote}} to be repeated", path=path)
                if schema.element.repetition_type == REQUIRED:
                    new_def_level = def_level
                else:
                    counters = counters + (0,)
                    new_def_level = def_level + 1
                # Known properties first...
                for name, sub_schema in schema.more.items():
                    new_path = concat_field(path, name)
                    new_value = value.get(name, None)
                    _value_to_column(new_value, sub_schema, new_path, counters, new_def_level)
                # ...then properties not yet in the schema.
                for name in set(value.keys()) - set(schema.more.keys()):
                    if schema.locked:
                        Log.error("{{path}} is not allowed in the schema", path=path)
                    new_path = concat_field(path, name)
                    new_value = value.get(name, None)
                    ptype = type(new_value)
                    sub_schema = schema.add(
                        new_path, REPEATED if isinstance(new_value, list) else OPTIONAL, ptype
                    )
                    if python_type_to_json_type[ptype] in PRIMITIVE:
                        # Back-fill levels for all earlier rows.
                        values[new_path] = []
                        reps[new_path] = [0] * counters[0]
                        defs[new_path] = [0] * counters[0]
                    _value_to_column(new_value, sub_schema, new_path, counters, new_def_level)
        else:
            # Primitive leaf value.
            if jtype is STRING:
                value = value.encode('utf8')
            merge_schema(schema, path, value)
            values[path].append(value)
            if schema.element.repetition_type == REQUIRED:
                reps[path].append(get_rep_level(counters))
                defs[path].append(def_level)
            else:
                reps[path].append(get_rep_level(counters))
                defs[path].append(def_level + 1)

    for rownum, new_value in enumerate(data):
        try:
            _value_to_column(new_value, schema, '.', (rownum,), 0)
        except Exception as e:
            Log.error("can not encode {{row|json}}", row=new_value, cause=e)

    return Table(values, reps, defs, len(data), schema)
:param data: array of objects :param schema: Known schema, will be extended to include all properties found in data :return: Table
376,365
def run(self, command, application):
    """Get or set the shell profile.

    Called with no argument, displays the current profile (or a hint when
    none is configured). Called with one argument, sets the application's
    profile to that value. Anything else prints usage to the error stream.
    """
    argc = len(command)
    if argc == 1:
        current = application.profile
        if current is None:
            self._output.write(
                "Current shell profile: no profile configured\n"
                "You can change profiles using: .profile profile-name\n")
        else:
            self._output.write("Current shell profile: %s\n" % current)
    elif argc == 2:
        new_profile_name = command[1]
        application.profile = new_profile_name
        self._output.write("Current shell profile changed to: %s\n"
                           % new_profile_name)
    else:
        self._err.write("Usage:\n%s\n" % self.USAGE)
Get or set the profile. If .profile is called with no args, the current profile is displayed. If the .profile command is called with a single arg, then the current profile for the application will be set to the new value.
376,366
def _wrap_value_with_context(self, tokens: List[Token], start: int, end: int) -> Extraction:
    """Package ``tokens[start:end]`` as an Extraction with token and
    character offsets.

    Non-Token entries contribute their string form and get char offsets
    of -1 (no source position available).
    """
    # NOTE(review): the join separator literal was lost in transit; a single
    # space matches the usual token-joining convention -- confirm upstream.
    text = ' '.join(x.orth_ if isinstance(x, Token) else x for x in tokens[start:end])
    first = tokens[start]
    last = tokens[end - 1]
    return Extraction(
        text,
        self.name,
        start_token=start,
        end_token=end,
        start_char=first.idx if isinstance(first, Token) else -1,
        end_char=last.idx + len(last.orth_) if isinstance(last, Token) else -1,
    )
Wraps the final result
376,367
def pbis(a):
    """Return the end point (x, y) of a reflected sun ray, given an angle *a*.

    The point lies on the unit circle at angle ``3*a - pi``.
    """
    theta = 3 * a - math.pi
    return (math.cos(theta), math.sin(theta))
End point of a reflected sun ray, given an angle a.
376,368
def main(argv=None):
    """Count document hits from an access logfile and record them in the DB.

    Parses the log with a hostname-specific URL pattern, inserts per-document
    hit counts for the covered time span, then refreshes hit rankings.
    Returns 0 on success.

    NOTE(review): several string literals in this block (argument names,
    defaults, the replace() arguments and the SQL statement) were lost in
    transit; the empty argument slots below are placeholders, not valid
    Python, and need to be restored from the original source.
    """
    parser = create_parser(, description=__doc__)
    parser.add_argument(, default=,
                        help="hostname of the site (default: cnx.org)")
    parser.add_argument(, default=LOG_FORMAT_GZ,
                        choices=LOG_FORMATS,
                        help="(default: {})".format(LOG_FORMAT_GZ))
    parser.add_argument(, help="path to the logfile.")
    args = parser.parse_args(argv)
    opener = LOG_FORMAT_OPENERS_MAPPING[args.log_format]

    # Presumably escapes dots so the hostname can be embedded in the URL
    # regex template -- confirm against the original literals.
    hostname = args.hostname.replace(, )
    url_pattern = URL_PATTERN_TMPLT.format(hostname)
    url_pattern = re.compile(url_pattern)

    with opener(args.log_file) as log:
        hits, start_timestamp, end_timestamp = parse_log(log, url_pattern)

    settings = get_app_settings_from_arguments(args)
    connection_string = settings[config.CONNECTION_STRING]
    db_connection = psycopg2.connect(connection_string)
    # The with-block commits on success, rolls back on error.
    with db_connection:
        with db_connection.cursor() as cursor:
            for ident_hash, hit_count in hits.items():
                cursor.execute(, (start_timestamp, end_timestamp,
                                  hit_count, ident_hash))
            cursor.execute("SELECT update_hit_ranks();")
    db_connection.close()
    return 0
Count the hits from logfile.
376,369
def parse_rawprofile_blocks(text):
    """Split raw profiler output into blocks along delimiters and put the
    delimiters back into the resulting list.

    NOTE(review): both delimiter string literals (and the regex prefix passed
    to ``ut.regex_split``) were lost in transit; the bare assignments below
    are placeholders, not valid Python, and need restoring from the original.
    """
    delim = 
    delim2 = 
    # Split on the delimiter, then re-prefix every block after the first so
    # each block carries its delimiter again.
    profile_block_list = ut.regex_split( + delim, text)
    for ix in range(1, len(profile_block_list)):
        profile_block_list[ix] = delim2 + profile_block_list[ix]
    return profile_block_list
Split the file into blocks along delimiters and put the delimiters back into the list
376,370
def get_lyrics_genius(song_title):
    """Scrape the lyrics of *song_title* from Genius.com.

    Uses the Genius API (/search, then the song resource) to resolve the
    song page URL, then parses the lyrics block out of the page HTML.

    NOTE(review): the header/key and BeautifulSoup selector literals were
    lost in transit; the values below follow the documented Genius API
    auth scheme and the site's lyrics markup -- confirm against upstream.
    """
    base_url = "http://api.genius.com"
    headers = {'Authorization': 'Bearer %s' % (GENIUS_KEY)}
    search_url = base_url + "/search"
    data = {'q': song_title}
    response = requests.get(search_url, data=data, headers=headers)
    json = response.json()
    # Take the top search hit.
    song_api_path = json["response"]["hits"][0]["result"]["api_path"]
    song_url = base_url + song_api_path
    response = requests.get(song_url, headers=headers)
    json = response.json()
    path = json["response"]["song"]["path"]
    page_url = "http://genius.com" + path
    page = requests.get(page_url)
    soup = BeautifulSoup(page.text, "html.parser")
    div = soup.find('div', {'class': 'lyrics'})
    lyrics = div.find('p').getText()
    return lyrics
Scrapes the lyrics from Genius.com
376,371
def _viewbox_set(self, viewbox): self._viewbox = viewbox viewbox.events.mouse_press.connect(self.viewbox_mouse_event) viewbox.events.mouse_release.connect(self.viewbox_mouse_event) viewbox.events.mouse_move.connect(self.viewbox_mouse_event) viewbox.events.mouse_wheel.connect(self.viewbox_mouse_event) viewbox.events.resize.connect(self.viewbox_resize_event)
Friend method of viewbox to register itself.
376,372
def partialReleaseComplete():
    """Build a PARTIAL RELEASE COMPLETE message (GSM 04.08 Section 9.1.27)."""
    header = TpPd(pd=0x6)
    body = MessageType(mesType=0xf)
    return header / body
PARTIAL RELEASE COMPLETE Section 9.1.27
376,373
def _init_append(self):
    """Initialize the file object for append ('a') mode.

    Loads the existing content into the write buffer and moves the seek
    position to the end so subsequent writes are appended.
    """
    # Seed the buffer with the current content so appended bytes follow it.
    self._write_buffer[:] = self._readall()
    self._seek = self._size
Initializes the file in 'a' (append) mode.
376,374
def connect(filename: str, mode: str = 'r+', *, validate: bool = True, spec_version: str = "2.0.1") -> LoomConnection:
    """Establish a connection to a .loom file.

    Args:
        filename: Path to the Loom file to open.
        mode: 'r+' (read/write) or 'r' (read-only). The default literal was
            lost in transit; restored to 'r+' as stated by the docstring.
        validate: Validate the file against the Loom format specification.
        spec_version: Spec version to validate against (e.g. "2.0.1").

    Returns:
        A LoomConnection instance. Typically used as a context manager so
        the file is closed automatically. Raises if validation fails.
    """
    return LoomConnection(filename, mode, validate=validate, spec_version=spec_version)
Establish a connection to a .loom file. Args: filename: Path to the Loom file to open mode: Read/write mode, 'r+' (read/write) or 'r' (read-only), defaults to 'r+' validate: Validate the file structure against the Loom file format specification spec_version: The loom file spec version to validate against (e.g. "2.0.1" or "old") Returns: A LoomConnection instance. Remarks: This function should typically be used as a context manager (i.e. inside a ``with``-block): .. highlight:: python .. code-block:: python import loompy with loompy.connect("mydata.loom") as ds: print(ds.ca.keys()) This ensures that the file will be closed automatically when the context block ends Note: if validation is requested, an exception is raised if validation fails.
376,375
def MobileDeviceConfigurationProfile(self, data=None, subset=None):
    """Return a MobileDeviceConfigurationProfile JSS object.

    data -- ID/name lookup value, or None to fetch the full list.
    subset -- optional subset specification forwarded to the factory.
    """
    # Delegates construction/fetching to the shared object factory.
    return self.factory.get_object(
        jssobjects.MobileDeviceConfigurationProfile, data, subset)
{dynamic_docstring}
376,376
def from_s3_json(cls, bucket_name, key, json_path=None, key_mapping=None,
                 aws_profile=None, aws_access_key_id=None,
                 aws_secret_access_key=None, region_name=None):
    """Load database credentials from a JSON file stored on S3.

    :param bucket_name: str, S3 bucket name
    :param key: str, object key within the bucket
    :param aws_profile: profile name; leave None when running on AWS with
        an instance role
    :param aws_access_key_id: str, not recommended
    :param aws_secret_access_key: str, not recommended
    :param region_name: str
    """
    import boto3

    session = boto3.Session(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        region_name=region_name,
        profile_name=aws_profile,
    )
    s3_object = session.resource("s3").Bucket(bucket_name).Object(key)
    body = s3_object.get()["Body"].read().decode("utf-8")
    return cls._from_json_data(json.loads(body), json_path, key_mapping)
Load database credential from json on s3. :param bucket_name: str :param key: str :param aws_profile: if None, assume that you are using this from AWS cloud. (service on the same cloud doesn't need profile name) :param aws_access_key_id: str, not recommend to use :param aws_secret_access_key: str, not recommend to use :param region_name: str
376,377
def __modify(self, checkout_id, **kwargs):
    """Call the `/checkout/modify` WePay endpoint.

    Extra keyword parameters (``access_token``, ``batch_mode``,
    ``batch_reference_id``, ``api_version``) are forwarded via make_call.

    NOTE(review): the param key literal was lost in transit; 'checkout_id'
    matches the WePay API parameter name -- confirm against upstream.
    """
    params = {'checkout_id': checkout_id}
    return self.make_call(self.__modify, params, kwargs)
Call documentation: `/checkout/modify <https://www.wepay.com/developer/reference/checkout#modify>`_, plus extra keyword parameters: :keyword str access_token: will be used instead of instance's ``access_token``, with ``batch_mode=True`` will set `authorization` param to it's value. :keyword bool batch_mode: turn on/off the batch_mode, see :class:`wepay.api.WePay` :keyword str batch_reference_id: `reference_id` param for batch call, see :class:`wepay.api.WePay` :keyword str api_version: WePay API version, see :class:`wepay.api.WePay`
376,378
def _map_from_binaries(self, eopatch, dst_shape, request_data):
    """Burn one binary class into the feature raster.

    Each request represents a binary class which is mapped to a scalar
    raster value; non-zero pixels of the reprojected mask overwrite the
    existing raster in-place.
    """
    # Start from the existing feature raster if present, otherwise a
    # no-data canvas of the destination shape.
    if self.feature_name in eopatch[self.feature_type]:
        raster = eopatch[self.feature_type][self.feature_name].squeeze()
    else:
        raster = np.ones(dst_shape, dtype=self.raster_dtype) * self.no_data_val
    # Reproject the incoming binary mask, then overwrite only where it is set.
    new_raster = self._reproject(eopatch, self._to_binary_mask(request_data))
    raster[new_raster != 0] = new_raster[new_raster != 0]
    return raster
Each request represents a binary class which will be mapped to the scalar `raster_value`
376,379
def synchronizeResponse(self, pid, vendorSpecific=None):
    """CNRead.synchronize(session, pid) -> boolean. POST /synchronize.

    Args:
        pid: identifier of the object to synchronize.
        vendorSpecific: optional vendor-specific request headers.

    NOTE(review): the field-name and path literals were lost in transit;
    'pid' and 'synchronize' follow the DataONE CNRead API -- confirm.
    """
    mmp_dict = {'pid': pid}
    return self.POST(['synchronize'], fields=mmp_dict, headers=vendorSpecific)
CNRead.synchronize(session, pid) → boolean POST /synchronize. Args: pid: vendorSpecific:
376,380
def userInformation(MoreData_presence=0):
    """Build a USER INFORMATION message (GSM 04.08 Section 9.3.31).

    :param MoreData_presence: when 1, append the More Data IE header.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x20)
    c = UserUser()
    packet = a / b / c
    # Fixed: 'is 1' compared object identity, not value (SyntaxWarning on
    # CPython 3.8+ and unreliable for non-cached ints); use equality.
    if MoreData_presence == 1:
        d = MoreDataHdr(ieiMD=0xA0, eightBitMD=0x0)
        packet = packet / d
    return packet
USER INFORMATION Section 9.3.31
376,381
def register_dataframe_method(method):
    """Register a function as a method attached to the Pandas DataFrame.

    Example
    -------
    .. code-block:: python

        @register_dataframe_method
        def print_column(df, col):
            '''Print the dataframe column given'''
            print(df[col])
    """
    def inner(*args, **kwargs):
        class AccessorMethod(object):
            # Accessor shim: pandas hands it the DataFrame; calls forward to
            # *method* with the DataFrame as first argument.
            def __init__(self, pandas_obj):
                self._obj = pandas_obj

            @wraps(method)
            def __call__(self, *args, **kwargs):
                return method(self._obj, *args, **kwargs)

        register_dataframe_accessor(method.__name__)(AccessorMethod)
        return method

    # inner() is invoked immediately: registration happens at decoration time
    # and the original function is handed back unchanged, so the decorator is
    # used without parentheses.
    return inner()
Register a function as a method attached to the Pandas DataFrame. Example ------- .. code-block:: python @register_dataframe_method def print_column(df, col): '''Print the dataframe column given''' print(df[col])
376,382
def points_are_in_a_straight_line(points, tolerance=1e-7):
    """Check whether a set of points fall on a straight line.

    Forms triangles from the first two points and each remaining point;
    the points are colinear iff every such triangle's area is within
    *tolerance*.

    Args:
        points (list(np.array)): Cartesian coordinates of each point.
        tolerance (optional:float): maximum triangle area still considered
            colinear. Default 1e-7.

    Returns:
        (bool): True if all points are colinear within the tolerance.
    """
    a, b = points[0], points[1]
    return all(
        area_of_a_triangle_in_cartesian_space(a, b, c) <= tolerance
        for c in points[2:]
    )
Check whether a set of points fall on a straight line. Calculates the areas of triangles formed by triplets of the points. Returns False is any of these areas are larger than the tolerance. Args: points (list(np.array)): list of Cartesian coordinates for each point. tolerance (optional:float): the maximum triangle size for these points to be considered colinear. Default is 1e-7. Returns: (bool): True if all points fall on a straight line (within the allowed tolerance).
376,383
def serviceViewChangerOutBox(self, limit: int = None) -> int:
    """Service at most *limit* messages from the view_changer's outBox.

    A falsy *limit* (None or 0) means no limit.

    :return: the number of messages serviced.
    """
    handled = 0
    outbox = self.view_changer.outBox
    while outbox and (not limit or handled < limit):
        handled += 1
        msg = outbox.popleft()
        if isinstance(msg, (InstanceChange, ViewChangeDone)):
            self.send(msg)
        else:
            logger.error("Received msg {} and don't know how to handle it".
                         format(msg))
    return handled
Service at most `limit` number of messages from the view_changer's outBox. :return: the number of messages successfully serviced.
376,384
def z(self, position=None):
    """Set/Get the actor position along the z axis.

    With no argument, returns the current z coordinate; otherwise moves
    the actor to the new z (updating the trail if one exists) and returns
    self for chaining.
    """
    current = self.GetPosition()
    if position is None:
        return current[2]
    self.SetPosition(current[0], current[1], position)
    if self.trail:
        self.updateTrail()
    return self
Set/Get actor position along z axis.
376,385
def pop_parameter(key):
    """Remove and return the parameter registered under *key*.

    Args:
        key (str): Key of the parameter; may be scoped with '/'
            (e.g. ``"scope/name"``), in which case the lookup recurses
            into the leading scope.

    Returns:
        ~nnabla.Variable: the parameter if found, otherwise None.

    NOTE(review): the delimiter literals were lost in transit; '/' matches
    the nnabla parameter-scope convention -- confirm against upstream.
    """
    names = key.split('/')
    if len(names) > 1:
        with parameter_scope(names[0]):
            return pop_parameter('/'.join(names[1:]))
    global current_scope
    param = current_scope.get(key, None)
    if param is not None:
        del current_scope[key]
    return param
Remove and get parameter by key. Args: key(str): Key of parameter. Returns: ~nnabla.Variable Parameter if key found, otherwise None.
376,386
def _coligative(self, rho, A, fav):
    """Miscellaneous (colligative) properties of humid air.

    Parameters
    ----------
    rho : float
        Density, [kg/m³]
    A : float
        Mass fraction of dry air in humid air, [kg/kg]
    fav : dict
        Helmholtz energy and its derivatives.

    Returns
    -------
    prop : dict
        mu  : relative chemical potential, [kJ/kg]
        muw : chemical potential of water, [kJ/kg]
        M   : molar mass of humid air, [g/mol]
        HR  : humidity ratio, [-]
        xa  : mole fraction of dry air, [-]
        xw  : mole fraction of water, [-]

    References
    ----------
    IAPWS, Guideline on an Equation of State for Humid Air in Contact with
    Seawater and Ice, Table 12, http://www.iapws.org/relguide/SeaAir.html
    """
    xa = A * Mw / Ma / (1 - A * (1 - Mw / Ma))
    prop = {
        "mu": fav["fira"],
        "muw": fav["fir"] + rho * fav["fird"] - A * fav["fira"],
        "M": 1 / ((1 - A) / Mw + A / Ma),
        "HR": 1 / A - 1,
        "xa": xa,
        "xw": 1 - xa,
    }
    return prop
Miscelaneous properties of humid air Parameters ---------- rho : float Density, [kg/m³] A : float Mass fraction of dry air in humid air, [kg/kg] fav : dict dictionary with helmholtz energy and derivatives Returns ------- prop : dict Dictionary with calculated properties: * mu: Relative chemical potential, [kJ/kg] * muw: Chemical potential of water, [kJ/kg] * M: Molar mass of humid air, [g/mol] * HR: Humidity ratio, [-] * xa: Mole fraction of dry air, [-] * xw: Mole fraction of water, [-] References ---------- IAPWS, Guideline on an Equation of State for Humid Air in Contact with Seawater and Ice, Consistent with the IAPWS Formulation 2008 for the Thermodynamic Properties of Seawater, Table 12, http://www.iapws.org/relguide/SeaAir.html
376,387
def to_csv(self, file):
    """Write all trajectories of this collection to a csv file with the
    headers 'description', 'time' and 'value'.

    :param file: a file object to write to
    """
    file.write("description,time,value\n")
    for trajectory in self:
        for timepoint, value in trajectory:
            row = "%s,%f,%f\n" % (trajectory.description.symbol,
                                  timepoint, value)
            file.write(row)
Write all the trajectories of a collection to a csv file with the headers 'description', 'time' and 'value'. :param file: a file object to write to :type file: :class:`file` :return:
376,388
def start(self):
    """Create an SSL connection to the iDigi server and send a
    ConnectionRequest message.

    Raises:
        Exception: if a socket is already established, or if the
            connection attempt fails (the partially opened socket is
            closed and cleared first).
    """
    self.log.info("Starting SSL Session for Monitor %s." % self.monitor_id)
    if self.socket is not None:
        raise Exception("Socket already established for %s." % self)

    try:
        # Create the TCP socket and wrap it in SSL before connecting.
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if self.ca_certs is not None:
            # Verify the server certificate against the provided CA bundle.
            self.socket = ssl.wrap_socket(self.socket,
                                          cert_reqs=ssl.CERT_REQUIRED,
                                          ca_certs=self.ca_certs)
        else:
            self.socket = ssl.wrap_socket(self.socket)

        self.socket.connect((self.client.hostname, PUSH_SECURE_PORT))
        # Non-blocking: reads are driven by the owner's event loop.
        self.socket.setblocking(0)
    except Exception as exception:
        # Clean up the half-open socket before propagating the error.
        self.socket.close()
        self.socket = None
        raise exception

    self.send_connection_request()
Creates a SSL connection to the iDigi Server and sends a ConnectionRequest message.
376,389
def center_text_cursor(object):
    """Decorator that centers the text cursor while *object* executes.

    If the first positional argument of the wrapped callable exposes
    ``setCenterOnScroll``, it is enabled before the call and disabled
    afterwards.

    :param object: Object to decorate.
    :type object: object
    :return: Wrapped object.
    :rtype: object
    """
    @functools.wraps(object)
    def center_text_cursor_wrapper(*args, **kwargs):
        target = foundations.common.get_first_item(args) if args else None
        can_center = hasattr(target, "setCenterOnScroll")
        if can_center:
            target.setCenterOnScroll(True)
        value = object(*args, **kwargs)
        if can_center:
            target.setCenterOnScroll(False)
        return value

    return center_text_cursor_wrapper
Centers the text cursor position. :param object: Object to decorate. :type object: object :return: Object. :rtype: object
376,390
def copy_vpcs_configs(source, target):
    """Copy any VPCS configs to the converted topology.

    :param str source: Source topology directory
    :param str target: Target topology files directory

    NOTE(review): the path-component literals below were lost in transit;
    the empty argument slots are placeholders, not valid Python, and need
    restoring from the original source.
    """
    # Collect the VPCS startup files plus the history file, if present.
    vpcs_files = glob.glob(os.path.join(source, , ))
    vpcs_hist = os.path.join(source, , )
    vpcs_config_path = os.path.join(target, , )
    if os.path.isfile(vpcs_hist):
        vpcs_files.append(vpcs_hist)
    # Only create the target config dir when there is something to copy.
    if len(vpcs_files) > 0:
        os.makedirs(vpcs_config_path)
        for old_file in vpcs_files:
            new_file = os.path.join(vpcs_config_path,
                                    os.path.basename(old_file))
            shutil.copy(old_file, new_file)
Copy any VPCS configs to the converted topology :param str source: Source topology directory :param str target: Target topology files directory
376,391
def read_string(self, content):
    """Read a Python string that contains C++ code and return the
    declarations tree.

    The content is written to a temporary header file, parsed with
    read_file, and the temporary file is always removed afterwards.

    NOTE(review): the suffix literal was lost in transit; '.h' matches the
    C++-header use -- confirm against upstream.
    """
    header_file = utils.create_temp_file_name(suffix='.h')
    with open(header_file, "w+") as f:
        f.write(content)
    # try/finally replaces the original duplicated cleanup (once in the
    # except path, once after) with a single guaranteed removal.
    try:
        decls = self.read_file(header_file)
    finally:
        utils.remove_file_no_raise(header_file, self.__config)
    return decls
Reads a Python string that contains C++ code, and return the declarations tree.
376,392
def log_images(self, name, images, step=None):
    """Log new images for *name* at *step*.

    Args:
        name (str): variable name (converted to a valid tensorflow
            summary name).
        images (list): list of images to visualize.
        step (int): non-negative integer used for visualization.

    Raises:
        TypeError: if *images* is a string instead of a list.
    """
    if isinstance(images, six.string_types):
        # NOTE(review): the original message literal was lost in transit;
        # the wording below is reconstructed -- confirm against upstream.
        raise TypeError(
            'images should be a list of images, got a string: {}'.format(
                type(images)))
    self._check_step(step)
    tf_name = self._ensure_tf_name(name)
    summary = self._image_summary(tf_name, images, step=step)
    self._log_summary(tf_name, summary, images, step=step)
Log new images for given name on given step. Args: name (str): name of the variable (it will be converted to a valid tensorflow summary name). images (list): list of images to visualize step (int): non-negative integer used for visualization
376,393
def remotes_get(self):
    """Return configured remotes, like ``git remote -v``.

    :rtype: dict of tuples, keyed by remote name.

    NOTE(review): the literals for the git subcommand and the split
    delimiter were lost in transit; ``['remote']`` / newline match
    ``git remote`` output (one name per line) -- confirm upstream.
    """
    remotes = {}
    cmd = self.run(['remote'])
    # Drop empty lines (trailing newline) and resolve each remote's URLs.
    for remote_name in filter(None, cmd.split('\n')):
        remotes[remote_name] = self.remote_get(remote_name)
    return remotes
Return remotes like git remote -v. :rtype: dict of tuples
376,394
def get_uids(self, filename=None):
    """Return the UIDs of reminders, excluding included files.

    If *filename* is given, only that file's UIDs are returned (an empty
    list if the file is unknown); otherwise UIDs from all files.

    filename -- the remind file
    """
    self._update()
    if filename:
        if filename not in self._reminders:
            return []
        return self._reminders[filename].keys()
    return [uid for uids in self._reminders.values() for uid in uids]
UIDs of all reminders in the file, excluding included files. If a filename is specified, only its UIDs are returned; otherwise all. filename -- the remind file
376,395
def check_backslashes(self, definition, docstring):
    r'''D301: Use r""" if any backslashes in a docstring.

    Use r"""raw triple double quotes""" if you use any backslashes (\)
    in your docstrings.

    NOTE(review): the stripped literals are reconstructed from the
    pydocstyle D301 check: flag docstrings containing a backslash that do
    not carry an r/ur raw-string prefix -- confirm against upstream.
    '''
    if docstring and '\\' in docstring and not docstring.startswith(
            ('r"', 'ur"')):
        return violations.D301()
r'''D301: Use r""" if any backslashes in a docstring. Use r"""raw triple double quotes""" if you use any backslashes (\) in your docstrings.
376,396
def all_dims(self):
    """The dimensions for each of the arrays in this list.

    Nested ArrayLists contribute their own ``all_dims`` recursively.
    """
    result = []
    for arr in self:
        if isinstance(arr, ArrayList):
            result.append(arr.all_dims)
        else:
            result.append(_get_dims(arr))
    return result
The dimensions for each of the arrays in this list
376,397
def run_with_tornado(self):
    """Run the tornado/websockets based test server."""
    # Imported lazily so tornado is only required when this runner is used.
    from zengine.tornado_server.server import runserver
    runserver(self.manager.args.addr, int(self.manager.args.port))
runs the tornado/websockets based test server
376,398
def _collapse_edge_passing_predicates(graph: BELGraph, edge_predicates: EdgePredicates = None) -> None:
    """Collapse all edges passing the given edge predicates.

    For every matching edge ``u -> v``, node *v* is merged into *u*
    in-place (u survives, v is removed).
    """
    for u, v, _ in filter_edges(graph, edge_predicates=edge_predicates):
        collapse_pair(graph, survivor=u, victim=v)
Collapse all edges passing the given edge predicates.
376,399
def read_csv(csv_name, usecols=None):
    """Return a DataFrame from a .csv file stored under ``DATA_FOLDER``.

    :param csv_name: file name relative to the data folder.
    :param usecols: optional column subset forwarded to ``pd.read_csv``.
    """
    full_path = os.path.join(DATA_FOLDER, csv_name)
    return pd.read_csv(full_path, low_memory=False, usecols=usecols,
                       encoding="utf-8")
Returns a DataFrame from a .csv file stored in /data/raw/