docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Searches for a sample in CRITs. Currently only hashes allowed. Args: md5: md5sum sha1: sha1sum sha256: sha256sum Returns: JSON response or None if not found
def get_samples(self, md5='', sha1='', sha256=''):
    """Search CRITs for a sample. Currently only hashes are allowed.

    Args:
        md5: md5sum to look up.
        sha1: sha1sum to look up.
        sha256: sha256sum to look up.

    Returns:
        The JSON response dict, or None if nothing was found.
    """
    params = {'api_key': self.api_key, 'username': self.username}
    # Only forward the hash filters that were actually supplied.
    for key, value in (('c-md5', md5), ('c-sha1', sha1), ('c-sha256', sha256)):
        if value:
            params[key] = value
    response = requests.get('{0}/samples/'.format(self.url), params=params,
                            verify=self.verify, proxies=self.proxies)
    if response.status_code != 200:
        log.error('Non-200 status code: {}'.format(response.status_code))
        return None
    result_data = json.loads(response.text)
    meta = result_data.get('meta')
    if meta and 'total_count' in meta and meta['total_count'] > 0:
        return result_data
    return None
1,113,218
Searches for the backdoor based on name and version. Args: name: The name of the backdoor. This can be an alias. version: The version. Returns: Returns a JSON object contain one or more backdoor results or None if not found.
def get_backdoor(self, name, version=''):
    """Search CRITs for a backdoor by name (or alias) and version.

    Args:
        name: The name of the backdoor. This can be an alias.
        version: The version string to match exactly.

    Returns:
        The matching backdoor dict, or None if not found.
    """
    # Consistency fix: every other endpoint wrapper in this client
    # (get_samples, status_update, ...) sends API credentials; the
    # original omitted them here.
    params = {
        'api_key': self.api_key,
        'username': self.username,
        'or': 1,
        'c-name': name,
        'c-aliases__in': name,
    }
    r = requests.get('{0}/backdoors/'.format(self.url), params=params,
                     verify=self.verify, proxies=self.proxies)
    if r.status_code != 200:
        log.error('Non-200 status code: {}'.format(r.status_code))
        return None
    result_data = json.loads(r.text)
    meta = result_data.get('meta', {})
    if not isinstance(meta, dict) or meta.get('total_count', 0) <= 0:
        return None
    for backdoor in result_data.get('objects', []):
        # get() returns None for backdoors without a 'version' key, which
        # never equals a requested version string (matching the original
        # "'version' in backdoor" guard).
        if backdoor.get('version') == version:
            return backdoor
    return None
1,113,219
Checks if the two objects are related Args: left_id: The CRITs ID of the first indicator left_type: The CRITs TLO type of the first indicator right_id: The CRITs ID of the second indicator right_type: The CRITs TLO type of the second indicator rel_type: The relationships type ("Related To", etc) Returns: True or False if the relationship exists or not.
def has_relationship(self, left_id, left_type, right_id, right_type,
                     rel_type='Related To'):
    """Check whether two TLOs are related in CRITs.

    Args:
        left_id: The CRITs ID of the first indicator.
        left_type: The CRITs TLO type of the first indicator.
        right_id: The CRITs ID of the second indicator.
        right_type: The CRITs TLO type of the second indicator.
        rel_type: The relationship type ("Related To", etc).

    Returns:
        True or False depending on whether the relationship exists.

    Raises:
        CRITsOperationalError: if the left object cannot be fetched.
    """
    data = self.get_object(left_id, left_type)
    if not data:
        raise CRITsOperationalError('Crits Object not found with id {}'
                                    'and type {}'.format(left_id, left_type))
    # A relationship matches only when type, target id and target type
    # all agree.
    return any(
        rel['relationship'] == rel_type and
        rel['value'] == right_id and
        rel['type'] == right_type
        for rel in data.get('relationships', ())
    )
1,113,220
Update the status of the TLO. By default, the options are: - New - In Progress - Analyzed - Deprecated Args: crits_id: The object id of the TLO crits_type: The type of TLO. This must be a valid TLO type string such as 'Indicator'. status: The status to change. Returns: True if the status was updated. False otherwise. Raises: CRITsInvalidTypeError
def status_update(self, crits_id, crits_type, status):
    """Update the status of a TLO.

    By default, the options are: New, In Progress, Analyzed, Deprecated.

    Args:
        crits_id: The object id of the TLO.
        crits_type: The type of TLO (e.g. 'Indicator').
        status: The status to change to.

    Returns:
        True if the status was updated, False otherwise.

    Raises:
        CRITsInvalidTypeError: via _type_translation for unknown types.
    """
    obj_type = self._type_translation(crits_type)
    patch_url = "{0}/{1}/{2}/".format(self.url, obj_type, crits_id)
    params = {
        'api_key': self.api_key,
        'username': self.username,
    }
    data = {
        'action': 'status_update',
        'value': status,
    }
    r = requests.patch(patch_url, params=params, data=data,
                       verify=self.verify, proxies=self.proxies)
    if r.status_code == 200:
        log.debug('Object {} set to {}'.format(crits_id, status))
        return True
    # Bug fix: the old message always claimed the object was being set
    # to "Informational" regardless of the requested status.
    log.error('Attempted to set object id {} to {}, but did not '
              'receive a 200'.format(crits_id, status))
    log.error('Error message was: {}'.format(r.text))
    return False
1,113,222
Turn a FrameFunction into an Animation. Args: frame_function: A function that returns a FrameGenerator. Returns: an Animation decorator function.
def animation(frame_function: types.FrameFunction) -> types.Animation:
    """Turn a FrameFunction into an Animation.

    Args:
        frame_function: A function that returns a FrameGenerator.

    Returns:
        An Animation decorator function.
    """
    inner_animation = core.Animation(frame_function)

    @functools.wraps(frame_function)
    def wrapper(*args, **kwargs):
        # Delegate every call to the wrapped core.Animation instance.
        return inner_animation(*args, **kwargs)

    return wrapper
1,113,446
Wrapper function for the _Animate wrapper class. Args: func: A function to run while animation is showing. animation: An AnimationGenerator that yields animation frames. step: Approximate timestep (in seconds) between frames. Returns: An animated version of func if func is not None. Otherwise, a function that takes a function and returns an animated version of that.
def animate(func: types.AnyFunction = None, *,
            animation: types.AnimationGenerator = _default_animation(),
            step: float = 0.1) -> types.AnyFunction:
    """Wrapper function for the _Animate wrapper class.

    Args:
        func: A function to run while the animation is showing.
        animation: An AnimationGenerator that yields animation frames.
        step: Approximate timestep (in seconds) between frames.

    Returns:
        An animated version of func if func is not None; otherwise a
        function that takes a function and returns an animated version
        of it.
    """
    # Decorator used with kwargs: @animate(animation=..., step=...)
    if func is None:
        return _animate_with_kwargs(animation_gen=animation, step=step)
    # Decorator used bare: @animate
    if callable(func):
        return _animate_no_kwargs(func, animation, step)
    raise TypeError("argument 'func' must either be None or callable")
1,113,447
Retrieve recently changed / added docs Args: include_docs <bools> if true full document data will be retrieved limit <int> if != None and > 0 limit the result set to this amount of rows Returns a view result to be iterated through
def recent_docs(self, include_docs=True, limit=None):
    """Retrieve recently changed / added docs.

    Args:
        include_docs (bool): if True, full document data is retrieved.
        limit (int): if not None and > 0, limit the result set to this
            many rows.

    Returns:
        A view result to be iterated through.
    """
    # The original wrapped this in ``try: ... except: raise`` -- a
    # bare-except no-op.  Call the view directly instead.
    return self.bucket.view("_changed", include_docs=include_docs,
                            limit=limit)
1,113,560
Sets up this Node with the specified Interfaces before it is run. Args: puller: Indication if a Puller Interface should be created. subscriptions: Collection of the Subscriber Interfaces to be created and their Slots.
def setup(self, puller: bool = None, subscriptions: Dict[str, Any] = None):
    """Set up this Node with the specified Interfaces before it is run.

    Args:
        puller: Indication if a Puller Interface should be created.
        subscriptions: Collection of the Subscriber Interfaces to be
            created and their Slots (keys 'slots' and 'buffer-length');
            defaults to no subscriptions.
    """
    # Bug fix: the original used a mutable default argument
    # (``subscriptions={}``), which is shared across all calls.
    if subscriptions is None:
        subscriptions = {}
    if puller:
        puller = self._zmq.socket(zmq.PULL)
        ip, port, host = self.rslv('rcv')
        puller.bind('tcp://{}:{}'.format(host or ip, port))
        self.poll(puller)
    if subscriptions:
        for publisher in subscriptions:  # type: str
            self.add(publisher,
                     subscriptions[publisher].get('slots'),
                     subscriptions[publisher].get('buffer-length'))
        logger.info('Listening to %s', {
            k: (1 if subscriptions[k].get('slots') is None
                else len(subscriptions[k].get('slots')))
            for k in subscriptions
        })
1,113,719
Convert napoleon docstring to plain sphinx string. Args: docstring (str): Docstring in napoleon format. **config_params (dict): Whatever napoleon doc configuration you want. Returns: str: Sphinx string.
def napoleon_to_sphinx(docstring, **config_params):
    """Convert a napoleon docstring to a plain sphinx string.

    Args:
        docstring (str): Docstring in napoleon format.
        **config_params (dict): Whatever napoleon doc configuration you
            want.

    Returns:
        str: Sphinx string.
    """
    # Match the original defaults unless the caller overrode them.
    config_params.setdefault("napoleon_use_param", False)
    config_params.setdefault("napoleon_use_rtype", False)
    return str(GoogleDocstring(docstring, Config(**config_params)))
1,113,729
Add or update value in configuration file format used by proftpd. Args: data (str): Configuration file as string. item (str): What option will be added/updated. value (str): Value of option. Returns: str: updated configuration
def add_or_update(data, item, value):
    """Add or update an option in a proftpd-style configuration file.

    Args:
        data (str): Configuration file as string.
        item (str): What option will be added/updated.
        value (str): Value of option.

    Returns:
        str: Updated configuration.
    """
    # Bug fix: the original relied on Python 2 semantics -- ``filter``/
    # ``map`` returning lists and ``bytearray(str)`` without an encoding.
    # Under Python 3 ``if conf:`` was always truthy (filter object) and
    # indexing it raised TypeError. This version works on both.
    lines = data.splitlines()

    # 1) look for an active (uncommented) occurrence of `item`
    for i, line in enumerate(lines):
        tokens = line.strip().split()
        if tokens and tokens[0] == item:
            lines[i] = item + " " + value
            return "\n".join(lines)

    # 2) look for a commented-out occurrence; uncomment and update it
    for i, line in enumerate(lines):
        if not line.strip().startswith("#"):
            continue
        parts = line.split("#")
        if len(parts) >= 2 and parts[1].split() and parts[1].split()[0] == item:
            lines[i] = parts[1].split()[0] + " " + value
            return "\n".join(lines)

    # 3) not found anywhere -- append it
    lines.append(item + " " + value + "\n")
    return "\n".join(lines)
1,113,940
Comments line containing `what` in string `data`. Args: data (str): Configuration file in string. what (str): Line which will be commented out. Returns: str: Configuration file with commented `what`.
def comment(data, what):
    """Comment out the line containing `what` in string `data`.

    Args:
        data (str): Configuration file in string form.
        what (str): Line which will be commented out.

    Returns:
        str: Configuration file with `what` commented.
    """
    # Compare whitespace-normalized token lists so differing spacing
    # still matches.
    target = what.split()
    commented = [
        "#" + line if line.strip().split() == target else line
        for line in data.splitlines()
    ]
    return "\n".join(commented)
1,113,941
Initializes class attributes Args: kwargs takes kwargs or a dict
def __init__(self, **kwargs):
    """Initialize instance attributes.

    Args:
        **kwargs: Initial attribute values. If omitted, every key in
            the class-level ``disp_attr_keys`` is initialized to None.
    """
    if kwargs:
        self.attributes = dict(**kwargs)
    else:
        # No values supplied: pre-populate the display keys with None.
        self.attributes = dict.fromkeys(self.__class__.disp_attr_keys)
1,114,396
Prints relevant attributes of an object Args: displayAll if True displays ALL class attributes.
def show(self, displayAll=False):
    """Print the relevant attributes of this object.

    Args:
        displayAll: if True, display ALL attributes instead of just
            the keys listed in ``disp_attr_keys``.
    """
    from pprint import pprint
    if displayAll:
        pprint(self.attributes)
        return
    summary = {}
    for key in self.disp_attr_keys:
        if key in self.attributes:
            summary[key] = self.attributes[key]
        elif key == 'lowercaseEmail':
            # Derive the lowercase email from the already-copied value.
            summary[key] = summary['email'].lower()
        else:
            summary[key] = None
    pprint(summary)
1,114,397
Returns relevant attributes as a dict. Args: rw if True only returns the read/write enabled object attributes
def to_dict(self, rw=False):
    """Return the relevant attributes as a dict.

    Args:
        rw: if True, only return the read/write enabled attributes.

    Returns:
        dict: attribute name -> value, skipping None values.
    """
    result = {}
    for key, value in self.attributes.items():
        if value is None:
            continue
        if rw and key not in self.rw_attr_keys:
            continue
        result[key] = value
    return result
1,114,398
Gets either a slice or an item from an array. Used for the __getitem__ and __getslice__ special methods of the sub-classed array. Args: method (str): on of ['slice', 'item'].
def _get_acorn(self, method, *items):
    """Get either a slice or an item from an array.

    Used for the __getitem__ and __getslice__ special methods of the
    sub-classed array.

    Args:
        method (str): one of ['slice', 'item'].
        *items: indices/slice arguments forwarded to the original
            ndarray accessor.
    """
    #IMPORTANT!! I lost two hours because the ndarray becomes unstable if you
    #don't call the original method first. Somehow passing the array instance to
    #other methods changed its internal representation and made it unusable by
    #the original numpy functions. Putting them first makes it work.

    # Because we had to subclass numpy.ndarray, the original methods get
    # stuck in an infinite loop (max. recursion depth exceeded errors). So,
    # we instead grab the reference to the original ndarray object.
    if method == "slice":
        r = np.ndarray.__acornext__.__getslice__(self, *items)
    else:
        r = np.ndarray.__acornext__.__getitem__(self, *items)

    # NOTE(review): `decorating` and `streamlining` appear to be
    # module-level flags that suppress logging while acorn itself is
    # decorating/streamlining -- confirm against the module globals.
    if not (decorating or streamlining):
        from acorn.logging.decoration import (pre, post, _fqdn)
        if method == "slice":
            fqdn = "numpy.ndarray.__getslice__"
        else:
            fqdn = "numpy.ndarray.__getitem__"
        preres = pre(fqdn, np.ndarray, 5, self, *items)
        entry, bound, ekey = preres
        # This method can trick acorn into thinking that it is a bound
        # method. We want it to behave like it's not.
        post(fqdn, "numpy", r, entry, np.ndarray, ekey)

    return r
1,114,484
Inject ``settings`` into ``value``. Go through ``value`` looking for ``{{NAME}}`` groups and replace each group with the value of the named item from ``settings``. Args: value (str): The value to inject settings into settings: An object that provides the dotted access interface Returns: (str, bool): The new value and whether the new value is different from the original value
def _inject(self, value, settings):
    """Inject ``settings`` into ``value``.

    Go through ``value`` looking for ``{{NAME}}`` groups and replace
    each group with the value of the named item from ``settings``.

    Args:
        value (str): The value to inject settings into.
        settings: An object that provides the dotted access interface
            (``get_dotted``).

    Returns:
        (str, bool): The new value and whether the new value is
        different from the original value.

    Raises:
        ValueError: on an unmatched or empty ``{{...}}`` group.
        KeyError: when a named setting is not found.
    """
    assert isinstance(value, string_types), 'Expected str; got {0.__class__}'.format(value)
    begin, end = '{{', '}}'
    if begin not in value:
        return value, False
    new_value = value
    begin_pos, end_pos = 0, None
    len_begin, len_end = len(begin), len(end)
    len_value = len(new_value)
    while begin_pos < len_value:
        # Find next {{.
        begin_pos = new_value.find(begin, begin_pos)
        if begin_pos == -1:
            break
        # Save everything before {{.
        before = new_value[:begin_pos]
        # Find }} after {{.
        begin_pos += len_begin
        end_pos = new_value.find(end, begin_pos)
        if end_pos == -1:
            raise ValueError('Unmatched {begin}...{end} in {value}'.format(**locals()))
        # Get name between {{ and }}, ignoring leading and trailing
        # whitespace.
        name = new_value[begin_pos:end_pos]
        name = name.strip()
        if not name:
            raise ValueError('Empty name in {value}'.format(**locals()))
        # Save everything after }}.
        after_pos = end_pos + len_end
        try:
            after = new_value[after_pos:]
        except IndexError:
            # Reached end of value.
            after = ''
        # Retrieve string value for named setting (the "injection
        # value").
        try:
            injection_value = settings.get_dotted(name)
        except KeyError:
            raise KeyError('{name} not found in {settings}'.format(**locals()))
        # Non-string settings are encoded by the strategy before
        # splicing them into the string.
        if not isinstance(injection_value, string_types):
            injection_value = self.strategy.encode_value(injection_value)
        # Combine before, inject value, and after to get the new
        # value.
        new_value = ''.join((before, injection_value, after))
        # Continue after injected value.
        begin_pos = len(before) + len(injection_value)
        len_value = len(new_value)
    return new_value, (new_value != value)
1,114,637
Returns the absolute path to the specified file/folder *relative to the repository root*. Args: fpath (str): path to a file or folder; doesn't need to exist.
def abspath(fpath):
    """Return the absolute path to the specified file/folder *relative
    to the repository root*.

    Args:
        fpath (str): path to a file or folder; doesn't need to exist.
    """
    from os import path, getcwd, chdir
    original = getcwd()
    chdir(reporoot)
    try:
        return path.abspath(path.expanduser(fpath))
    finally:
        # Bug fix: always restore the working directory, even when path
        # expansion raises -- the original left the process chdir'ed
        # into the repo root on error.
        chdir(original)
1,114,639
Store credentials required to satisfy a given auth scheme. Args: scheme (str): The name of the Authentication scheme. **params: parameters for the specified scheme. Returns: True if parameters are set successfully (note that this doesn't mean the credentials are valid) False if the scheme specified is not supported
def authorize(self, scheme, **params):
    """Store credentials required to satisfy a given auth scheme.

    Args:
        scheme (str): The name of the Authentication scheme.
        **params: parameters for the specified scheme.

    Returns:
        True if parameters are set successfully (note that this doesn't
        mean the credentials are valid); False if the scheme specified
        is not supported.
    """
    if scheme not in self.schemes:
        return False
    scheme_params = self.schemes[scheme][u'params']
    for field, value in iteritems(params):
        # Always mirror the value onto the instance.
        setattr(self, field, value)
        # Only record it on the scheme when it is a declared, truthy
        # parameter.
        if value and field in scheme_params.keys():
            scheme_params[field] = value
    return True
1,114,680
Check whether all information required for a given auth scheme have been supplied. Args: scheme (str): Name of the authentication scheme to check. One of Gem-Identify, Gem-Device, Gem-Application Returns: True if all required parameters for the specified scheme are present or False otherwise.
def has_auth_params(self, scheme):
    """Check whether all information required for a given auth scheme
    has been supplied.

    Args:
        scheme (str): Name of the authentication scheme to check. One
            of Gem-Identify, Gem-Device, Gem-Application.

    Returns:
        True if all required parameters for the specified scheme are
        present, False otherwise.
    """
    params = self.schemes[scheme][u'params']
    # Every declared parameter must hold a truthy value.
    return all(params.values())
1,114,681
Generate the format expected by HTTP Headers from parameters. Args: params (dict): {key: value} to convert to key=value Returns: A formatted header string.
def format_auth_params(params):
    """Generate the format expected by HTTP Headers from parameters.

    Args:
        params (dict): {key: value} pairs to convert to key="value".

    Returns:
        A formatted header string.
    """
    # Skip falsy values; quote each remaining value.
    return ", ".join(
        '{}="{}"'.format(key, value)
        for key, value in params.items()
        if value
    )
1,114,682
Returns the full path to the default package configuration file. Args: package (str): name of the python package to return a path for.
def _package_path(package):
    """Return the full path to the default package configuration file.

    Args:
        package (str): name of the python package to return a path for.
    """
    from os import path
    return path.join(config_dir(), "{}.cfg".format(package))
1,114,856
Reads a single config file into the parser, silently failing if the file does not exist. Args: parser (ConfigParser): parser to read the file into. filepath (str): full path to the config file.
def _read_single(parser, filepath):
    """Read a single config file into the parser, silently skipping
    files that do not exist.

    Args:
        parser (ConfigParser): parser to read the file into.
        filepath (str): full path to the config file.
    """
    from os import path
    if path.isfile(filepath):
        # Bug fix: the original leaked the file handle via
        # ``parser.readfp(open(filepath))``. Prefer read_file() (readfp
        # was deprecated and later removed), falling back for old
        # interpreters.
        read = getattr(parser, 'read_file', None) or parser.readfp
        with open(filepath) as handle:
            read(handle)
1,114,857
Returns the config settings for the specified package. Args: package (str): name of the python package to get settings for.
def settings(package, reload_=False):
    """Return the config settings for the specified package.

    Args:
        package (str): name of the python package to get settings for.
        reload_ (bool): if True, re-read the configuration from disk.
    """
    global packages
    if reload_ or package not in packages:
        parser = CaseConfigParser()
        # Package-specific config first, then the global acorn config
        # (acorn itself only reads its own file).
        if package != "acorn":
            _read_single(parser, _package_path(package))
        _read_single(parser, _package_path("acorn"))
        packages[package] = parser
    return packages[package]
1,114,858
Returns a dictionary of descriptors deserialized from JSON for the specified package. Args: package (str): name of the python package to get settings for.
def descriptors(package):
    """Return a dictionary of descriptors deserialized from JSON for
    the specified package, or None when no descriptor file exists.

    Args:
        package (str): name of the python package to get settings for.
    """
    from os import path
    dpath = _descriptor_path(package)
    if not path.isfile(dpath):
        return None
    import json
    with open(dpath) as handle:
        return json.load(handle)
1,114,859
Turn an animation into an automatically backspaced animation. Args: animation: A function that returns a generator that yields strings for animation frames. args: Arguments for the animation function. kwargs: Keyword arguments for the animation function. Returns: the animation generator, with backspaces applied to each but the first frame.
def _backspaced_single_line_animation(animation_, *args, **kwargs):
    """Turn an animation into an automatically backspaced animation.

    Args:
        animation_: A function that returns a generator that yields
            strings for animation frames.
        *args: Arguments for the animation function.
        **kwargs: Keyword arguments for the animation function (must
            include 'width').

    Yields:
        The animation frames, with backspaces applied to each but the
        first frame.
    """
    frames = animation_(*args, **kwargs)
    # The first frame is emitted as-is; there is nothing to back over yet.
    yield next(frames)
    # Every subsequent frame is preceded by enough backspaces to erase
    # the previous frame.
    yield from util.concatechain(
        util.BACKSPACE_GEN(kwargs['width']), frames)
1,114,991
Constructor. Args: func: If Animate is used without kwargs, then the function it decorates is passed in here. Otherwise, this is None. This argument should NOT be given directly via keyword assignment. animation_gen: A generator that yields strings for the animation. step: Seconds between each animation frame.
def __init__(self, func=None, *, animation_gen, step=.1):
    """Constructor.

    Args:
        func: If Animate is used without kwargs, the function it
            decorates is passed in here; it must be callable and
            should NOT be given directly via keyword assignment.
        animation_gen: A generator that yields strings for the
            animation.
        step: Seconds between each animation frame.

    Raises:
        TypeError: when func is not callable or is already Annotate-
            decorated.
    """
    if not callable(func):
        cls_name = self.__class__.__name__
        raise TypeError(
            "argument 'func' for {!r} must be callable".format(cls_name))
    self._raise_if_annotated(func)
    self._func = func
    self._animation_gen = animation_gen
    self._step = step
    # Make the wrapper look like the wrapped function.
    functools.update_wrapper(self, func)
1,114,992
Raise TypeError if a function is decorated with Annotate, as such functions cause visual bugs when decorated with Animate. Animate should be wrapped by Annotate instead. Args: func (function): Any callable. Raises: TypeError
def _raise_if_annotated(self, func):
    """Raise TypeError if ``func`` is decorated with Annotate.

    Such functions cause visual bugs when decorated with Animate;
    Animate should be wrapped by Annotate instead.

    Args:
        func (function): Any callable.

    Raises:
        TypeError
    """
    # The ANNOTATED marker is set by Annotate on its wrappers; a
    # missing attribute counts as "not annotated".
    if getattr(func, ANNOTATED, False):
        msg = ('Functions decorated with {!r} '
               'should not be decorated with {!r}.\n'
               'Please reverse the order of the decorators!'.format(
                   self.__class__.__name__, Annotate.__name__))
        raise TypeError(msg)
1,114,994
Note that both arguments are keyword only arguments. Args: start_msg: A message to print before the function runs. end_msg: A message to print after the function has finished. start_no_nl: If True, no newline is appended after the start_msg.
def __init__(self, *, start_msg: Optional[str] = None,
             end_msg: Optional[str] = None,
             start_no_nl: bool = False):
    """Note that all arguments are keyword-only.

    Args:
        start_msg: A message to print before the function runs.
        end_msg: A message to print after the function has finished.
        start_no_nl: If True, no newline is appended after start_msg.

    Raises:
        ValueError: if both messages are None.
    """
    if start_msg is None and end_msg is None:
        raise ValueError(
            "At least one of 'start_msg' and 'end_msg' must be specified.")
    # Validate each message the same way, in the same order as before.
    for message, label in ((start_msg, "start_msg"), (end_msg, "end_msg")):
        self._raise_if_not_none_nor_string(message, label)
    self._start_msg = start_msg
    self._end_msg = end_msg
    self._start_no_nl = start_no_nl
1,114,995
__call__ function for regular synchronous functions. Args: func: The annotated function. args: Arguments for func. kwargs: Keyword arguments for func.
def _sync_call(self, func):
    """__call__ helper for regular synchronous functions.

    Args:
        func: The annotated function.

    Returns:
        The wrapped function, marked with the ANNOTATED attribute.
    """
    @functools.wraps(func)
    def annotated_wrapper(*args, **kwargs):
        if self._start_msg:
            self._start_print()
        result = func(*args, **kwargs)
        if self._end_msg:
            print(self._end_msg)
        return result

    # Mark the wrapper so Animate can detect (and reject) it.
    setattr(annotated_wrapper, ANNOTATED, True)
    return annotated_wrapper
1,114,999
Builds a CustomMetar object from a CustomMetar object (returns it), an ICAO code or a METAR string Args: metar: CustomMetar object, ICAO string or METAR string Returns: CustomMetar object
def get_metar(
        metar: typing.Union[str, 'CustomMetar']
) -> typing.Tuple[typing.Union[str, None], typing.Union['CustomMetar', None]]:
    """Build a CustomMetar object from a CustomMetar object (returned
    as-is), an ICAO code or a METAR string.

    Args:
        metar: CustomMetar object, ICAO string or METAR string.

    Returns:
        Tuple of (error message or None, CustomMetar or None).
    """
    error: typing.Optional[str] = None
    if isinstance(metar, CustomMetar):
        # Already parsed; return it unchanged.
        return None, metar
    if isinstance(metar, str):
        LOGGER.debug('building CustomMetar from: %s', metar)
        if len(metar) == 4:
            # A 4-character string is treated as an ICAO station code.
            LOGGER.debug('retrieving METAR from ICAO')
            # NOAA has discontinued their hosting of raw METAR text files ...
            # error, metar = noaa.retrieve_metar(metar)
            # metar = avwx.AVWX.query_icao(metar).rawreport
            metar = AWC.query_icao(metar).raw_metar
    else:
        error = f'expected a string or or a CustomMetar object, got: {type(metar)}'
    if error:
        return error, None
    try:
        return None, CustomMetar(metar_code=metar)
    except ParserError:
        return f'Unable to parse METAR: {metar}', None
1,115,032
Save the config data Args: data: any serializable config data Raises: ConfigLoaderException: if the ConfigLoader.load not called, so there is no config file name, or the data is not serializable or the loader is nested
def save(self, data):
    """Save the config data.

    Args:
        data: any serializable config data.

    Raises:
        ConfigLoaderException: if ConfigLoader.load was not called (so
            there is no config file name), the data is not
            serializable, or the loader is nested.
    """
    if self.__nested:
        raise ConfigLoaderException("Cannot save the config if the 'nested' paramter is True!")
    if self.__loaded_config_file is None:
        raise ConfigLoaderException("Load not called yet!")
    try:
        with open(self.__loaded_config_file, 'w') as f:
            f.write(self.__formatter.encode(data))
    except Exception as e:
        # NOTE(review): any I/O error is also reported as a
        # serialization error here -- the message may be misleading.
        raise ConfigLoaderException("Config data is not serializable: %s" % e)
1,115,252
Creates a speakable text from a METAR Args: metar: METAR string to use Returns: speakable METAR for TTS
def metar_to_speech(metar: str) -> str:
    """Create a speakable text from a METAR.

    Args:
        metar: METAR string to use.

    Returns:
        Speakable METAR for TTS.
    """
    LOGGER.info('getting speech text from METAR: %s', metar)
    metar_data, metar_units = emiz.avwx.metar.parse_in(metar)
    # 'Altimeter' is spelled out phonetically as 'Q N H' for TTS.
    speech_text = str(emiz.avwx.speech.metar(metar_data, metar_units))
    speech_text = speech_text.replace('Altimeter', 'Q N H')
    LOGGER.debug('resulting speech: %s', speech_text)
    return speech_text
1,115,256
Simple HREF for non-string and non-struct and non-array. Parameters: elt -- the DOM element being parsed ps -- the ParsedSoap object. tag --
def SimpleHREF(self, elt, ps, tag):
    """Resolve a simple HREF for non-string, non-struct, non-array
    elements.

    Parameters:
        elt -- the DOM element being parsed
        ps -- the ParsedSoap object
        tag -- tag text used in the "missing" error message

    Returns:
        The element itself when it has children, the resolved local
        HREF target, or None when the element is optional and absent.
    """
    if len(_children(elt)):
        return elt
    href = _find_href(elt)
    if not href:
        # Bug fix: the original used ``self.minOccurs is 0``, which
        # relies on CPython small-int interning; compare with ==.
        if self.minOccurs == 0:
            return None
        raise EvaluateException('Required' + tag + ' missing',
                                ps.Backtrace(elt))
    return ps.FindLocalHREF(href, elt, 0)
1,115,708
See if the name and type of the "elt" element is what we're looking for. Return the element's type. Parameters: elt -- the DOM element being parsed ps -- the ParsedSoap object.
def checkname(self, elt, ps):
    """See if the name and type of the "elt" element is what we're
    looking for. Return the element's type.

    Parameters:
        elt -- the DOM element being parsed
        ps -- the ParsedSoap object

    Raises:
        EvaluateException: on a namespace, name or type mismatch.
    """
    parselist,errorlist = self.get_parse_and_errorlist()
    ns, name = _get_element_nsuri_name(elt)
    if ns == SOAP.ENC:
        # Element is in SOAP namespace, so the name is a type.
        if parselist and \
        (None, name) not in parselist and (ns, name) not in parselist:
            raise EvaluateException(
                'Element mismatch (got %s wanted %s) (SOAP encoding namespace)' % \
                (name, errorlist), ps.Backtrace(elt))
        return (ns, name)

    # Not a type, check name matches.
    if self.nspname and ns != self.nspname:
        raise EvaluateException('Element NS mismatch (got %s wanted %s)' % \
            (ns, self.nspname), ps.Backtrace(elt))
    if self.pname and name != self.pname:
        raise EvaluateException('Element Name mismatch (got %s wanted %s)' % \
            (name, self.pname), ps.Backtrace(elt))
    # Name matches; delegate the type check.
    return self.checktype(elt, ps)
1,115,710
See if the type of the "elt" element is what we're looking for. Return the element's type. Parameters: elt -- the DOM element being parsed ps -- the ParsedSoap object.
def checktype(self, elt, ps):
    """See if the type of the "elt" element is what we're looking for.
    Return the element's type.

    Parameters:
        elt -- the DOM element being parsed
        ps -- the ParsedSoap object

    Returns:
        (uri, typeName) tuple, or (None, None) when the element carries
        no type attribute.

    Raises:
        EvaluateException: on a malformed type attribute or a type
            mismatch.
    """
    typeName = _find_type(elt)
    if typeName is None or typeName == "":
        # Untyped element; the caller decides how to handle it.
        return (None,None)

    # Parse the QNAME.
    prefix,typeName = SplitQName(typeName)
    uri = ps.GetElementNSdict(elt).get(prefix)
    if uri is None:
        raise EvaluateException('Malformed type attribute (bad NS)',
                                ps.Backtrace(elt))

    #typeName = list[1]
    parselist,errorlist = self.get_parse_and_errorlist()
    # Accept when no expectations are declared, the exact (uri, name)
    # pair is expected, or a namespace-less entry matches an XSD/SOAP
    # namespace.
    if not parselist or \
    (uri,typeName) in parselist or \
    (_is_xsd_or_soap_ns(uri) and (None,typeName) in parselist):
        return (uri,typeName)
    raise EvaluateException(
        'Type mismatch (%s namespace) (got %s wanted %s)' % \
        (uri, typeName, errorlist), ps.Backtrace(elt))
1,115,711
Simple boolean test to see if we match the element name. Parameters: elt -- the DOM element being parsed
def name_match(self, elt):
    """Simple boolean test to see if we match the element name.

    Parameters:
        elt -- the DOM element being parsed

    Returns:
        True when the local name matches and our namespace is unset,
        empty, or equal to the element's namespace URI.
    """
    if self.pname != elt.localName:
        return False
    return self.nspname in (None, '', elt.namespaceURI)
1,115,712
Is the element NIL, and is that okay? Parameters: elt -- the DOM element being parsed ps -- the ParsedSoap object.
def nilled(self, elt, ps):
    """Return whether the element is NIL, and verify that NIL is
    allowed.

    Parameters:
        elt -- the DOM element being parsed
        ps -- the ParsedSoap object

    Raises:
        EvaluateException: when the element is NIL but not nillable.
    """
    if _find_nil(elt) not in ("true", "1"):
        return False
    if self.nillable is False:
        raise EvaluateException('Non-nillable element is NIL',
                                ps.Backtrace(elt))
    return True
1,115,713
Get the value of the simple content of this element. Parameters: elt -- the DOM element being parsed ps -- the ParsedSoap object. mixed -- ignore element content, optional text node
def simple_value(self, elt, ps, mixed=False):
    """Get the value of the simple content of this element.

    Parameters:
        elt -- the DOM element being parsed
        ps -- the ParsedSoap object
        mixed -- ignore element content, optional text node

    Returns:
        The concatenated text content of the element.

    Raises:
        EvaluateException: on invalid encoding, missing value, or
            sub-elements inside the value (unless ``mixed``).
    """
    if not _valid_encoding(elt):
        raise EvaluateException('Invalid encoding', ps.Backtrace(elt))
    c = _children(elt)
    if mixed is False:
        if len(c) == 0:
            raise EvaluateException('Value missing', ps.Backtrace(elt))
        for c_elt in c:
            if c_elt.nodeType == _Node.ELEMENT_NODE:
                raise EvaluateException('Sub-elements in value',
                    ps.Backtrace(c_elt))

    # It *seems* to be consensus that ignoring comments and
    # concatenating the text nodes is the right thing to do.
    return ''.join([E.nodeValue for E in c
        if E.nodeType in [ _Node.TEXT_NODE, _Node.CDATA_SECTION_NODE ]])
1,115,714
find all attributes specified in the attribute_typecode_dict in current element tag, if an attribute is found set it in the self.attributes dictionary. Default to putting in String. Parameters: elt -- the DOM element being parsed ps -- the ParsedSoap object.
def parse_attributes(self, elt, ps):
    """Find all attributes specified in the attribute_typecode_dict on
    the current element and return their parsed values.

    Parameters:
        elt -- the DOM element being parsed
        ps -- the ParsedSoap object

    Returns:
        Dict mapping attribute key to its converted value, or None
        when no attribute typecodes are declared.
    """
    if self.attribute_typecode_dict is None:
        return

    attributes = {}
    for attr,what in self.attribute_typecode_dict.items():
        # Keys may be plain names or (namespaceURI, localName) pairs.
        namespaceURI,localName = None,attr
        if type(attr) in _seqtypes:
            namespaceURI,localName = attr
        value = _find_attrNodeNS(elt, namespaceURI, localName)
        self.logger.debug("Parsed Attribute (%s,%s) -- %s",
            namespaceURI, localName, value)

        # For Now just set it w/o any type interpretation.
        if value is None:
            continue
        attributes[attr] = what.text_to_data(value, elt, ps)

    return attributes
1,115,715
Handles the start and end tags, and attributes. callout to get_formatted_content to get the textNode value. Parameters: elt -- ElementProxy/DOM element sw -- SoapWriter instance pyobj -- processed content KeyWord Parameters: name -- substitute name, (nspname,name) or name orig --
def serialize(self, elt, sw, pyobj, name=None, orig=None, **kw):
    """Handle the start and end tags, and attributes; call out to
    serialize_text_node to write the textNode value.

    Parameters:
        elt -- ElementProxy/DOM element
        sw -- SoapWriter instance
        pyobj -- processed content

    KeyWord Parameters:
        name -- substitute name, (nspname,name) or name
        orig -- original object for href bookkeeping (falls back to
            pyobj)

    Returns:
        The created element, or None when serialized as nil or as an
        href reference.
    """
    objid = _get_idstr(pyobj)
    ns,n = self.get_name(name, objid)

    # nillable
    el = elt.createAppendElement(ns, n)
    if self.nillable is True and pyobj is Nilled:
        self.serialize_as_nil(el)
        return None

    # other attributes
    self.set_attributes(el, pyobj)

    # soap href attribute -- already-serialized objects are emitted as
    # references instead of being duplicated.
    unique = self.unique or kw.get('unique', False)
    if unique is False and sw.Known(orig or pyobj):
        self.set_attribute_href(el, objid)
        return None

    # xsi:type attribute
    if kw.get('typed', self.typed) is True:
        self.set_attribute_xsi_type(el, **kw)

    # soap id attribute
    if self.unique is False:
        self.set_attribute_id(el, objid)

    #Content, <empty tag/>c
    self.serialize_text_node(el, sw, pyobj)

    return el
1,115,722
Recursively change ``mode`` for given ``path``. Same as ``chmod -R mode``. Args: path (str): Path of the directory/file. mode (octal int, default 0755): New mode of the file. Warning: Don't forget to add ``0`` at the beginning of the numbers of `mode`, or `Unspeakable hOrRoRs` will be awaken from their unholy sleep outside of the reality and they WILL eat your soul (and your files).
def recursive_chmod(path, mode=0o755):
    """Recursively change ``mode`` for given ``path``, same as
    ``chmod -R mode``.

    Args:
        path (str): Path of the directory/file.
        mode (octal int, default 0o755): New mode of the file.

    Warning:
        Don't forget the ``0o`` prefix on octal `mode` values.
    """
    # Compat fix: ``0755`` is Python-2-only octal syntax; ``0o755`` is
    # accepted by Python 2.6+ and Python 3.
    passwd_reader.set_permissions(path, mode=mode)
    if os.path.isfile(path):
        return

    # recursively change mode of all subdirectories and files
    for root, dirs, files in os.walk(path):
        for fn in files + dirs:
            passwd_reader.set_permissions(os.path.join(root, fn), mode=mode)
1,116,719
Create lock file filled with :attr:`LOCK_FILE_CONTENT <ftp.settings.LOCK_FILE_CONTENT>`. Args: path (str): Path to the lock file. Made from users home directory and :attr:`LOCK_FILENAME <ftp.settings.LOCK_FILENAME>`.
def create_lock_file(path):
    """Create a lock file filled with LOCK_FILE_CONTENT and restrict
    its group ownership.

    Args:
        path (str): Path to the lock file. Made from the user's home
            directory and LOCK_FILENAME.
    """
    with open(path, "w") as lock_file:
        lock_file.write(settings.LOCK_FILE_CONTENT)
    passwd_reader.set_permissions(path, gid=settings.PROFTPD_USERS_GID)
1,116,720
Adds record to passwd-like file for ProFTPD, creates home directory and sets permissions for important files. Args: username (str): User's name. password (str): User's password.
def add_user(username, password):
    """Add a record to the passwd-like file for ProFTPD, create the
    user's home directory and set permissions for important files.

    Args:
        username (str): User's name.
        password (str): User's password.
    """
    assert _is_valid_username(username), \
        "Invalid format of username '%s'!" % username
    assert username not in passwd_reader.load_users(), \
        "User '%s' is already registered!" % username
    # Typo fix: the message previously read "reqired".
    assert password, "Password is required!"

    # add new user to the proftpd's passwd file
    home_dir = settings.DATA_PATH + username
    sh.ftpasswd(
        passwd=True,              # passwd file, not group file
        name=username,
        home=home_dir,            # chroot in DATA_PATH
        shell="/bin/false",
        uid=settings.PROFTPD_USERS_GID,  # TODO: parse dynamically?
        gid=settings.PROFTPD_USERS_GID,
        stdin=True,               # tell ftpasswd to read password from stdin
        file=settings.LOGIN_FILE,
        _in=password
    )

    # create home dir if not exists
    if not os.path.exists(home_dir):
        # Compat fix: 0o775 instead of the Python-2-only literal 0775.
        os.makedirs(home_dir, 0o775)

    # I am using PROFTPD_USERS_GID (2000) for all our users - this GID
    # shouldn't be used by other than FTP users!
    passwd_reader.set_permissions(home_dir, gid=settings.PROFTPD_USERS_GID)
    passwd_reader.set_permissions(settings.LOGIN_FILE, mode=0o600)

    create_lock_file(home_dir + "/" + settings.LOCK_FILENAME)
    reload_configuration()
1,116,721
Remove user, his home directory and so on.. Args: username (str): User's name.
def remove_user(username):
    """Remove the user, his home directory and so on.

    Args:
        username (str): User's name.
    """
    users = passwd_reader.load_users()
    assert username in users, "Username '%s' not found!" % username

    # drop the record from the passwd file
    del users[username]
    passwd_reader.save_users(users)

    # wipe the home directory, if present
    home_dir = settings.DATA_PATH + username
    if os.path.exists(home_dir):
        shutil.rmtree(home_dir)

    reload_configuration()
1,116,722
Change password for given `username`. Args: username (str): User's name. new_password (str): User's new password.
def change_password(username, new_password):
    """Change the password for the given `username`.

    Args:
        username (str): User's name.
        new_password (str): User's new password.
    """
    assert username in passwd_reader.load_users(), \
        "Username '%s' not found!" % username

    sh.ftpasswd(
        "--change-password",
        passwd=True,               # passwd file, not group file
        name=username,
        stdin=True,                # tell ftpasswd to read password from stdin
        file=settings.LOGIN_FILE,
        _in=new_password
    )

    reload_configuration()
1,116,723
Verify an SMS or TOTP MFA token for this user. Args: mfa_token (str): An alphanumeric code from either a User's TOTP application or sent to them via SMS. Returns: True if the mfa_token is valid, False otherwise.
def verify_mfa(self, mfa_token):
    """Verify an SMS or TOTP MFA token for this user.

    Args:
        mfa_token (str): An alphanumeric code from either a User's TOTP
            application or sent to them via SMS.

    Returns:
        True if the mfa_token is valid, False otherwise.
    """
    response = self.resource.verify_mfa({'mfa_token': mfa_token})
    # The API may answer with a boolean or the string 'true'.
    return response['valid'] in (True, 'true')
1,116,846
Returns Astronomy Picture of the Day Args: date: date instance (default = today) hd: bool if high resolution should be included Returns: json
def get_apod(cls, date=None, hd=False):
    """Return the Astronomy Picture of the Day.

    Args:
        date: date instance (default = today).
        hd: bool, whether the high-resolution image should be included.

    Returns:
        json
    """
    return cls('planetary/apod').get_resource(date=date, hd=hd)
1,117,217
Returns date and ids of flyovers Args: lat: latitude float lon: longitude float begin: date instance end: date instance Returns: json
def get_assets(cls, lat, lon, begin=None, end=None):
    """Return dates and ids of flyovers.

    Args:
        lat: latitude float.
        lon: longitude float.
        begin: date instance.
        end: date instance.

    Returns:
        json
    """
    return cls('planetary/earth/assets').get_resource(
        lat=lat, lon=lon, begin=begin, end=end)
1,117,218
Returns satellite image Args: lat: latitude float lon: longitude float date: date instance of available date from `get_assets` dim: width and height of image in degrees as float cloud_score: boolean to calculate the percentage of the image covered by clouds Returns: json
def get_imagery(cls, lat, lon, date=None, dim=None, cloud_score=False):
    """Return a satellite image.

    Args:
        lat: latitude float.
        lon: longitude float.
        date: date instance of an available date from `get_assets`.
        dim: width and height of the image in degrees as float.
        cloud_score: boolean, calculate the percentage of the image
            covered by clouds.

    Returns:
        json
    """
    return cls('planetary/earth/imagery').get_resource(
        lat=lat, lon=lon, date=date, dim=dim, cloud_score=cloud_score)
1,117,219
Handles decoding of the XML `data`. Args: data (str): Data which will be decoded. Returns: dict: Dictionary with decoded data.
def decode(data):
    """Handle decoding of the XML `data`.

    Args:
        data (str): Data which will be decoded.

    Returns:
        Validated structure with the decoded data.

    Raises:
        MetaParsingException: on malformed input.
    """
    # Compat fix: ``except Exception, e`` is Python-2-only syntax and
    # ``e.message`` no longer exists in Python 3; use ``as e`` and let
    # %-formatting stringify the exception.
    try:
        dom = dhtmlparser.parseString(data)
    except Exception as e:
        raise MetaParsingException("Can't parse your XML data: %s" % e)

    root = dom.find("root")

    # check whether there is a <root>
    if not root:
        raise MetaParsingException("All elements have to be inside <root>.")

    # and make sure, that there is not too many <root>s
    if len(root) > 1:
        raise MetaParsingException("Too many <root> elements in your XML!")

    items = root[0].find("item")

    # check for items
    if not items:
        raise MetaParsingException("There are no <items> in your XML <root>!")

    decoded = []
    for item in items:
        if "key" not in item.params:
            raise MetaParsingException(
                "There is no 'key' parameter in %s." % str(item)
            )
        decoded.append([
            item.params["key"],
            item.getContent().strip()
        ])

    return validator.check_structure(decoded)
1,117,362
Returns: (np.poly1d): callable function of polynomial fit excluding all outliers Args: deg (int): degree of polynomial fit niter (int): do linear regression n times, successively removing outliers nstd (float): exclude outliers, if their deviation is > [nstd] * standard deviation return_outliers (bool): also return outlier positions as 2. arg
def polyFitIgnoringOutliers(
        x, y, deg=2, niter=3, nstd=2, return_outliers=False):
    """Fit a polynomial to (x, y), iteratively discarding outliers.

    Args:
        x, y: sample coordinates.
        deg (int): degree of the polynomial fit.
        niter (int): number of successive fit/reject passes.
        nstd (float): points deviating more than nstd * RMS from the
            current fit are treated as outliers.
        return_outliers (bool): also return a boolean outlier mask as a
            second return value.

    Returns:
        np.poly1d fit; or (fit, outlier_mask) when return_outliers is True.
    """
    if return_outliers:
        # `view` follows the shrinking sample; `all_outliers` keeps the
        # full-length mask that is eventually returned.
        view = all_outliers = np.zeros_like(y, dtype=bool)
    for iteration in range(niter):
        fit = np.poly1d(np.polyfit(x, y, deg))
        if iteration == niter - 1:
            break
        residual = y - fit(x)
        rms = np.sqrt((residual ** 2).mean())
        keep = abs(residual) < nstd * rms
        if return_outliers:
            view[~keep] = True
        if keep.sum() <= deg + 1:
            # Not enough points left for a meaningful refit.
            break
        x, y = x[keep], y[keep]
        if return_outliers:
            view = view[keep]
    if return_outliers:
        return fit, all_outliers
    return fit
1,117,411
Save variable on given path using Pickle Args: variable: what to save path (str): path of the output
def save(variable, filename):
    """Persist `variable` to `filename` using pickle.

    Args:
        variable: object to serialize.
        filename (str): path of the output file.
    """
    # `with` guarantees the handle is closed even if pickling fails.
    with open(filename, 'wb') as file_obj:
        pickle.dump(variable, file_obj)
1,117,662
Load variable from Pickle file Args: path (str): path of the file to load Returns: variable read from path
def load(filename):
    """Load a variable from a pickle file.

    Args:
        filename (str): path of the file to load.

    Returns:
        The deserialized object.
    """
    # `with` guarantees the handle is closed even if unpickling fails.
    with open(filename, 'rb') as file_obj:
        return pickle.load(file_obj)
1,117,663
Searches files satisfying query It first decompose the query in ngrams, then score each document containing at least one ngram with the number. The ten document having the most ngrams in common with the query are selected. Args: query (str): what to search; results_number (int): number of results to return (default: 10)
def search(self, query, verbose=0):
    """Search files matching `query`.

    The query is decomposed into ngrams; every document containing at
    least one query ngram contributes its per-file ngram counts, and the
    files are returned ordered by decreasing score.

    Args:
        query (str): what to search.
        verbose (int): when > 0, print the query being searched.

    Returns:
        list: matching elements, best match first.
    """
    if verbose > 0:
        print("searching " + query)
    query = query.lower()

    # Collect ids of documents sharing at least one ngram with the query.
    # dict.get avoids the redundant `in d.keys()` membership test.
    qocument = set()
    for qgram in ng(query, self.slb):
        qocument.update(self.ngrams.get(qgram, ()))
    self.qocument = qocument

    # Score each file by summing its ngram hit counts.
    results = {}
    for doc_id in qocument:
        for file_id, count in self.D[doc_id].items():
            results[file_id] = results.get(file_id, 0) + count

    ranked = sorted(results.items(), key=operator.itemgetter(1),
                    reverse=True)
    return [self.elements[file_id] for file_id, _ in ranked]
1,117,668
Register a url (e.g. wallet.gem.co) for Args: name (str): human-readable wallet name (e.g. wallet) domain_name (str): the domain name to create subdomain on (e.g. gem.co) this domain must already be registered with Gem Returns: The new round.NetkiName
def create(self, name, domain_name):
    """Register a wallet name on the given domain.

    Args:
        name (str): human-readable wallet name (e.g. wallet).
        domain_name (str): domain to create the subdomain on (e.g. gem.co);
            this domain must already be registered with Gem.

    Returns:
        The new round.NetkiName.
    """
    payload = dict(name=name, domain_name=domain_name)
    netki_name = self.wrap(self.resource.create(payload))
    self.add(netki_name)
    return netki_name
1,117,845
Re-orders a miz file into a folder (flattened) Args: miz_file_path: source miz file target_dir: folder to flatten the content into skip_options_file: do not re-order option file
def reorder(
        miz_file_path: typing.Union[str, Path],
        target_dir: typing.Union[str, Path],
        skip_options_file: bool,
):
    """Re-order a MIZ file into a folder (flattened).

    Args:
        miz_file_path: source MIZ file.
        target_dir: folder to flatten the content into.
        skip_options_file: do not re-order the "options" file.
    """
    miz_file_path = elib.path.ensure_file(miz_file_path)
    target_dir_path = elib.path.ensure_dir(target_dir, must_exist=False)
    LOGGER.debug('re-ordering miz file: %s', miz_file_path)
    LOGGER.debug('destination folder: %s', target_dir)
    LOGGER.debug('%s option file', "skipping" if skip_options_file else "including")
    if not target_dir_path.exists():
        LOGGER.debug('creating directory %s', target_dir_path)
        target_dir_path.mkdir(exist_ok=True)
    with Miz(miz_file_path, overwrite=True) as miz_:

        def mirror_dir(src: Path, dst: Path):
            # Recursively copy everything that is missing or different in
            # dst; `ignore` is read from the enclosing scope (bound below,
            # before the first call).
            LOGGER.debug('mirroring: %s -> %s', src, dst)
            LOGGER.debug('comparing directories')
            diff_ = dircmp(str(src), str(dst), ignore)
            diff_list = diff_.left_only + diff_.diff_files
            LOGGER.debug('differences: %s', diff_list)
            for __diff in diff_list:
                source = Path(diff_.left, __diff)
                target = Path(diff_.right, __diff)
                LOGGER.debug('looking at: %s', __diff)
                if source.is_dir():
                    LOGGER.debug('isdir: %s', __diff)
                    if not target.exists():
                        LOGGER.debug('creating: %s', __diff)
                        target.mkdir()
                    mirror_dir(source, target)
                else:
                    LOGGER.debug('copying: %s', __diff)
                    shutil.copy2(str(source), diff_.right)
            # Recurse into the sub-directories both sides have in common.
            for sub in diff_.subdirs.values():
                mirror_dir(Path(sub.left), Path(sub.right))

        # pylint: disable=protected-access
        miz_._encode()

        # `ignore` must be bound before mirror_dir is first called; the
        # closure above reads it at call time, not at definition time.
        if skip_options_file:
            ignore = ['options']
        else:
            ignore = []

        mirror_dir(Path(miz_.temp_dir), target_dir_path)
1,117,903
Write mission, dictionary etc. to a MIZ file Args: destination: target MIZ file (if none, defaults to source MIZ + "_EMIZ" Returns: destination file
def zip(self, destination: typing.Union[str, Path] = None, encode: bool = True) -> str:
    """Write mission, dictionary etc. to a MIZ file.

    Args:
        destination: target MIZ file; when None, defaults to the source
            MIZ path with an "_EMIZ" suffix.
        encode: re-encode the in-memory tables before writing.

    Returns:
        str: path of the destination file.
    """
    if encode:
        self._encode()
    if destination is None:
        target = self.miz_path.parent.joinpath(f'{self.miz_path.stem}_EMIZ.miz')
    else:
        target = elib.path.ensure_file(destination, must_exist=False)
    LOGGER.debug('zipping mission to: %s', target)
    # Seed the archive with a minimal valid MIZ skeleton first.
    target.write_bytes(dummy_miz)
    # compression=8 is ZIP_DEFLATED.
    with ZipFile(str(target), mode='w', compression=8) as archive:
        for root, _, file_names in os.walk(self.temp_dir.absolute()):
            for file_name in file_names:
                abs_path = Path(root, file_name).absolute()
                archive.write(abs_path, arcname=abs_path.relative_to(self.temp_dir))
    return str(target)
1,117,904
Service configuration and logging setup. Configuration defined in ``gordon-janitor-user.toml`` will overwrite ``gordon-janitor.toml``. Args: config_root (str): where configuration should load from, defaults to current working directory. Returns: A dict for Gordon service configuration
def setup(config_root=''):
    """Service configuration and logging setup.

    Configuration defined in ``gordon-janitor-user.toml`` overwrites
    ``gordon-janitor.toml``.

    Args:
        config_root (str): where configuration should load from;
            defaults to the current working directory.

    Returns:
        dict: Gordon service configuration.
    """
    config = _load_config(root=config_root)
    log_settings = config.get('core', {}).get('logging', {})
    ulogger.setup_logging(
        progname='gordon-janitor',
        level=log_settings.get('level', 'INFO').upper(),
        handlers=log_settings.get('handlers') or ['syslog'])
    return config
1,117,941
Get the names of functions or methods decorated with the specified decorator. For a method, the name is returned as class_name.method_name. Args : path : The path to the module. decorator_module : The name of the module defining the decorator. decorator_name : The name of the decorator.
def get_decorated_names(path, decorator_module, decorator_name):
    """Get the names of functions or methods decorated with the specified
    decorator; method names are returned as class_name.method_name.

    Args:
        path: path to the module source file.
        decorator_module: name of the module defining the decorator.
        decorator_name: name of the decorator.

    Returns:
        list of dicts with 'name' (and, for methods, 'class_name' and
        'instance_creator') keys.
    """
    # Read the source
    with open(path) as f:
        module_source = f.read()
    # Regex groups: 1 = optional instance_creator target, 2 = def name,
    # 3 = 'self' if the def looks like a method.
    expression = '\s*@(?:{}\.)?{}[\s\S]+?(?:instance_creator\s*=\s*([\w.]+)[\s\S]+?)?\s*def\s+(\w+)\s*\(\s*(self)?'.format(decorator_module, decorator_name)
    methods = re.compile(expression).findall(module_source)
    decorateds = [x[1] for x in methods]
    instance_creators = [x[0] if x[0] != '' else None for x in methods]
    is_methods = [x[2] != '' for x in methods]
    result = []
    class_name_expression = '\s*(?:class\s+(\w+)\s*\(.*\:\n[\s\S]+?)+?\s+def\s+{}\s*\('
    # Get class names for methods
    for i in range(len(decorateds)):
        decorated = decorateds[i]
        # If a method
        if is_methods[i]:
            # Get the class name; `index` disambiguates methods that share
            # the same name across classes.
            index = decorateds[:i].count(decorated)
            class_name = re.compile(class_name_expression.format(decorated)).findall(module_source)[index]
            original_name = decorated
            instance_creator = instance_creators[i]
            # Name-mangle dunder-prefixed methods the way Python does.
            if decorated[:2] == '__':
                decorated = '_' + class_name + decorated
            result.append({'name': decorated, 'class_name': class_name, 'instance_creator': instance_creator})
        else:
            result.append({'name': decorated})
    if DEBUG:
        print(decorateds, instance_creators, is_methods, result)
    return result
1,118,112
Returns the specified function, or the method bound to an instance. Args : path : The path to the module containing the method or function. method : The name of the function. class_name : The name of the class if the function is a method. instance_creator: The name of the callable that returns the class instance.
def load_method(path, method, class_name=None, instance_creator=None):
    """Return the specified function, or the method bound to an instance.

    Args:
        path: path to the module containing the method or function.
        method: name of the function.
        class_name: name of the class if the function is a method.
        instance_creator: dotted name of the callable that returns the
            class instance (e.g. 'factory.build').

    Returns:
        The function, or the method bound to a freshly created instance.
    """
    # Load the module
    module = load_module(path)
    if not class_name:
        return getattr(module, method)
    if instance_creator:
        # Walk the dotted path component by component.  The original code
        # accumulated the walk into an unused variable (always re-reading
        # the first component) and then resolved the full dotted string
        # with a single getattr, which fails for nested creators.
        target = module
        for part in instance_creator.split('.'):
            target = getattr(target, part)
        instance = target()
    else:
        instance = getattr(module, class_name)()
    return getattr(instance, method)
1,118,113
set up all methods representing the port operations. Parameters: port -- Port that defines the operations.
def setUpMethods(self, port):
    """Set up all methods representing the port operations.

    Parameters:
        port -- WSDLTools.Port that defines the operations.
    """
    # NOTE(review): the format string lacks a %s placeholder; it is only
    # evaluated when the assert fails, and would then raise TypeError.
    assert isinstance(port, WSDLTools.Port), \
        'expecting WSDLTools.Port not: ' %type(port)
    sd = self._services.get(port.getService().name)
    assert sd is not None, 'failed to initialize.'
    binding = port.getBinding()
    portType = port.getPortType()
    action_in = ''
    for bop in binding.operations:
        try:
            op = portType.operations[bop.name]
        except KeyError, ex:
            raise WsdlGeneratorError,\
                'Port(%s) PortType(%s) missing operation(%s) defined in Binding(%s)' \
                %(port.name,portType.name,bop.name,binding.name)
        # Pick the soapAction from the first SoapOperationBinding extension;
        # the for/else warns when none is present.
        for ext in bop.extensions:
            if isinstance(ext, WSDLTools.SoapOperationBinding):
                action_in = ext.soapAction
                break
        else:
            warnings.warn('Port(%s) operation(%s) defined in Binding(%s) missing soapAction' \
                %(port.name,op.name,binding.name) )
        msgin = op.getInputMessage()
        msgin_name = TextProtect(msgin.name)
        method_name = self.getMethodName(op.name)
        # Emit the generated service method source, line by line.
        m = sd.newMethod()
        print >>m, '%sdef %s(self, ps, **kw):' %(self.getIndent(level=1), method_name)
        if msgin is not None:
            print >>m, '%srequest = ps.Parse(%s.typecode)' %(self.getIndent(level=2), msgin_name)
        else:
            print >>m, '%s# NO input' %self.getIndent(level=2)
        msgout = op.getOutputMessage()
        if msgout is not None:
            msgout_name = TextProtect(msgout.name)
            print >>m, '%sreturn request,%s()' %(self.getIndent(level=2), msgout_name)
        else:
            print >>m, '%s# NO output' % self.getIndent(level=2)
            print >>m, '%sreturn request,None' % self.getIndent(level=2)
        print >>m, ''
        print >>m, '%ssoapAction[\'%s\'] = \'%s\'' %(self.getIndent(level=1), action_in, method_name)
        print >>m, '%sroot[(%s.typecode.nspname,%s.typecode.pname)] = \'%s\'' \
            %(self.getIndent(level=1), msgin_name, msgin_name, method_name)
    return
1,118,122
set up all methods representing the port operations. Parameters: port -- Port that defines the operations.
def setUpMethods(self, port):
    """Set up all methods representing the port operations.

    Parameters:
        port -- WSDLTools.Port that defines the operations.
    """
    # NOTE(review): the format string lacks a %s placeholder; it is only
    # evaluated when the assert fails, and would then raise TypeError.
    assert isinstance(port, WSDLTools.Port), \
        'expecting WSDLTools.Port not: ' %type(port)
    binding = port.getBinding()
    portType = port.getPortType()
    service = port.getService()
    s = self._services[service.name]
    for bop in binding.operations:
        try:
            op = portType.operations[bop.name]
        except KeyError, ex:
            # NOTE(review): the message references op.name, but `op` is
            # unbound when the very first lookup raises KeyError.
            raise WsdlGeneratorError,\
                'Port(%s) PortType(%s) missing operation(%s) defined in Binding(%s)' \
                %(port.name, portType.name, op.name, binding.name)
        soap_action = wsaction_in = wsaction_out = None
        if op.input is not None:
            wsaction_in = op.getInputAction()
        if op.output is not None:
            wsaction_out = op.getOutputAction()
        # Validate that the soapAction matches the WS-Action; warn or raise
        # depending on self.strict.
        for ext in bop.extensions:
            if isinstance(ext, WSDLTools.SoapOperationBinding) is False:
                continue
            soap_action = ext.soapAction
            if not soap_action:
                break
            if wsaction_in is None:
                break
            if wsaction_in == soap_action:
                break
            if self.strict is False:
                warnings.warn(\
                    'Port(%s) operation(%s) in Binding(%s) soapAction(%s) != WS-Action(%s)' \
                    %(port.name, op.name, binding.name, soap_action, wsaction_in),
                )
                break
            raise WsdlGeneratorError,\
                'Port(%s) operation(%s) in Binding(%s) soapAction(%s) MUST match WS-Action(%s)' \
                %(port.name, op.name, binding.name, soap_action, wsaction_in)
        method_name = self.getMethodName(op.name)
        # Emit the generated service method source, line by line.
        m = s.newMethod()
        print >>m, '%sdef %s(self, ps, address):' %(self.getIndent(level=1), method_name)
        msgin_name = msgout_name = None
        msgin, msgout = op.getInputMessage(), op.getOutputMessage()
        if msgin is not None:
            msgin_name = TextProtect(msgin.name)
        if msgout is not None:
            msgout_name = TextProtect(msgout.name)
        indent = self.getIndent(level=2)
        for l in self.createMethodBody(msgin_name, msgout_name):
            print >>m, indent + l
        print >>m, ''
        print >>m, '%ssoapAction[\'%s\'] = \'%s\'' %(self.getIndent(level=1), wsaction_in, method_name)
        print >>m, '%swsAction[\'%s\'] = \'%s\'' %(self.getIndent(level=1), method_name, wsaction_out)
        print >>m, '%sroot[(%s.typecode.nspname,%s.typecode.pname)] = \'%s\'' \
            %(self.getIndent(level=1), msgin_name, msgin_name, method_name)
1,118,129
Handles decoding of the YAML `data`. Args: data (str): Data which will be decoded. Returns: dict: Dictionary with decoded data.
def decode(data):
    """Decode the YAML `data` into the intermediate metadata structure.

    Args:
        data (str): YAML document to decode.

    Returns:
        dict: validated structure parsed from the YAML.

    Raises:
        MetaParsingException: on malformed YAML or invalid structure.
    """
    try:
        # safe_load avoids arbitrary object construction on untrusted
        # input; plain yaml.load without a Loader is unsafe and deprecated.
        decoded = yaml.safe_load(data)
    except Exception as e:
        # `except Exception, e` / `e.message` are Python-2-only.
        raise MetaParsingException("Can't parse your YAML data: %s" % str(e))

    return validator.check_structure(decoded)
1,118,149
Handles decoding of the JSON `data`. Args: data (str): Data which will be decoded. Returns: dict: Dictionary with decoded data.
def decode(data):
    """Decode the JSON `data` into the intermediate metadata structure.

    Args:
        data (str): JSON document to decode.

    Returns:
        dict: validated structure parsed from the JSON.

    Raises:
        MetaParsingException: on malformed JSON or invalid structure.
    """
    try:
        decoded = json.loads(data)
    except Exception as e:
        # `except Exception, e` / `e.message` are Python-2-only.
        raise MetaParsingException("Can't parse your JSON data: %s" % str(e))

    return validator.check_structure(decoded)
1,118,880
Calls the specified Trigger of another Area with the optionally given parameters. Args: target: The name of the target Area. trigger: The name of the Trigger. parameters: The parameters of the function call.
def trigger(self, target: str, trigger: str, parameters: Dict[str, Any] = None):
    """Call the specified Trigger of another Area with the optionally
    given parameters.

    Args:
        target: name of the target Area.
        trigger: name of the Trigger.
        parameters: parameters of the function call; defaults to no
            parameters.  (The previous default was a mutable ``{}``,
            which is shared between calls — the classic mutable-default
            pitfall — so it was replaced with None.)
    """
    pass
1,119,080
Return the coordinates of a point of intersection given two lines. Return None if the lines are parallel but non-collinear. Return an arbitrary point of intersection if the lines are collinear. Parameters: line1 and line2: lines given by 4 coordinates (x0,y0,x1,y1).
def intersection(line1, line2):
    """Return the intersection point of two lines, or None.

    Returns None if the lines are parallel but non-collinear; for
    collinear lines an arbitrary common point is returned.

    Parameters:
        line1, line2: lines given by 4 coordinates (x0, y0, x1, y1).
    """
    x1, y1, x2, y2 = line1
    u1, v1, u2, v2 = line2
    # Solve ((a, b), (c, d)) * (t, s) = (e, f)
    a, b = x2 - x1, u1 - u2
    c, d = y2 - y1, v1 - v2
    e, f = u1 - x1, v1 - y1

    denom = float(a * d - b * c)
    if not _near(denom, 0):
        t = (e * d - b * f) / denom
        # s = (a * f - e * c) / denom
        return x1 + t * (x2 - x1), y1 + t * (y2 - y1)

    # Parallel.  If collinear, the system is solvable with t = 0, in which
    # case s would have to equal both e/b and f/d.
    if b == 0 or d == 0:
        return None
    if _near(e / b, f / d):  # collinear
        return x1, y1
    return None
1,119,304
Remove all given files. Args: files (list): List of filenames, which will be removed.
def _remove_files(files):
    """Remove all given files.

    Args:
        files (list): filenames to remove; files that no longer exist
            are skipped.
    """
    logger.debug("Request for file removal (_remove_files()).")

    for fn in files:
        # EAFP: the previous os.path.exists() check was racy — the file
        # could disappear between the check and the remove.
        try:
            logger.debug("Removing '%s'." % fn)
            os.remove(fn)
        except FileNotFoundError:
            pass
1,119,316
Pair `items` with same ISBN into `DataPair` objects. Args: items (list): list of items, which will be searched. Returns: list: list with paired items. Paired items are removed, `DataPair` is \ added instead.
def _isbn_pairing(items):
    """Pair `items` with the same ISBN into `DataPair` objects.

    Args:
        items (list): list of items which will be searched.

    Returns:
        list: list with paired items; the paired items are removed and a
            `DataPair` is added instead.
    """
    NameWrapper = namedtuple("NameWrapper", ["name", "obj"])
    # Wrap every metadata/ebook file with its bare (extension-less) name.
    metas = map(
        lambda x: NameWrapper(_just_name(x.filename), x),
        filter(lambda x: isinstance(x, MetadataFile), items)
    )
    ebooks = map(
        lambda x: NameWrapper(_just_name(x.filename), x),
        filter(lambda x: isinstance(x, EbookFile), items)
    )

    # simple pairing mechanism, which shouldn't be O^2 complex, but something
    # slightly better
    metas = sorted(metas, key=lambda x: x.name)
    ebooks = sorted(ebooks, key=lambda x: x.name, reverse=True)

    while metas:
        meta = metas.pop()

        # Only ISBN-named metadata files can be paired.
        if not isbn_validator.is_valid_isbn(meta.name):
            continue

        if not ebooks:
            break

        # NOTE(review): _index presumably returns a negative value when the
        # name is not found — confirm against its definition.
        ebook_index = _index(ebooks, meta.name, key=lambda x: x.name)

        if ebook_index >= 0:
            logger.debug(
                "Pairing '%s' and '%s'." % (
                    meta.obj.filename, ebooks[ebook_index].obj.filename
                )
            )
            items.append(
                DataPair(
                    metadata_file=meta.obj,
                    ebook_file=ebooks[ebook_index].obj
                )
            )
            # Replace both originals with the pair and shrink the search
            # space (ebooks is sorted in reverse order).
            items.remove(meta.obj)
            items.remove(ebooks[ebook_index].obj)
            ebooks = ebooks[ebook_index+1:]

    return items
1,119,321
Makes sure that the name filters for the specified package have been loaded. Args: package (str): name of the package that this method belongs to. context (str): one of ['decorate', 'time', 'analyze']; specifies which section of the configuration settings to check.
def _get_name_filter(package, context="decorate", reparse=False):
    """Make sure the name filters for the specified package are loaded.

    Args:
        package (str): name of the package this method belongs to.
        context (str): one of ['decorate', 'time', 'analyze']; which
            section of the configuration settings to check.
        reparse (bool): when True, re-read the settings even if cached.

    Returns:
        dict with 'filters', 'rfilters', 'ignores' and 'rignores' keys,
        or None when the context is unknown.
    """
    global name_filters
    pkey = (package, context)
    if pkey in name_filters and not reparse:
        return name_filters[pkey]

    from acorn.config import settings
    spack = settings(package)

    # The acorn.* sections allow for global settings that affect every package
    # that ever gets wrapped.
    sections = {
        "decorate": ["tracking", "acorn.tracking"],
        "time": ["timing", "acorn.timing"],
        "analyze": ["analysis", "acorn.analysis"]
    }

    filters, rfilters = None, None
    import re
    if context in sections:
        # We are interested in the 'filter' and 'rfilter' options if they exist.
        filters, rfilters = [], []
        ignores, rignores = [], []
        for section in sections[context]:
            if spack.has_section(section):
                options = spack.options(section)
                if "filter" in options:
                    filters.extend(re.split(r"\s*\$\s*",
                                            spack.get(section, "filter")))
                if "rfilter" in options:  # pragma: no cover
                    # Until now, the fnmatch filters have been the most
                    # useful, so there are no unit tests for regex filters.
                    pfilters = re.split(r"\s*\$\s*",
                                        spack.get(section, "rfilter"))
                    rfilters.extend([re.compile(p, re.I) for p in pfilters])
                if "ignore" in options:
                    ignores.extend(re.split(r"\s*\$\s*",
                                            spack.get(section, "ignore")))
                if "rignore" in options:  # pragma: no cover
                    pignores = re.split(r"\s*\$\s*",
                                        spack.get(section, "rignore"))
                    # BUG FIX: this previously compiled `pfilters` (the
                    # rfilter patterns) instead of `pignores`.
                    rignores.extend([re.compile(p, re.I) for p in pignores])

        name_filters[pkey] = {
            "filters": filters,
            "rfilters": rfilters,
            "ignores": ignores,
            "rignores": rignores
        }
    else:
        name_filters[pkey] = None

    return name_filters[pkey]
1,119,446
Returns a string representation of the tracker object for the given item. Args: item: object to get tracker for. fqdn (str): fully-qualified domain name of the object.
def _tracker_str(item):
    """Return a string (or pass-through) representation of the tracker
    object for the given item.

    Args:
        item: object to get a tracker representation for.
    """
    instance = tracker(item)
    if instance is None:
        # Must be a simple built-in type like `int` or `float`, in which
        # case we don't want to convert it to a string.
        return item
    if isinstance(instance, (str, tuple)):
        # Strings and tuples are already in their final form.
        return instance
    return instance.uuid
1,119,448
Returns the reduced function call stack that includes only relevant function calls (i.e., ignores any that are not part of the specified package or acorn. Args: package (str): name of the package that the logged method belongs to.
def _reduced_stack(istart=3, iend=5, ipython=True):
    """Return the reduced function call stack: only frames whose source
    file passes `_decorated_path` (i.e. belongs to the logged package or
    to acorn) are kept.

    Args:
        istart (int): first index of the frame tuple slice to keep.
        iend (int): end index (exclusive) of the frame tuple slice.
        ipython (bool): unused; kept for interface compatibility.
    """
    import inspect
    frames = inspect.stack()
    return [frame[istart:iend] for frame in frames
            if _decorated_path(frame[1])]
1,119,450
Decorator for wrapping the creation of class instances that are being logged by acorn. Args: base: base class used to call __new__ for the construction. package (str): name of (global) package the class belongs to. stackdepth (int): if the calling stack is less than this depth, than include the entry in the log; otherwise ignore it.
def creationlog(base, package, stackdepth=_def_stackdepth):
    """Decorator for wrapping the creation of class instances that are
    being logged by acorn.

    Args:
        base: base class used to call __new__ for the construction.
        package (str): name of (global) package the class belongs to.
        stackdepth (int): if the calling stack is less than this depth,
            include the entry in the log; otherwise ignore it.
    """
    @staticmethod
    def wrapnew(cls, *argl, **argd):
        global _atdepth_new, _cstack_new, streamlining
        origstream = None
        if not (decorating or streamlining):
            entry, _atdepth_new = _pre_create(cls, _atdepth_new, stackdepth, *argl, **argd)
            _cstack_new.append(cls)
            #See if we need to enable streamlining for this constructor.
            fqdn = cls.__fqdn__
            if fqdn in _streamlines and _streamlines[fqdn]:
                #We only use streamlining for the plotting routines at the
                #moment, so it doesn't get hit by the unit tests.
                msg.std("Streamlining {}.".format(fqdn), 2)
                origstream = streamlining
                streamlining = True
        try:
            if six.PY2:
                result = base.__old__(cls, *argl, **argd)
            else:  # pragma: no cover
                #Python 3 changed the way that the constructors behave. In cases
                #where a class inherits only from object, and doesn't override
                #the __new__ method, the __old__ we replaced was just the one
                #belonging to object.
                if base.__old__ is object.__new__:
                    result = base.__old__(cls)
                else:
                    result = base.__old__(cls, *argl, **argd)
        except TypeError:  # pragma: no cover
            #This is a crazy hack! We want this to be dynamic so that it can
            #work with any of the packages. If the error message suggests using
            #a different constructor, we go ahead and use it.
            import sys
            xcls, xerr = sys.exc_info()[0:2]
            referral = xerr.args[0].split()[-1]
            if ".__new__()" in referral:
                t = eval(referral.split('.')[0])
                result = t.__new__(cls, *argl, **argd)
            else:
                raise
                # NOTE(review): unreachable after `raise`.
                result = None

        if result is not None and hasattr(cls, "__init__"):
            try:
                cls.__init__(result, *argl, **argd)
            except:  # pragma: no cover
                print(cls, argl, argd)
                raise
        else:  # pragma: no cover
            msg.err("Object initialize failed for {}.".format(base.__name__))

        #If we don't disable streamlining for the original method that set
        #it, then the post call would never be reached.
        if origstream is not None:
            #We avoid another dict lookup by checking whether we set the
            #*local* origstream to something above.
            streamlining = origstream

        if not (decorating or streamlining):
            _cstack_new.pop()
            if len(_cstack_new) == 0:
                _atdepth_new = False
            _post_create(_atdepth_new, entry, result)

        return result
    return wrapnew
1,119,453
Adds logging for a call to the specified function that is being handled by an external module. Args: fqdn (str): fully-qualified domain name of the function being logged. parent: *object* that the function belongs to. stackdepth (int): maximum stack depth before entries are ignored. argl (list): positional arguments passed to the function call. argd (dict): keyword arguments passed to the function call.
def pre(fqdn, parent, stackdepth, *argl, **argd):
    """Log the start of a call to a function handled by an external module.

    Args:
        fqdn (str): fully-qualified domain name of the function being logged.
        parent: *object* the function belongs to.
        stackdepth (int): maximum stack depth before entries are ignored.
        argl (list): positional arguments passed to the function call.
        argd (dict): keyword arguments passed to the function call.

    Returns:
        (entry, bound, ekey) tuple describing the logged call.
    """
    global _atdepth_call, _cstack_call
    # +1 because this helper adds a frame on top of the wrapper method,
    # so the reported depth would otherwise be off by one.
    entry, _atdepth_call, reduced, bound, ekey = _pre_call(
        _atdepth_call, parent, fqdn, stackdepth + 1, *argl, **argd)
    _cstack_call.append(fqdn)
    return (entry, bound, ekey)
1,119,457
Safely sets the attribute of the specified object. This includes not setting attributes for final objects and setting __func__ for instancemethod typed objects. Args: obj: object to set an attribute for. name (str): new attribute name. value: new attribute value. Returns: bool: True if the set attribute was successful.
def _safe_setattr(obj, name, value):
    """Safely set an attribute on the specified object.

    Never sets attributes on final objects, and writes through __func__
    for instancemethod-typed objects.

    Args:
        obj: object to set an attribute for.
        name (str): new attribute name.
        value: new attribute value.

    Returns:
        bool: True if the attribute set was successful.
    """
    okey = id(obj)
    if okey in _set_failures or okey in _final_objs:
        return False

    import inspect
    try:
        # Bound methods don't accept attributes; write to the underlying
        # function instead.
        target = obj.__func__ if inspect.ismethod(obj) else obj
        if isinstance(target, dict):  # pragma: no cover
            target[name] = value
        else:
            setattr(target, name, value)
        return True
    except (TypeError, AttributeError):  # pragma: no cover
        # Remember the failure so we never retry on this object.
        _set_failures.append(okey)
        msg.warn("Failed {}:{} attribute set on {}.".format(name, value, obj))
        return False
1,119,459
Returns the fully qualified name of the object. Args: o (type): instance of the object's type. oset (bool): when True, the fqdn will also be set on the object as attribute `__fqdn__`. recheck (bool): for sub-classes, sometimes the super class has already had its __fqdn__ attribute set; in that case, we want to recheck the object's name. This usually only gets used during object extension.
def _fqdn(o, oset=True, recheck=False, pmodule=None):
    """Return the fully qualified name of the object.

    Args:
        o (type): instance of the object's type.
        oset (bool): when True, the fqdn is also stored on the object as
            attribute `__fqdn__`.
        recheck (bool): for sub-classes, sometimes the super class has
            already had its __fqdn__ attribute set; in that case we want
            to recheck the object's name (used during object extension).
        pmodule: fallback module to use when the object's module cannot
            be determined.
    """
    if id(o) in _set_failures or o is None:
        return None
    if recheck or not _safe_hasattr(o, "__fqdn__"):
        import inspect
        if not hasattr(o, "__name__"):
            msg.warn("Skipped object {}: no __name__ attribute.".format(o), 3)
            return

        result = None
        # Prefer the acorn-extended object when one exists.
        if hasattr(o, "__acornext__") and o.__acornext__ is not None:
            otarget = o.__acornext__
        else:
            otarget = o

        omod = _safe_getmodule(otarget) or pmodule
        # Fall back through __objclass__, then __class__, to locate the
        # owning module when getmodule fails.
        if (omod is None and hasattr(otarget, "__objclass__")
                and otarget.__objclass__ is not None):  # pragma: no cover
            omod = _safe_getmodule(otarget.__objclass__)
            parts = ("<unknown>" if omod is None else omod.__name__,
                     otarget.__objclass__.__name__,
                     otarget.__name__)
            #msg.std("FQDN: objclass => {}".format(parts), 4)
            result = "{}.{}.{}".format(*parts)
        elif (omod is None and hasattr(otarget, "__class__")
              and otarget.__class__ is not None):
            omod = _safe_getmodule(otarget.__class__)
            parts = ("<unknown>" if omod is None else omod.__name__,
                     otarget.__class__.__name__,
                     otarget.__name__)
            #msg.std("FQDN: class => {}".format(parts), 4)
            result = "{}.{}.{}".format(*parts)
        elif omod is not otarget:
            # Recurse to name the module itself, then append the object name.
            parts = (_fqdn(omod, False), otarget.__name__)
            #msg.std("FQDN: o => {}".format(parts), 4)
            result = "{}.{}".format(*parts)
        else:
            result = otarget.__name__

        if oset:
            _safe_setattr(o, "__fqdn__", result)
        return result

    if _safe_hasattr(o, "__fqdn__"):
        return o.__fqdn__
1,119,463
Loads the stack depth settings from the config file for the specified package. Args: package (str): name of the package to get stack depth info for. fqdn (str): fully qualified domain name of the member in the package. defdepth (int): default depth when one has not been configured.
def _get_stack_depth(package, fqdn, defdepth=_def_stackdepth):
    """Load (and cache) the configured stack depth for a package member.

    Args:
        package (str): name of the package to get stack depth info for.
        fqdn (str): fully qualified domain name of the member in the package.
        defdepth (int): default depth when one has not been configured.
    """
    global _stack_config
    if package not in _stack_config:
        from acorn.config import settings
        spack = settings(package)
        _stack_config[package] = {}
        secname = "logging.depth"
        if spack.has_section(secname):
            for ofqdn in spack.options(secname):
                _stack_config[package][ofqdn] = spack.getint(secname, ofqdn)

    pkg_conf = _stack_config[package]
    if fqdn in pkg_conf:
        result = pkg_conf[fqdn]
        msg.gen("Using {} for {} stack depth.".format(result, fqdn), 3)
    elif "*" in pkg_conf:  # pragma: no cover
        # A wildcard entry applies to every member without its own entry.
        result = pkg_conf["*"]
        msg.gen("Using {} for {} stack depth.".format(result, fqdn), 3)
    else:
        result = defdepth
    return result
1,119,464
Loads the settings for generic options that take FQDN and a boolean value (1 or 0). Args: packname (str): name of the package to get config settings for. package: actual package object.
def _load_generic(packname, package, section, target):
    """Load settings mapping an FQDN to a boolean value ("1" or "0").

    Args:
        packname (str): name of the package to get config settings for.
        package: actual package object (unused here; kept for interface
            parity with the other loaders).
        section (str): configuration section to read.
        target (dict): mapping updated in place with fqdn -> bool.
    """
    from acorn.config import settings
    spack = settings(packname)
    if not spack.has_section(section):
        return
    for fqdn, active in spack.items(section):
        target[fqdn] = (active == "1")
1,119,467
Loads the special call wrapping settings for functions in the specified package. This allows the result of the original method call to be cast as a different type, or passed to a different constructor before returning from the wrapped function. Args: packname (str): name of the package to get config settings for. package: actual package object.
def _load_callwraps(packname, package):
    """Load the special call wrapping settings for functions in a package.

    These allow the result of the original method call to be cast as a
    different type, or passed to a different constructor, before the
    wrapped function returns.

    Args:
        packname (str): name of the package to get config settings for.
        package: actual package object.
    """
    global _callwraps
    from acorn.config import settings
    from acorn.logging.descriptors import _obj_getattr
    spack = settings(packname)
    if spack is None or not spack.has_section("callwrap"):
        return
    for fqdn, target in dict(spack.items("callwrap")).items():
        _callwraps[fqdn] = _obj_getattr(package, target)
1,119,468
Sign a transaction. Args: transaction (coinop.Transaction) Returns: A list of signature dicts of the form [ {'primary': 'base58signaturestring'}, ... ]
def signatures(self, transaction):
    """Sign a transaction.

    Args:
        transaction (coinop.Transaction)

    Returns:
        A list of signature dicts of the form
        [{'primary': 'base58signaturestring'}, ...]

    Raises:
        DecryptionError: when the wallet has not been unlocked.
    """
    # TODO: output.metadata['type']['change']
    wallet = self.multi_wallet
    if not wallet:
        raise DecryptionError("This wallet must be unlocked with "
                              "wallet.unlock(passphrase)")
    return wallet.signatures(transaction)
1,119,704
Create a new Application. Args: **kwargs: Arbitrary keyword arguments, including: name (str): A name for the new Application. Returns: A round.Application object if successful.
def create(self, **kwargs):
    """Create a new Application.

    Args:
        **kwargs: arbitrary keyword arguments, including:
            name (str): a name for the new Application.
            admin_token (str): when present, the context is immediately
                authorized for the new Application.

    Returns:
        A round.Application object if successful.
    """
    resource = self.resource.create(kwargs)
    if 'admin_token' in kwargs:
        resource.context.authorize('Gem-Application',
                                   api_token=resource.api_token,
                                   admin_token=kwargs['admin_token'])
    return self.add(self.wrap(resource))
1,119,762
Resets any of the tokens for this Application. Note that you may have to reauthenticate afterwards. Usage: application.reset('api_token') application.reset('api_token', 'totp_secret') Args: *args (list of str): one or more of ['api_token', 'subscription_token', 'totp_secret'] Returns: The Application.
def reset(self, *args):
    """Reset any of the tokens for this Application.

    Note that you may have to reauthenticate afterwards.

    Usage:
        application.reset('api_token')
        application.reset('api_token', 'totp_secret')

    Args:
        *args (list of str): one or more of
            ['api_token', 'subscription_token', 'totp_secret'].

    Returns:
        The Application.
    """
    token_names = list(args)
    self.resource = self.resource.reset(token_names)
    return self
1,119,764
Create a new Account object and add it to this Accounts collection. Args: name (str): Account name network (str): Type of cryptocurrency. Can be one of, 'bitcoin', ' bitcoin_testnet', 'litecoin', 'dogecoin'. Returns: The new round.Account
def create(self, name, network):
    """Create a new Account object and add it to this Accounts collection.

    Args:
        name (str): account name.
        network (str): type of cryptocurrency; one of 'bitcoin',
            'bitcoin_testnet', 'litecoin', 'dogecoin'.

    Returns:
        The new round.Account.

    Raises:
        ValueError: when `network` is not a supported network.
    """
    # `not in` is the idiomatic (and linter-preferred) spelling of
    # `not network in ...`.
    if network not in SUPPORTED_NETWORKS:
        raise ValueError('Network not valid!')
    account = self.wrap(self.resource.create(dict(name=name,
                                                  network=network)))
    self.add(account)
    return account
1,119,871
Update the Account resource with specified content. Args: name (str): Human-readable name for the account Returns: the updated Account object.
def update(self, **kwargs):
    """Update the Account resource with the specified content.

    Args:
        name (str): human-readable name for the account.

    Returns:
        The updated Account object.
    """
    updated_resource = self.resource.update(kwargs)
    return self.__class__(updated_resource, self.client, wallet=self.wallet)
1,119,873
Applies the weather from a METAR object to a MIZ file Args: metar: metar object in_file: path to MIZ file out_file: path to output MIZ file (will default to in_file) Returns: tuple of error, success
def set_weather_from_metar(
        metar: typing.Union[Metar.Metar, str],
        in_file: typing.Union[str, Path],
        out_file: typing.Union[str, Path] = None
) -> typing.Tuple[typing.Union[str, None], typing.Union[str, None]]:
    """Apply the weather from a METAR object to a MIZ file.

    Args:
        metar: METAR object or raw METAR string.
        in_file: path to the source MIZ file.
        out_file: path to the output MIZ file (defaults to in_file).

    Returns:
        (error, success) tuple; exactly one of the two is a string.
    """
    error, metar = custom_metar.CustomMetar.get_metar(metar)
    if error:
        return error, None

    if metar:
        LOGGER.debug('METAR: %s', metar.code)

    in_file = elib.path.ensure_file(in_file)
    if out_file is None:
        out_file = in_file
    else:
        out_file = elib.path.ensure_file(out_file, must_exist=False)
    LOGGER.debug('applying metar: %s -> %s', in_file, out_file)

    try:
        LOGGER.debug('building MissionWeather')
        weather = mission_weather.MissionWeather(metar)
        with Miz(str(in_file)) as miz:
            weather.apply_to_miz(miz)
            miz.zip(str(out_file))
        return None, f'successfully applied METAR to {in_file}'
    except ValueError:
        # Freak METAR values occasionally defeat the (experimental) parser.
        error = f'Unable to apply METAR string to the mission.\n' \
                f'This is most likely due to a freak value, this feature is still experimental.\n' \
                f'I will fix it ASAP !'
        return error, None
1,120,079
Safely translate url to relative filename Args: url (str): A target url string Returns: str
def url_to_filename(url):
    """Safely translate a url to a relative filename.

    Args:
        url (str): a target url string.

    Returns:
        str
    """
    # strip a single leading and a single trailing slash
    url = url[1:] if url.startswith('/') else url
    url = url[:-1] if url.endswith('/') else url
    # drop parent-directory symbols to prevent unwilling filesystem access
    url = remove_pardir_symbols(url)
    # dots are special characters in the backends: replace them with
    # underscores in the last (filename) component
    return replace_dots_to_underscores_at_last(url)
1,120,288
Remove relative path symobls such as '..' Args: path (str): A target path string sep (str): A strint to refer path delimiter (Default: `os.sep`) pardir (str): A string to refer parent directory (Default: `os.pardir`) Returns: str
def remove_pardir_symbols(path, sep=os.sep, pardir=os.pardir):
    """Remove relative path symbols such as '..'.

    Args:
        path (str): a target path string.
        sep (str): the path delimiter (default: `os.sep`).
        pardir (str): the parent-directory symbol (default: `os.pardir`).

    Returns:
        str
    """
    kept = (component for component in path.split(sep)
            if component != pardir)
    return sep.join(kept)
1,120,289
Remove dot ('.') while a dot is treated as a special character in backends Args: path (str): A target path string Returns: str
def replace_dots_to_underscores_at_last(path):
    """Replace dots with underscores in the last path component.

    Dots are treated as special characters in the backends, so the
    filename part must not contain them.

    Args:
        path (str): a target path string.

    Returns:
        str
    """
    if path == '':
        return path
    if '/' in path:
        dirname, basename = path.rsplit('/', 1)
        return dirname + '/' + basename.replace('.', '_')
    return path.replace('.', '_')
1,120,290
Loads the mappings from method call result to analysis. Args: package (str): name of the package to load for.
def _load_methods(package):
    """Load the mappings from method call result to analysis function.

    Args:
        package (str): name of the package to load for.  `_methods[package]`
            ends up as a dict of fqdn -> callable, or None when no
            'analysis.methods' section is configured.
    """
    global _methods
    _methods[package] = None
    from acorn.config import settings
    from acorn.logging.descriptors import _obj_getattr
    spack = settings(package)
    if spack is None or not spack.has_section("analysis.methods"):
        return
    from importlib import import_module
    _methods[package] = {}
    for fqdn, target in dict(spack.items("analysis.methods")).items():
        # Import the target's root module, then walk the dotted path.
        root = import_module(target.split('.')[0])
        _methods[package][fqdn] = _obj_getattr(root, target)
1,120,385
Analyzes the result from calling the method with the specified FQDN. Args: fqdn (str): full-qualified name of the method that was called. result: result of calling the method with `fqdn`. argl (tuple): positional arguments passed to the method call. argd (dict): keyword arguments passed to the method call.
def analyze(fqdn, result, argl, argd):
    """Analyze the result from calling the method with the specified FQDN.

    Args:
        fqdn (str): fully-qualified name of the method that was called.
        result: result of calling the method with `fqdn`.
        argl (tuple): positional arguments passed to the method call.
        argd (dict): keyword arguments passed to the method call.
    """
    package = fqdn.split('.')[0]
    if package not in _methods:
        _load_methods(package)
    handlers = _methods[package]
    if handlers is not None and fqdn in handlers:
        return handlers[fqdn](fqdn, result, *argl, **argd)
1,120,386
Builds a dummy METAR string from a mission file Args: mission_file: input mission file icao: dummy ICAO (defaults to XXXX) time: dummy time (defaults to now()) Returns: METAR str
def get_metar_from_mission(
        mission_file: str,
        icao: str = 'XXXX',
        time: str = None,
) -> str:
    """Build a dummy METAR string from a mission file.

    Args:
        mission_file: input mission file.
        icao: dummy ICAO (defaults to XXXX).
        time: dummy time (defaults to now()).

    Returns:
        METAR str.
    """
    builder = _MetarFromMission(
        mission_file=mission_file,
        icao=icao,
        time=time,
    )
    return builder.metar
1,120,463
Set credentials for Identify authentication. Args: api_token (str): Token issued to your Application through the Gem Developer Console. override (boolean): Replace existing Application credentials.
def authenticate_identify(self, api_token, override=True):
    """Set credentials for Identify authentication.

    Args:
        api_token (str): Token issued to your Application through the Gem
            Developer Console.
        override (boolean): Replace existing Application credentials.

    Raises:
        OverrideError: credentials already exist and override is False.
        AuthUsageError: token is missing or rejected by the context.

    Returns:
        bool: True on success.
    """
    already_set = self.context.has_auth_params('Gem-Identify')
    if already_set and not override:
        raise OverrideError('Gem-Identify')
    # short-circuits: authorize() is only attempted for a truthy token
    authorized = bool(api_token) and self.context.authorize(
        'Gem-Identify', api_token=api_token)
    if not authorized:
        raise AuthUsageError(self.context, 'Gem-Identify')
    return True
1,120,503
Decompose this Miz into json Args: output_folder: folder to output the json structure as a Path miz_file: MIZ file path as a Path
def decompose(miz_file: Path, output_folder: Path):
    """Decompose a Miz file into a json structure on disk.

    Args:
        miz_file: MIZ file path as a Path
        output_folder: folder to output the json structure as a Path
    """
    mission_folder, assets_folder = NewMiz._get_subfolders(output_folder)
    NewMiz._wipe_folders(mission_folder, assets_folder)
    LOGGER.info('unzipping mission file')
    with Miz(miz_file) as miz:
        version = miz.mission.d['version']
        # FIX: was an f-string carrying a lazy "%s" placeholder; the f-prefix
        # did nothing (no braces) and the placeholder belongs to logging's
        # lazy %-formatting, so the prefix is dropped.
        LOGGER.debug('mission version: "%s"', version)
        LOGGER.info('copying assets to: "%s"', assets_folder)
        # everything except the mission table itself is treated as an asset
        ignore = shutil.ignore_patterns('mission')
        shutil.copytree(str(miz.temp_dir), str(assets_folder), ignore=ignore)
        NewMiz._reorder_warehouses(assets_folder)
        LOGGER.info('decomposing mission table into: "%s" (this will take a while)', mission_folder)
        NewMiz._decompose_dict(miz.mission.d, 'base_info', mission_folder, version, miz)
1,120,552
Recompose a Miz from json object Args: src: folder containing the json structure target_file: target Miz file
def recompose(src: Path, target_file: Path):
    """Recompose a Miz from a decomposed json structure.

    Args:
        src: folder containing the json structure
        target_file: target Miz file
    """
    mission_folder, assets_folder = NewMiz._get_subfolders(src)
    # pylint: disable=c-extension-no-member
    base_info_text = Path(mission_folder, 'base_info.json').read_text(encoding=ENCODING)
    version = ujson.loads(base_info_text)['__version__']
    with Miz(target_file) as miz:
        LOGGER.info('re-composing mission table from folder: "%s"', mission_folder)
        miz.mission.d = NewMiz._recreate_dict_from_folder(mission_folder, version)
        for asset in assets_folder.iterdir():
            destination = Path(miz.temp_dir, asset.name).absolute()
            if asset.is_dir():
                # copytree refuses to overwrite, so clear any stale copy first
                if destination.exists():
                    shutil.rmtree(destination)
                shutil.copytree(asset.absolute(), destination)
            elif asset.is_file():
                shutil.copy(asset.absolute(), destination)
        miz.zip(target_file, encode=False)
1,120,553
Returns a python object from a python object string args: obj_str: python object path example "rdfframework.connections.ConnManager[{param1}]" kwargs: * kwargs used to format the 'obj_str'
def get_obj_frm_str(obj_str, **kwargs): obj_str = obj_str.format(**kwargs) args = [] kwargs = {} params = [] # parse the call portion of the string if "(" in obj_str: call_args = obj_str[obj_str.find("("):] obj_str = obj_str[:obj_str.find("(")] call_args = call_args[1:-1] if call_args: call_args = call_args.split(",") else: call_args = [] call_args = [arg.strip() for arg in call_args] for arg in call_args: if "=" in arg: parts = arg.split("=") kwargs[parts[0]] = parts[1] else: args.append(arg) # parse a the __getitem__ portion of the string if "[" in obj_str: params = obj_str[obj_str.find("["):] obj_str = obj_str[:obj_str.find("[")] params = [part.replace("[", "").replace("]", "") for part in params.split("][")] obj = pydoc.locate(obj_str) if params: for part in params: obj = get_attr(obj, part) if args or kwargs: if kwargs: obj = obj.__call__(*args, **kwargs) else: obj = obj.__call__(*args) return obj
1,120,917
converts a file path argument to its path within the framework args: path: filepath to the python file
def pyfile_path(path):
    """Convert a file path argument to its path within the framework.

    Returns the portion of *path* that follows the last 'rdfframework'
    component, joined with the same separator style the input used.

    args:
        path: filepath to the python file

    returns:
        str: the sub-path after 'rdfframework'; a path with no separator is
        returned unchanged.

    raises:
        ValueError: when 'rdfframework' is not a component of the path.
    """
    if "/" in path:
        join_term = "/"
    elif "\\" in path:
        join_term = "\\"
    else:
        # FIX: a separator-less path previously raised NameError because
        # neither `parts` nor `join_term` was ever bound; treat it as
        # already relative and hand it back untouched.
        return path
    parts = path.split(join_term)
    # index of the LAST 'rdfframework' component in the path
    anchor = len(parts) - 1 - parts[::-1].index('rdfframework')
    return join_term.join(parts[anchor + 1:])
1,120,918
Takes a subject predicate and object and joins them with a space in between Args: sub -- Subject pred -- Predicate obj -- Object Returns str
def make_triple(sub, pred, obj):
    """Join a subject, predicate and object into one space-separated triple.

    Args:
        sub -- Subject
        pred -- Predicate
        obj -- Object

    Returns:
        str: "<sub> <pred> <obj> ."
    """
    return "%s %s %s ." % (sub, pred, obj)
1,120,923
takes a dictionary object and a regular expression pattern and removes all keys that match the pattern. args: obj: dictionary object to search through regx_pattern: string without beginning and ending /
def delete_key_pattern(obj, regx_pattern):
    """Recursively drop every dict key matching *regx_pattern*.

    Lists and dicts are walked recursively; sets become lists; anything
    that is not JSON-serializable is replaced by its type name string.

    args:
        obj: dictionary object to search through
        regx_pattern: string without beginning and ending /
    """
    def _safe(value):
        # JSON-serializable values pass through; everything else is
        # replaced by the string form of its type
        try:
            json.dumps(value)
            return value
        except Exception:
            return str(type(value))

    def _convert(value):
        if isinstance(value, list):
            return delete_key_pattern(value, regx_pattern)
        if isinstance(value, set):
            return list(value)
        if isinstance(value, dict):
            return delete_key_pattern(value, regx_pattern)
        return _safe(value)

    if isinstance(obj, list):
        return [_convert(item) for item in obj]
    if isinstance(obj, set):
        return list(obj)
    if isinstance(obj, dict):
        return {key: _convert(item) for key, item in obj.items()
                if not re.match(regx_pattern, key)}
    return _safe(obj)
1,120,926
similar to dict.get functionality but a None value will return the if_none value args: item: dictionary to search key: the dictionary key if_none: the value to return if None is passed in strict: if False an empty string is treated as None
def get2(item, key, if_none=None, strict=True):
    """dict.get with a configurable fallback for missing/None values.

    args:
        item: dictionary to search
        key: the dictionary key
        if_none: the value to return when the lookup yields None
        strict: if False an empty string is also treated as None
    """
    value = item.get(key)
    if value is None:
        return if_none
    if not strict and value == "":
        return if_none
    return value
1,120,929
returns a dictionary of items based on a lowercase search args: value: the value to search by
def find(self, value):
    """Return a RegistryDictionary of items whose key contains *value*.

    The match is case-insensitive: both the search value and every key are
    lowercased before the substring test.

    args:
        value: the value to search by
    """
    needle = str(value).lower()
    matches = RegistryDictionary()
    for key, item in self.items():
        if needle not in key.lower():
            continue
        matches[key] = item
    return matches
1,120,933
get the value using the passed in dot notation args: prop: a string of the property to retrieve "a.b.c" ~ dictionary['a']['b']['c']
def get(self, prop):
    """Retrieve the value at the dot-notation path *prop*.

    "a.b.c" resolves to self.obj['a']['b']['c'].

    args:
        prop: a string of the property to retrieve

    returns:
        The value at the path, or None when any intermediate lookup
        yields None.
    """
    val = self.obj
    for part in prop.split("."):
        # FIX: the old None-as-first-iteration sentinel restarted the
        # lookup at self.obj whenever an intermediate value was None,
        # silently resolving the wrong key; a missing level now
        # short-circuits to None instead.
        if val is None:
            return None
        val = val.get(part)
    return val
1,120,938