code
stringlengths
52
7.75k
docs
stringlengths
1
5.85k
def upload_site(self):
    """Upload a previously-built site to LSST the Docs.

    Raises
    ------
    RuntimeError
        If the configured build directory does not exist.
    """
    build_dir = self._config['build_dir']
    if not os.path.isdir(build_dir):
        message = 'Site not built at {0}'.format(build_dir)
        self._logger.error(message)
        raise RuntimeError(message)
    ltdclient.upload(self._config)
Upload a previously-built site to LSST the Docs.
def libraries():
    """Return the installed library names, sorted alphabetically."""
    return sorted(str(entry.name) for entry in libraries_dir().dirs())
return installed library names.
def lib_examples(lib):
    """Return the sorted example names for *lib* (EXAMPLE1, EXAMPLE2, ...).

    Returns an empty list when the library has no examples directory.
    """
    examples_dir = lib_examples_dir(lib)
    if not examples_dir.exists():
        return []
    return sorted(entry.name for entry in examples_dir.dirs())
return library examples. EXAMPLE1,EXAMPLE2,..
def safe_eval(source, *args, **kwargs):
    """Evaluate *source*, rejecting any use of ``import``.

    BUG FIX: the previous implementation silently stripped the
    substring ``'import'`` from the source, corrupting legitimate
    input (e.g. a name like ``important`` became ``ant``) while still
    evaluating the mangled remainder.  Rejecting such input explicitly
    is safer and less surprising.

    WARNING: eval() remains fundamentally unsafe on untrusted input
    even without ``import`` -- attribute access can still reach
    dangerous objects.  Only use with trusted sources.

    Raises:
        ValueError: if ``source`` contains the token ``import``.
    """
    if 'import' in source:
        raise ValueError("'import' is not allowed in safe_eval source")
    return eval(source, *args, **kwargs)
eval without import
def fmt_num(num, zero_num=None):
    """Humanize a number with thousands separators (9000 -> 9,000).

    When *zero_num* is given, the value is first formatted to that
    many decimal places via ``floatformat``.
    """
    value = num if zero_num is None else floatformat(num, zero_num)
    return intcomma(value, False)
humanize number(9000 to 9,000)
def react(self, **kwargs):
    """React to a particle hitting the shoreline.

    Dispatches on the shoreline type:
      * "bounce"  -> __bounce handler (flagged as broken/unsupported)
      * "reverse" -> __reverse handler
      * anything else: sticky shoreline (infinite concentration); the
        particle stays where it hit, so 'hit_point' is returned.

    kwargs are forwarded to the handler.  The docstring of the caller
    describes 'feature' (the hit LineString segment) and 'angle'.
    """
    if self._type == "bounce":
        print("This shoreline type is NOT SUPPORTED and is broken")
        return self.__bounce(**kwargs)
    elif self._type == "reverse":
        return self.__reverse(**kwargs)
    else:
        # BUG FIX: this message previously appeared *after* the return
        # statement and was therefore unreachable; it now prints before
        # returning.  Also converted py2 print statements to print().
        print("Not reacting to shoreline (sticky with inifinite concentration)")
        return kwargs.get('hit_point')
Bounce off of a shoreline. feature = LineString of two points, being the line segment the particle hit. angle = decimal degrees from 0 (x-axis), counter-clockwise (math style)
def get_capabilities(self):
    """Simulate a WFS GetCapabilities request for a local OGR layer.

    Returns a python dict with 'LatLongBoundingBox' (a shapely box)
    and 'Name' (the file's base name without extension) keys defined.
    """
    d = {}
    ext = self._layer.GetExtent()
    # @TODO if a filter is on this may give different results
    llbb = [round(float(v), 4) for v in ext]
    # OGR GetExtent() returns (minx, maxx, miny, maxy); shapely's box()
    # expects (minx, miny, maxx, maxy), hence the index reordering.
    d['LatLongBoundingBox'] = box(llbb[0], llbb[2], llbb[1], llbb[3])
    d['Name'] = self._file.split('/')[-1].split('.')[0]
    return d
Gets capabilities. This is a simulation of a GetCapabilities WFS request. Returns a python dict with LatLongBoundingBox and Name keys defined.
def get_geoms_for_bounds(self, bounds):
    """Return geometries within *bounds* (a WKT string).

    Sets a spatial filter on the OGR layer and returns each feature's
    geometry as GeoJSON (loaded as a list of python dictionaries).
    """
    poly = ogr.CreateGeometryFromWkt(bounds)
    self._layer.SetSpatialFilter(poly)
    # Release the OGR geometry immediately; the layer keeps the filter.
    poly.Destroy()
    return [json.loads(e.GetGeometryRef().ExportToJson()) for e in self._layer]
Helper method to get geometries within a certain bounds (as WKT). Returns GeoJSON (loaded as a list of python dictionaries).
def get_capabilities(self):
    """Query the WFS server for its capabilities document.

    Internally used by get_feature_type_info.  Returns the parsed XML
    root element.
    """
    query = {
        'service': 'WFS',
        'request': 'GetCapabilities',
        'version': '1.0.0',
    }
    response = requests.get(self._wfs_server, params=query)
    response.raise_for_status()
    return ET.fromstring(response.content)
Gets capabilities. Queries WFS server for its capabilities. Internally used by get_feature_type_info.
def get_feature_type_info(self):
    """Return the FeatureType matching self._feature_name as a dict.

    Looks the type up in the server's capabilities document, flattens
    its child elements into a dict, and converts LatLongBoundingBox to
    a shapely box.  Returns None when the type is not found.

    NOTE(review): uses dict.iteritems() and Element.getchildren(), so
    this code targets Python 2 / old ElementTree -- confirm before
    porting.
    """
    caps = self.get_capabilities()
    if caps is None:
        return None
    el = caps.find('{http://www.opengis.net/wfs}FeatureTypeList')
    for e in el.findall('{http://www.opengis.net/wfs}FeatureType'):
        if e.find('{http://www.opengis.net/wfs}Name').text == self._feature_name:
            # transform into python dict
            # <Name>sample</Name>
            # <Abstract/>
            # <LatLongBoundingBox maxx="1" maxy="5" ... />
            #
            # becomes:
            #
            # {'Name' :'sample',
            #  'Abtract' : None,
            #  'LatLongBoundingBox' : {'maxx':1, 'maxy':5 ... }}
            #
            # The [28:] slice strips the '{http://www.opengis.net/wfs}'
            # namespace prefix (28 characters) from each child tag.
            d = {sube.tag[28:]:sube.text or sube.attrib or None for sube in e.getchildren()}
            # transform LatLongBoundingBox into a Shapely box
            llbb = {k:round(float(v), 4) for k,v in d['LatLongBoundingBox'].iteritems()}
            d['LatLongBoundingBox'] = box(llbb['minx'], llbb['miny'], llbb['maxx'], llbb['maxy'])
            return d
    return None
Gets FeatureType as a python dict. Transforms feature_name info into python dict.
def get_geoms_for_bounds(self, bounds):
    """Return geometries within *bounds* (a WKT string) from the WFS.

    Issues a GetFeature request with a bbox filter and returns the
    'geometry' member of every returned GeoJSON feature (a list of
    python dictionaries).
    """
    bbox = ','.join(str(b) for b in wkt.loads(bounds).bounds)
    query = {
        'service': 'WFS',
        'request': 'GetFeature',
        'typeName': self._feature_name,
        'outputFormat': 'json',
        'version': '1.0.0',
        'bbox': bbox,
    }
    response = requests.get(self._wfs_server, params=query)
    response.raise_for_status()
    return [feature['geometry'] for feature in response.json()['features']]
Helper method to get geometries within a certain bounds (as WKT). Returns GeoJSON (loaded as a list of python dictionaries).
def extract_edges_from_callable(fn):
    """Return the list of string edge names passed inside *fn*.

    Service constructors are written as ``lambda c: c('a', 'b')``.
    Calling that lambda with a capture function instead of a real
    constructor reads back the argument names -- the function call is
    treated as data rather than executed for effect.

    Raises:
        ValueError: if any captured edge is not a string.
    """
    def _capture(*args, **kwargs):
        # Record every positional and keyword value the lambda passes.
        return list(args) + list(kwargs.values())

    edges = fn(_capture)
    for edge in edges:
        if not isinstance(edge, str):
            raise ValueError('Provided edge "{}" is not a string'.format(edge))
    return list(edges)
This takes args and kwargs provided, and returns the names of the strings assigned. If a string is not provided for a value, an exception is raised. This is how we extract the edges provided in the brap call lambdas.
def read_history_file(self, filename=None):
    u'''Load a readline history file. The default filename is ~/.history.'''
    # BUG FIX: the source line was corrupted by a copy/paste duplication
    # ("...read_history_file(filenamef read_history_file(..."); this is
    # the reconstructed single definition.
    if filename is None:
        filename = self.mode._history.history_filename
    log(u"read_history_file from %s" % ensure_unicode(filename))
    self.mode._history.read_history_file(filename)
u'''Load a readline history file. The default filename is ~/.history.
def callback_handler_install(self, prompt, callback):
    u'''bool readline_callback_handler_install ( string prompt, callback callback)
    Initializes the readline callback interface and terminal, prints the
    prompt and returns immediately
    '''
    # BUG FIX: reconstructed from a corrupted (duplicated) source line.
    self.callback = callback
    self.readline_setup(prompt)
u'''bool readline_callback_handler_install ( string prompt, callback callback) Initializes the readline callback interface and terminal, prints the prompt and returns immediately
def callback_read_char(self):
    u'''Reads a character and informs the readline callback interface when
    a line is received'''
    # BUG FIX: reconstructed from a corrupted (duplicated) source line.
    if self.keyboard_poll():
        line = self.get_line_buffer() + u'\n'
        # however there is another newline added by
        # self.mode.readline_setup(prompt) which is called by
        # callback_handler_install; this differs from GNU readline
        self.add_history(self.mode.l_buffer)
        # TADA:
        self.callback(line)
u'''Reads a character and informs the readline callback interface when a line is received
def _bell(self):
    u'''Ring the bell if requested, according to self.bell_style:
    'none' is silent, 'visible' is not implemented, 'audible' rings the
    console bell; any other style raises ReadlineError.
    '''
    # BUG FIX: reconstructed from a corrupted (duplicated) source line.
    if self.bell_style == u'none':
        pass
    elif self.bell_style == u'visible':
        raise NotImplementedError(u"Bellstyle visible is not implemented yet.")
    elif self.bell_style == u'audible':
        self.console.bell()
    else:
        raise ReadlineError(u"Bellstyle %s unknown." % self.bell_style)
u'''ring the bell if requested.
def callback_read_char(self):  # Override base to get automatic newline
    u'''Reads a character and informs the readline callback interface when
    a line is received.  Unlike the base implementation, also writes a
    newline to the console.'''
    # BUG FIX: reconstructed from a corrupted (duplicated) source line.
    if self.keyboard_poll():
        line = self.get_line_buffer() + u'\n'
        self.console.write(u"\r\n")
        # however there is another newline added by
        # self.mode.readline_setup(prompt) which is called by
        # callback_handler_install; this differs from GNU readline
        self.add_history(self.mode.l_buffer)
        # TADA:
        self.callback(line)
u'''Reads a character and informs the readline callback interface when a line is received
def __get_param(self, param):
    """Return *param* from the parsed request body.

    :param param: parameter name to look for in request body
    :type param: str
    :rtype: str
    :raises FnexchangeError: 400 (bad request) when the parameter is
        absent from the request body
    """
    try:
        return self.request_json[param]
    except KeyError:
        raise FnexchangeError(400, 'Bad request: {0} is required'.format(param))
:param param: parameter name to look for in request body :type param: str :rtype: str
def set_list_attributes(element1, element2, attributes):
    """Copy the named attributes from *element1* onto *element2*.

    Attributes absent on *element1* are skipped silently.

    :param element1: The element whose attributes are copied.
    :type element1: hatemile.util.html.htmldomelement.HTMLDOMElement
    :param element2: The element that receives the attributes.
    :type element2: hatemile.util.html.htmldomelement.HTMLDOMElement
    :param attributes: The names of the attributes to copy.
    :type attributes: list(str)
    """
    for name in attributes:
        if not element1.has_attribute(name):
            continue
        element2.set_attribute(name, element1.get_attribute(name))
Copy a list of attributes of a element for other element. :param element1: The element that have attributes copied. :type element1: hatemile.util.html.htmldomelement.HTMLDOMElement :param element2: The element that copy the attributes. :type element2: hatemile.util.html.htmldomelement.HTMLDOMElement :param attributes: The list of attributes that will be copied. :type attributes: list(str)
def increase_in_list(list_to_increase, string_to_increase):
    """Append an item to a space-separated HTML token list.

    Returns the list unchanged when the item is already present (or
    the item is empty); returns the item alone when the list is empty.

    :param list_to_increase: The list.
    :type list_to_increase: str
    :param string_to_increase: The value of the item.
    :type string_to_increase: str
    :rtype: str
    """
    has_list = bool(list_to_increase)
    has_item = bool(string_to_increase)
    if has_list and has_item:
        if CommonFunctions.in_list(list_to_increase, string_to_increase):
            return list_to_increase
        return list_to_increase + ' ' + string_to_increase
    if has_list:
        return list_to_increase
    return string_to_increase
Add an item to an HTML list. :param list_to_increase: The list. :type list_to_increase: str :param string_to_increase: The value of the item. :type string_to_increase: str :return: The HTML list with the item added, if the item was not already contained in the list. :rtype: str
def in_list(list_to_search, string_to_search):
    """Return True when the space-separated list contains the item.

    :param list_to_search: The list.
    :type list_to_search: str
    :param string_to_search: The value of the item.
    :type string_to_search: str
    :rtype: bool
    """
    if list_to_search and string_to_search:
        for token in re.split('[ \n\t\r]+', list_to_search):
            if token == string_to_search:
                return True
    return False
Verify if the list contains the item. :param list_to_search: The list. :type list_to_search: str :param string_to_search: The value of item. :type string_to_search: str :return: True if the list contains the item or False is not contains. :rtype: bool
def is_valid_element(element):
    """Return True when HaTeMiLe may manipulate *element*.

    An element is invalid when it, or any ancestor below BODY/HTML,
    carries the DATA_IGNORE attribute.

    :param element: The element.
    :type element: hatemile.util.html.htmldomelement.HTMLDOMElement
    :rtype: bool
    """
    if element.has_attribute(CommonFunctions.DATA_IGNORE):
        return False
    parent = element.get_parent_element()
    if parent is None:
        return True
    tag = parent.get_tag_name()
    if (tag != 'BODY') and (tag != 'HTML'):
        # Keep walking up until we hit the document root tags.
        return CommonFunctions.is_valid_element(parent)
    return True
Check that the element can be manipulated by HaTeMiLe. :param element: The element :type element: hatemile.util.html.htmldomelement.HTMLDOMElement :return: True if element can be manipulated or False if element cannot be manipulated. :rtype: bool
def get_joke():
    """Fetch a random joke from the api.chucknorris.io API.

    Returns the joke text, or None when the request does not return
    HTTP 200.
    """
    response = requests.get("https://api.chucknorris.io/jokes/random")
    if response.status_code != 200:
        return None
    return json.loads(response.content.decode("UTF-8"))["value"]
Returns a joke from the api.chucknorris.io random-joke API. Returns None if unable to retrieve a joke.
def burnin(self, n):
    """Remove the earliest *n* ensemble members from the MCMC output.

    Trims headage, sediment_memory and objective (leading entries) and
    sediment_rate (leading columns) in place.
    """
    for attr in ('headage', 'sediment_memory', 'objective'):
        setattr(self, attr, getattr(self, attr)[n:])
    self.sediment_rate = self.sediment_rate[:, n:]
Remove the earliest n ensemble members from the MCMC output
def update_current_time(loop):
    """Cache the wall-clock time and reschedule itself every second.

    Keeping ``current_time`` cached avoids a time() call at the end of
    every keep-alive request when updating the request timeout.

    :param loop: the event loop used to schedule the next refresh
    :return: None
    """
    global current_time
    current_time = time()
    # Re-arm: refresh the cached time again in one second.
    loop.call_later(1, partial(update_current_time, loop))
Cache the current time, since it is needed at the end of every keep-alive request to update the request timeout time :param loop: :return:
def options(self, parser, env=None):
    """Add this plugin's command-line option to *parser*.

    The option's default value is taken from the NOSE_<DEST>
    environment variable when present.
    """
    if env is None:
        env = os.environ
    env_opt_name = 'NOSE_%s' % self.__dest_opt_name.upper()
    default = env.get(env_opt_name)
    parser.add_option(
        "--%s" % self.__opt_name,
        dest=self.__dest_opt_name,
        type="string",
        default=default,
        help=".ini file providing the environment for the "
             "test web application.")
Adds command-line options for this plugin.
def configure(self, options, conf):
    """Configure the plugin: enable it when the option was supplied.

    The option value (path to the INI file) is stored on
    EverestIni.ini_file_path for later use.
    """
    super(EverestNosePlugin, self).configure(options, conf)
    opt_val = getattr(options, self.__dest_opt_name, None)
    if opt_val:
        self.enabled = True
        EverestIni.ini_file_path = opt_val
Configures the plugin.
def get_app_url(self):
    """Return the application URL from the INI section ``server:main``.

    Falls back to host 0.0.0.0 and port 6543 when either setting is
    absent (i.e. ``http://0.0.0.0:6543``).
    """
    section = 'server:main'
    host = (self.get_setting(section, 'host')
            if self.has_setting(section, 'host') else '0.0.0.0')
    port = (int(self.get_setting(section, 'port'))
            if self.has_setting(section, 'port') else 6543)
    return 'http://%s:%d' % (host, port)
Returns the application URL as defined in the INI section `server:main`. Defaults to `http://0.0.0.0:6543`.
def destroy(self, eip_or_aid, disassociate=False):
    """Release an EIP.

    Classic addresses (dotted IPs) are released by PublicIp and are
    disassociated automatically by AWS.  VPC allocations
    (AllocationId) may optionally be disassociated first, which is
    required for associated non-default VPC instances.

    Returns True when the API reports success.
    """
    if "." in eip_or_aid:
        # Classic: EIPs are automatically disassociated on release.
        result = self.call("ReleaseAddress", response_data_key="return",
                           PublicIp=eip_or_aid)
    else:
        # VPC allocation.
        if disassociate:
            self.disassociate(eip_or_aid)
        result = self.call("ReleaseAddress", response_data_key="return",
                           AllocationId=eip_or_aid)
    return "true" == result
Release an EIP. If the EIP was allocated for a VPC instance, an AllocationId(aid) must be provided instead of a PublicIp. Setting disassociate to True will attempt to disassociate the IP before releasing it (required for associated nondefault VPC instances).
def associate(self, eip_or_aid, instance_id='', network_interface_id='',
              private_ip=''):
    """Associate an EIP with an instance or network interface.

    Classic addresses (dotted IPs) are identified by PublicIp; VPC
    allocations by AllocationId.
    """
    common = dict(InstanceId=instance_id,
                  NetworkInterfaceId=network_interface_id,
                  PrivateIpAddress=private_ip)
    if "." in eip_or_aid:
        return self.call("AssociateAddress", PublicIp=eip_or_aid, **common)
    return self.call("AssociateAddress", AllocationId=eip_or_aid, **common)
Associate an EIP with a given instance or network interface. If the EIP was allocated for a VPC instance, an AllocationId(aid) must be provided instead of a PublicIp.
def disassociate(self, eip_or_aid):
    """Disassociate an EIP; returns True when the API reports success.

    Classic addresses (dotted IPs) are identified by PublicIp; VPC
    allocations by AllocationId.
    """
    key = "PublicIp" if "." in eip_or_aid else "AllocationId"
    kwargs = {key: eip_or_aid}
    return "true" == self.call("DisassociateAddress",
                               response_data_key="return", **kwargs)
Disassociates an EIP. If the EIP was allocated for a VPC instance, an AllocationId(aid) must be provided instead of a PublicIp.
def get(self, instance_ids=None, filters=None):
    """List instance info, flattening all reservations into one list."""
    params = {}
    if filters:
        params["filters"] = make_filters(filters)
    if instance_ids:
        params['InstanceIds'] = instance_ids
    reservations = self.call("DescribeInstances",
                             response_data_key="Reservations", **params)
    if not reservations:
        return []
    return list(chain(*(r["Instances"] for r in reservations)))
List instance info.
def create(self, ami, count, config=None):
    """Create *count* instances of *ami* via the configurable Launcher."""
    launcher = self.Launcher(config=config)
    return launcher.launch(ami, count)
Create an instance using the launcher.
def control(self, instances, action):
    """Act on one or more instance ids.

    Valid actions: start, stop, reboot, terminate, protect, and
    unprotect.  protect/unprotect toggle the disableApiTermination
    attribute one instance at a time and return the string "true";
    the other actions are issued as a single batch call whose parsed
    response is returned.
    """
    # Accept a single instance id as a convenience.
    if not isinstance(instances, list) and\
       not isinstance(instances, tuple):
        instances = [instances]
    # Dispatch table: EC2 operation, response key, and parameters for
    # each supported action.
    actions = {'start': {'operation': "StartInstances",
                         'response_data_key': "StartingInstances",
                         'InstanceIds': instances},
               'stop': {'operation': "StopInstances",
                        'response_data_key': "StoppingInstances",
                        'InstanceIds': instances},
               'reboot': {'operation': "RebootInstances",
                          'response_data_key': "return",
                          'InstanceIds': instances},
               'terminate': {'operation': "TerminateInstances",
                             'response_data_key': "TerminatingInstances",
                             'InstanceIds': instances},
               'protect': {'operation': "ModifyInstanceAttribute",
                           'response_data_key': "return",
                           'Attribute': 'disableApiTermination',
                           'Value': 'true'},
               'unprotect': {'operation': "ModifyInstanceAttribute",
                             'response_data_key': "return",
                             'Attribute': 'disableApiTermination',
                             'Value': 'false'}}
    if (action in ('protect', 'unprotect')):
        # ModifyInstanceAttribute takes a single InstanceId, so these
        # must be applied per instance.
        for instance in instances:
            self.call(InstanceId=instance, **actions[action])
        return "true"
    else:
        return self.call(**actions[action])
Valid actions: start, stop, reboot, terminate, protect, and unprotect.
def Launcher(self, config=None):
    """Provides a configurable launcher for EC2 instances."""
    class _launcher(EC2ApiClient):
        """Configurable launcher for EC2 instances.
        Create the Launcher (passing an optional dict of its attributes),
        set its attributes (as described in the RunInstances API docs),
        then launch().
        """
        def __init__(self, aws, config):
            super(_launcher, self).__init__(aws)
            self.config = config
            # Snapshot the attribute names present at construction so
            # launch() can distinguish them from RunInstances parameters
            # the caller sets on the instance afterwards.
            self._attr = list(self.__dict__.keys()) + ['_attr']

        def launch(self, ami, min_count, max_count=0):
            """Use given AMI to launch min_count instances with the
            current configuration.  Returns instance info list.
            """
            params = config.copy()
            # Merge in attributes set after construction; these are
            # treated as extra RunInstances parameters.
            params.update(dict([i for i in self.__dict__.items()
                                if i[0] not in self._attr]))
            return self.call("RunInstances",
                             ImageId=ami,
                             MinCount=min_count,
                             MaxCount=max_count or min_count,
                             response_data_key="Instances",
                             **params)

    if not config:
        config = {}
    return _launcher(self._aws, config)
Provides a configurable launcher for EC2 instances.
def status(self, all_instances=None, instance_ids=None, filters=None):
    """Describe instance statuses; returns the InstanceStatuses list."""
    params = {}
    if filters:
        params["filters"] = make_filters(filters)
    if instance_ids:
        params['InstanceIds'] = instance_ids
    if all_instances is not None:
        params['IncludeAllInstances'] = all_instances
    return self.call("DescribeInstanceStatus",
                     response_data_key="InstanceStatuses", **params)
List instance info.
def events(self, all_instances=None, instance_ids=None, filters=None):
    """Return scheduled-event dicts, each tagged with its InstanceId.

    BUG FIX: the previous version pre-processed the arguments into a
    params dict (running make_filters itself and using the API key
    "InstanceIds") and forwarded them to self.status(), whose keyword
    is ``instance_ids`` -- a TypeError -- and which would have applied
    make_filters a second time.  Forward the original arguments
    directly instead.
    """
    statuses = self.status(all_instances=all_instances,
                           instance_ids=instance_ids,
                           filters=filters)
    event_list = []
    for status in statuses:
        # Not every status entry carries scheduled events.
        for event in status.get("Events") or []:
            event[u"InstanceId"] = status.get('InstanceId')
            event_list.append(event)
    return event_list
a list of tuples containing instance Id's and event information
def modify(self, api_action, sgid, other, proto_spec):
    """Make a change to a security group.

    api_action is an EC2 API name.  other is one of:
      - a group (sg-nnnnnnnn)
      - a group with account (<user id>/sg-nnnnnnnn)
      - a CIDR block (n.n.n.n/n)
    proto_spec is a triplet (<proto>, low_port, high_port).
    """
    params = {'group_id': sgid, 'ip_permissions': []}
    perm = {}
    params['ip_permissions'].append(perm)
    proto, from_port, to_port = proto_spec
    perm['IpProtocol'] = proto
    # Missing ports default to the full range (0..65535); a single-port
    # spec reuses from_port for both ends.
    perm['FromPort'] = from_port or 0
    perm['ToPort'] = to_port or from_port or 65535
    if other.startswith("sg-"):
        perm['UserIdGroupPairs'] = [{'GroupId': other}]
    elif "/sg-" in other:
        # "<account>/sg-..." form: split into owner and group id.
        account, group_id = other.split("/", 1)
        perm['UserIdGroupPairs'] = [{
            'UserId': account,
            'GroupId': group_id,
        }]
    else:
        perm['IpRanges'] = [{'CidrIp': other}]
    return self.call(api_action, **params)
Make a change to a security group. api_action is an EC2 API name. Other is one of: - a group (sg-nnnnnnnn) - a group with account (<user id>/sg-nnnnnnnn) - a CIDR block (n.n.n.n/n) Proto spec is a triplet (<proto>, low_port, high_port).
def add(self, sgid, other, proto_spec, direction="in"):  # returns bool
    """Add a security-group rule to group *sgid*.

    direction is 'in' (ingress) or 'out' (egress); see modify() for
    the other parameters.
    """
    api_by_direction = {
        "in": "AuthorizeSecurityGroupIngress",
        "out": "AuthorizeSecurityGroupEgress",
    }
    try:
        api = api_by_direction[direction]
    except KeyError:
        raise ValueError("direction must be one of ('in', 'out')")
    return self.modify(api, sgid, other, proto_spec)
Add a security group rule to group <sgid>. Direction is either 'in' (ingress) or 'out' (egress). See modify() for other parameters.
def get(self, volume_ids=None, filters=None):
    """List EBS Volume info.

    volume_ids may be a single id (str) or a list of ids; filters are
    converted via make_filters before being forwarded.
    """
    params = {}
    if filters:
        params["filters"] = make_filters(filters)
    # Accept a bare volume-id string as a convenience.
    if isinstance(volume_ids, str):
        volume_ids = [volume_ids]
    return self.call("DescribeVolumes", VolumeIds=volume_ids,
                     response_data_key="Volumes", **params)
List EBS Volume info.
def create(self, az, size_or_snap, volume_type=None, iops=None,
           encrypted=True):
    """Create an EBS Volume in availability zone *az*, encrypted by
    default.

    If size_or_snap parses as an int it is the size in GiB; otherwise
    it is taken to be a snapshot id to create the volume from.  iops
    must be set when the volume type is io1.

    BUG FIX: the encryption flag was previously passed as 'encrypted';
    the EC2 CreateVolume API parameter is 'Encrypted', matching the
    other CamelCase parameters used here (VolumeType, Iops, ...).
    """
    kwargs = {'Encrypted': encrypted}
    if volume_type:
        kwargs['VolumeType'] = volume_type
    if iops:
        kwargs['Iops'] = iops
    try:
        size = int(size_or_snap)
    except ValueError:
        # Not an integer: treat the value as a snapshot id.
        return self.call("CreateVolume", AvailabilityZone=az,
                         SnapshotId=size_or_snap, **kwargs)
    return self.call("CreateVolume", AvailabilityZone=az,
                     Size=size, **kwargs)
Create an EBS Volume using an availability-zone and size_or_snap parameter, encrypted by default. If the volume is created from a snapshot, (str)size_or_snap denotes the snapshot id. Otherwise, (int)size_or_snap denotes the amount of GiB's to allocate. iops must be set if the volume type is io1.
def attach(self, volume_id, instance_id, device_path):
    """Attach *volume_id* to *instance_id*, exposing it at
    *device_path*."""
    return self.call(
        "AttachVolume",
        VolumeId=volume_id,
        InstanceId=instance_id,
        Device=device_path,
    )
Attach a volume to an instance, exposing it with a device name.
def detach(self, volume_id, instance_id='', device_path='', force=False):
    """Detach a volume from an instance.

    BUG FIX: the force flag was previously passed as 'force'; the EC2
    DetachVolume API parameter is 'Force', matching the other
    CamelCase parameters used here (VolumeId, InstanceId, Device).
    """
    return self.call("DetachVolume",
                     VolumeId=volume_id,
                     InstanceId=instance_id,
                     Device=device_path,
                     Force=force)
Detach a volume from an instance.
def save(self, *args, **kwargs):
    """Populate the slug from create_slug() before delegating to the
    normal model save."""
    self.slug = self.create_slug()
    super(Slugable, self).save(*args, **kwargs)
Overrides the save method
def create_slug(self):
    """Return a slug unique among this model's rows.

    Starts from slugify(slug_source) and appends an incrementing
    counter ("name", "name-1", "name-2", ...) until no other row
    (excluding self) uses the slug.
    """
    name = self.slug_source
    counter = 0
    while True:
        candidate = (slugify(name) if counter == 0
                     else slugify('{0} {1}'.format(name, str(counter))))
        try:
            # Does the slug already exist, excluding the current object?
            self.__class__.objects.exclude(pk=self.pk).get(slug=candidate)
        except ObjectDoesNotExist:
            # No other row uses this slug -- it is unique.
            return candidate
        # Slug taken: bump the counter and try again.
        counter += 1
Creates slug, checks if slug is unique, and loop if not
def get_paths(folder, ignore_endswith=ignore_endswith):
    '''Return hologram file paths

    Parameters
    ----------
    folder: str or pathlib.Path
        Path to search folder
    ignore_endswith: list
        List of filename ending strings indicating which
        files should be ignored.
    '''
    # BUG FIX: reconstructed from a corrupted (duplicated) source line.
    folder = pathlib.Path(folder).resolve()
    files = folder.rglob("*")
    for ie in ignore_endswith:
        files = [ff for ff in files if not ff.name.endswith(ie)]
    return sorted(files)
Return hologram file paths Parameters ---------- folder: str or pathlib.Path Path to search folder ignore_endswith: list List of filename ending strings indicating which files should be ignored.
def call(func, args):
    """Call *func* with *args* normalized and cast to the hinted types.

    Args:
        func: The function to call.
        args: The arguments parsed by docopt.

    Returns:
        The return value of func.
    """
    assert hasattr(func, '__call__'), 'Cannot call func: {}'.format(
        func.__name__)
    # Unwrap callable objects to the underlying function so type hints
    # and the argspec can be inspected.
    raw_func = (
        func if isinstance(func, FunctionType) else func.__class__.__call__)
    # Unannotated parameters default to Any.
    hints = collections.defaultdict(lambda: Any, get_type_hints(raw_func))
    argspec = _getargspec(raw_func)
    named_args = {}
    varargs = ()
    for k, nk, v in _normalize(args):
        if nk == argspec.varargs:
            # *args values are cast element-wise via a variadic tuple.
            hints[nk] = Tuple[hints[nk], ...]
        elif nk not in argspec.args and argspec.varkw in hints:
            # Unknown names fall back to the **kwargs annotation.
            hints[nk] = hints[argspec.varkw]
        try:
            value = cast(hints[nk], v)
        except TypeError as e:
            _LOGGER.exception(e)
            six.raise_from(exc.InvalidCliValueError(k, v), e)
        if nk == argspec.varargs:
            varargs = value
        elif (nk in argspec.args or argspec.varkw) and (
                nk not in named_args or named_args[nk] is None):
            # First (non-None) value wins for named parameters.
            named_args[nk] = value
    return func(*varargs, **named_args)
Call the function with args normalized and cast to the correct types. Args: func: The function to call. args: The arguments parsed by docopt. Returns: The return value of func.
def get_callable(subcommand):
    # type: (config.RcliEntryPoint) -> Union[FunctionType, MethodType]
    """Return a callable object from the subcommand.

    Args:
        subcommand: A object loaded from an entry point. May be a
            module, class, or function.

    Returns:
        The callable entry point for the subcommand. If the subcommand
        is a function, it will be returned unchanged. If the
        subcommand is a module or a class, an instance of the command
        class will be returned.

    Raises:
        AssertionError: Raised when a module entry point does not have
            a callable class named Command.
    """
    _LOGGER.debug(
        'Creating callable from subcommand "%s".', subcommand.__name__)
    if isinstance(subcommand, ModuleType):
        _LOGGER.debug('Subcommand is a module.')
        assert hasattr(subcommand, 'Command'), (
            'Module subcommand must have callable "Command" class definition.')
        callable_ = subcommand.Command  # type: ignore
    else:
        callable_ = subcommand
    # Classes are instantiated; plain functions pass through unchanged.
    if any(isinstance(callable_, t) for t in six.class_types):
        return callable_()
    return callable_
Return a callable object from the subcommand. Args: subcommand: A object loaded from an entry point. May be a module, class, or function. Returns: The callable entry point for the subcommand. If the subcommand is a function, it will be returned unchanged. If the subcommand is a module or a class, an instance of the command class will be returned. Raises: AssertionError: Raised when a module entry point does not have a callable class named Command.
def _getargspec(func):
    """Return a Python 3-like argspec object.

    Note: unlike getargspec and getfullargspec, args also contains
    varargs and varkw when they exist.

    Args:
        func: The function to inspect.

    Returns:
        A named tuple (args, varargs, varkw); varargs/varkw may be
        None.
    """
    argspec = _getspec(func)
    args = list(argspec.args)
    if argspec.varargs:
        args += [argspec.varargs]
    if argspec[2]:  # "keywords" in PY2 and "varkw" in PY3
        args += [argspec[2]]
    return _ArgSpec(args, argspec.varargs, argspec[2])
Return a Python 3-like argspec object. Note: args contains varargs and varkw if they exist. This behavior differs from getargspec and getfullargspec. Args: func: The function to inspect. Returns: A named tuple with three parameters: args: All named arguments, including varargs and varkw if they are not None. varargs: The name of the *args variable. May be None. varkw: The name of the **kwargs variable. May be None.
def _normalize(args):
    # type: (Dict[str, Any]) -> Generator[Tuple[str, str, Any], None, None]
    """Yield (key, normalized key, value) for each docopt argument.

    The normalized key is a valid Python identifier: non-word
    characters (and a leading digit) become underscores, which are
    then stripped from the ends; keywords and builtin names get a
    trailing underscore so they do not shadow anything.
    """
    for k, v in six.iteritems(args):
        nk = re.sub(r'\W|^(?=\d)', '_', k).strip('_').lower()
        do_not_shadow = dir(six.moves.builtins)  # type: ignore
        if keyword.iskeyword(nk) or nk in do_not_shadow:
            nk += '_'
        _LOGGER.debug('Normalized "%s" to "%s".', k, nk)
        yield k, nk, v
Yield a 3-tuple containing the key, a normalized key, and the value. Args: args: The arguments parsed by docopt. Yields: A 3-tuple that contains the docopt parameter name, the parameter name normalized to be a valid python identifier, and the value assigned to the parameter.
def copy(self, filename=None):
    """Copy *filename* to the destination via a temp file then rename.

    Putting the file at a temporary destination path first makes the
    final file appear atomically at its real destination.
    """
    src = os.path.join(self.src_path, filename)
    dst = os.path.join(self.dst_path, filename)
    tmp = os.path.join(self.dst_tmp, filename)
    self.put(src=src, dst=tmp, callback=self.update_progress, confirm=True)
    self.rename(src=tmp, dst=dst)
Puts on destination as a temp file, renames on the destination.
def __create(self):
    """Serialize the email fields into a JSON payload (self.__data).

    Text fields are passed through self.encode; file paths, recipient
    lists and flags are embedded as-is.
    """
    self.__data = json.dumps({
        'config_path': self.encode(self.config_path),
        'subject': self.encode(self.__subject),
        'text': self.encode(self.__text),
        'html': self.encode(self.__html),
        'files': self.__files,
        'send_as_one': self.send_as_one,
        'addresses': self.__addresses,
        'ccs': self.__ccs,
    })
Construct the email
def send(self):
    """Construct the payload and execute the sendemail.py helper script.

    The script is located relative to this package
    (../scripts/sendemail.py) and launched detached (no inherited
    stdio) with the JSON payload as its single argument, using the
    interpreter returned by get_python_path().
    """
    self.__create()
    email_script = \
        os.path.join(Path(__file__).parents[1], 'scripts', 'sendemail.py')
    if os.path.exists(email_script):
        subprocess.Popen(
            [get_python_path(), email_script, self.__data],
            stdin=None, stdout=None, stderr=None, close_fds=True)
Construct and execute sendemail.py script Finds python binary by os.py, then uses the /usr/bin/python to execute email script
def load(self, filename, bs=512):
    """Starts filesystem analysis.  Detects supported filesystems and
    loads :attr:`partitions` array.

    Args:
        filename: Path to file or device for reading.
        bs: Block size forwarded to the volume loaders (default 512).

    Raises:
        IOError: File/device does not exist or is not readable.
    """
    self.__filename = filename
    self.__volumes = []
    # Detect partitioning scheme
    self.__partition_scheme = rawdisk.scheme.common.detect_scheme(filename)
    plugin_objects = [plugin.plugin_object for plugin in self.__fs_plugins]
    fs_detector = FilesystemDetector(fs_plugins=plugin_objects)
    if self.__partition_scheme == PartitionScheme.SCHEME_MBR:
        self.__load_mbr_volumes(filename, fs_detector, bs)
    elif self.__partition_scheme == PartitionScheme.SCHEME_GPT:
        self.__load_gpt_volumes(filename, fs_detector, bs)
    else:
        self.logger.warning('Partitioning scheme could not be determined.')
        # try detecting standalone volume
        volume = fs_detector.detect_standalone(filename, offset=0)
        if volume is not None:
            volume.load(filename, offset=0)
            self.__volumes.append(volume)
        else:
            self.logger.warning(
                'Were not able to detect standalone volume type')
Starts filesystem analysis. Detects supported filesystems and \ loads :attr:`partitions` array. Args: filename - Path to file or device for reading. Raises: IOError - File/device does not exist or is not readable.
async def async_get_camera_image(self, image_name, username=None,
                                 password=None):
    """Grab a single image from the Xeoma web server.

    Arguments:
        image_name: the name of the image to fetch (i.e. image01)
        username: the username to directly access this image
        password: the password to directly access this image

    Raises:
        XeomaError: on authentication failure, connection timeout, or
            transport error.
    """
    try:
        data = await self.async_fetch_image_data(
            image_name, username, password)
        # A None payload means the server did not return a JPEG,
        # which is treated as an authentication failure.
        if data is None:
            raise XeomaError('Unable to authenticate with Xeoma web '
                             'server')
        return data
    except asyncio.TimeoutError:
        raise XeomaError('Connection timeout while fetching camera image.')
    except aiohttp.ClientError as e:
        raise XeomaError('Unable to fetch image: {}'.format(e))
Grab a single image from the Xeoma web server Arguments: image_name: the name of the image to fetch (i.e. image01) username: the username to directly access this image password: the password to directly access this image
async def async_fetch_image_data(self, image_name, username, password):
    """Fetch image data from the Xeoma web server.

    Arguments:
        image_name: the name of the image to fetch (i.e. image01)
        username: the username to directly access this image
        password: the password to directly access this image

    Returns the raw JPEG bytes, or None when the response is not an
    image (e.g. bad credentials).
    """
    params = {}
    cookies = self.get_session_cookie()
    if username is not None and password is not None:
        params['user'] = self.encode_user(username, password)
    else:
        # No per-image credentials: rely on the session cookie alone.
        params['user'] = ''
    async with aiohttp.ClientSession(cookies=cookies) as session:
        resp = await session.get(
            '{}/{}.jpg'.format(self._base_url, image_name),
            params=params
        )
        if resp.headers['Content-Type'] == 'image/jpeg':
            data = await resp.read()
        else:
            data = None
    return data
Fetch image data from the Xeoma web server Arguments: image_name: the name of the image to fetch (i.e. image01) username: the username to directly access this image password: the password to directly access this image
async def async_get_image_names(self):
    """Parse the web server camera view for camera image names.

    Returns a list of (image_name, username, password) tuples; the
    credentials are None when the page embeds none for that image.

    Raises:
        XeomaError: when no image names are found, embedded
            credentials cannot be parsed, or the server is
            unreachable.
    """
    cookies = self.get_session_cookie()
    try:
        async with aiohttp.ClientSession(cookies=cookies) as session:
            resp = await session.get(
                self._base_url
            )
            t = await resp.text()
            match = re.findall('(?:\w|\d|")/(.*?).(?:mjpg|jpg)', t)
            if len(match) == 0:
                raise XeomaError('Unable to find any camera image names')
            image_names = set(match)
            results = []
            for image_name in image_names:
                match = re.search(
                    image_name + '\.(?:mjpg|jpg).*?user=(.*?)&', t
                )
                if match and len(match.group(1)) > 0:
                    # Embedded credentials are URL-quoted base64 of
                    # 'user:password'.
                    d = base64.b64decode(unquote(match.group(1))) \
                        .decode('ASCII')
                    creds = d.split(':')
                    if len(creds) < 2:
                        raise XeomaError('Error parsing image credentials')
                    results.append((image_name, creds[0], creds[1]))
                else:
                    results.append((image_name, None, None))
            return results
    except asyncio.TimeoutError as e:
        raise XeomaError("Unable to connect to Xeoma web server")
Parse web server camera view for camera image names
def get_session_cookie(self):
    """Return the aiohttp cookie dict for this session, or None when
    no credentials are configured."""
    if self._login is None or self._password is None:
        return None
    return {'sessionkey': self.encode_user(self._login, self._password)}
Create a session cookie object for use by aiohttp
def _get_sha256_digest(self, content):
    """Return the SHA-256 digest of *content* in the header format the
    Merchant API expects ('SHA256=<base64 digest>').

    NOTE(review): base64.b64encode returns bytes on Python 3, so the
    str concatenation implies this targets Python 2 -- confirm before
    porting.
    """
    content_sha256 = base64.b64encode(SHA256.new(content).digest())
    return 'SHA256=' + content_sha256
Return the sha256 digest of the content in the header format the Merchant API expects.
def _sha256_sign(self, method, url, headers, body):
    """Sign the request with RSA-SHA256.

    The signed message is '<METHOD>|<url>|' followed by the X-Mcash-*
    headers, upper-cased, sorted by name and joined with '&'.

    NOTE(review): *body* is accepted but never folded into the
    signature -- confirm this matches the Merchant API spec.
    """
    d = ''
    sign_headers = method.upper() + '|' + url + '|'
    for key, value in sorted(headers.items()):
        if key.startswith('X-Mcash-'):
            sign_headers += d + key.upper() + '=' + value
            d = '&'
    rsa_signature = base64.b64encode(
        self.signer.sign(SHA256.new(sign_headers)))
    return 'RSA-SHA256 ' + rsa_signature
Sign the request with SHA256.
def create_or_update_groups(self):
    """Sync Group rows to self.group_names.

    Creates any missing groups, deletes groups not in the list, and,
    when verbose, prints the resulting sorted group names.
    """
    for name in self.group_names:
        # get_or_create replaces the get / except-ObjectDoesNotExist /
        # create dance with the idiomatic single call.
        Group.objects.get_or_create(name=name)
    Group.objects.exclude(name__in=self.group_names).delete()
    if self.verbose:
        names = [obj.name for obj in Group.objects.all().order_by("name")]
        sys.stdout.write(f" Groups are: {', '.join(names)}\n")
Adds/deletes group model instances to match the given list of group names.
def token(self):
    '''Attempt to return the auth header token.

    :return: the token from an ``Authorization: Token <value>`` header,
        or the raw header value (``''`` when absent) if no
        ``Token `` prefix is present.
    '''
    # BUGFIX: the original source was corrupted -- the function body was
    # duplicated and fused mid-identifier ("auth_headef token(self):").
    # This is the single clean implementation.
    auth_header = self.headers.get('Authorization', '')
    if 'Token ' in auth_header:
        return auth_header.partition('Token ')[-1]
    else:
        return auth_header
Attempt to return the auth header token. :return: token related to request
def create_toolbox(self, filename):
    """Create a new Python toolbox (.pyt) where each task is a GPTool.

    The tasks come from ``self.tasks``; one help XML file is written per
    task next to the toolbox.

    :param filename: the filename of the generated toolbox; any
        extension is stripped and ``.pyt`` is appended.
    :return: the extension-less toolbox filename.
    """
    filename = os.path.splitext(filename)[0]
    label = os.path.basename(filename)

    # Get task information first so we can build the tool list
    tool_list = []
    for task in self.tasks:
        tool_list.append(task.name)

    # O_EXCL: refuse to overwrite an existing toolbox file.
    file_descriptor = os.open(filename + '.pyt', os.O_WRONLY | os.O_CREAT | os.O_EXCL)
    with os.fdopen(file_descriptor, 'w') as self.toolbox_file:
        self.toolbox_file.write(self._imports_template.substitute({}))

        toolbox_class = self._toolbox_class_template.substitute(
            {'label': label,
             'alias': self.alias,
             'toolList': param_builder.convert_list(tool_list)
             }
        )
        self.toolbox_file.write(toolbox_class)

        for task in self.tasks:
            gp_tool = self.create_tool(task)
            self.toolbox_file.write(gp_tool)
            # One help file per task: <filename>.<taskname>.pyt.xml
            toolbox_help_filename = '.'.join((filename, task.name, 'pyt', 'xml'))
            help_builder.create(toolbox_help_filename, task, self.alias)
    return filename
Creates a new Python toolbox where each task name is a GPTool in the toolbox.
:param filename: the filename of the generated toolbox
Note: the tasks are taken from ``self.tasks`` and the service alias from
``self.alias``; only tasks from one ESE service may be used.
def create_tool(self, task):
    """Render a single GPTool class for the toolbox from *task*."""
    context = {
        'taskName': task.name,
        'taskDisplayName': task.display_name,
        'taskDescription': task.description,
        'canRunInBackground': True,
        'taskUri': task.uri,
    }
    # The execute body is rendered first, from the base task fields only.
    context['execute'] = self._execute_template.substitute(context)
    context['parameterInfo'] = param_builder.create_param_info(task.parameters, self.parameter_map)
    context['updateParameter'] = param_builder.create_update_parameter(task.parameters, self.parameter_map)
    context['preExecute'] = param_builder.create_pre_execute(task.parameters, self.parameter_map)
    context['postExecute'] = param_builder.create_post_execute(task.parameters, self.parameter_map)
    return self._tool_template.substitute(context)
Creates a new GPTool for the toolbox.
def import_script(self, script_name):
    """Copy the named script file verbatim into the toolbox file."""
    script_path = os.path.abspath(script_name)
    with open(script_path, 'r') as source:
        self.toolbox_file.write(source.read())
Finds the script file and copies it into the toolbox
def union(self, other, *, ignore_strand=False):
    """Union of two intervals.

    :param GenomicInterval other: interval to union with
    :param bool ignore_strand: skip the chromosome/strand sanity check
    :return: union of the two intervals if overlapping or touching
    :rtype: GenomicInterval
    """
    if not ignore_strand:
        self._assert_same_chromosome_and_strand(other)
    result = deepcopy(self)
    result.union_update(other)
    return result
Union of two intervals
:param GenomicInterval other: interval to union with
:return: union of two intervals if overlapping or touching
:rtype: GenomicInterval
def make_relationship(self, relator, direction=RELATIONSHIP_DIRECTIONS.BIDIRECTIONAL):
    """Create a relationship object for this attribute from the given
    relator and relationship direction.
    """
    if IEntity.providedBy(relator):  # pylint:disable=E1101
        return DomainRelationship(relator, self, direction=direction)
    if IResource.providedBy(relator):  # pylint:disable=E1101
        return ResourceRelationship(relator, self, direction=direction)
    raise ValueError('Invalid relator argument "%s" for '
                     'relationship; must provide IEntity or '
                     'IResource.' % relator)
Create a relationship object for this attribute from the given relator and relationship direction.
def parse(version):
    # type: (Union[str, unicode]) -> list
    """
    Transform version string into comparable list

    :param version: version string, e.g. 0.11.23rc1
    :return: list of version chunks, e.g. [0, 11, 23, 'rc1']

    >>> parse("1")
    [1]
    >>> parse("0.0.1")
    [0, 0, 1]
    >>> parse("0.11.23rc1")
    [0, 11, 23, 'rc1']
    """
    # Numeric tokens become ints; alphanumeric tokens stay strings.
    tokens = re.findall(r"(\d+|[A-Za-z]\w*)", version)
    return [int(tok) if tok.isdigit() else tok for tok in tokens]
Transform version string into comparable list :param version: version string, e.g. 0.11.23rc1 :return: list of version chunks, e.g. [0, 11, 23, 'rc1'] >>> parse("1") [1] >>> parse("0.0.1") [0, 0, 1] >>> parse("0.11.23rc1") [0, 11, 23, 'rc1']
def compare(ver1, ver2):
    # type: (Union[str, unicode], Union[str, unicode]) -> int
    """ Compares two version string, returning {-1|0|1} just as cmp().
        (-1: ver1 < ver2, 0: ver1==ver2, 1: ver1 > ver2)

    >>> compare("0.1.1", "0.1.2")
    -1
    >>> compare("0.1.2", "0.1.1")
    1
    >>> compare("0.1", "0.1.1")
    0
    >>> compare("0.1.1rc1", "0.1.1a")
    1
    >>> compare("0.1.1rc1", "0.1.1")
    -1
    """
    chunks1 = parse(str(ver1))
    chunks2 = parse(str(ver2))
    # Compare the shared prefix chunk by chunk.
    for left, right in zip(chunks1, chunks2):
        if left != right:
            return 1 if left > right else -1
    # A trailing string chunk (pre-release tag) sorts below the bare
    # version; a trailing numeric chunk is ignored ("0.1" == "0.1.1").
    shared = min(len(chunks1), len(chunks2))
    if len(chunks1) > shared and isinstance(chunks1[shared], str):
        return -1
    if len(chunks2) > shared and isinstance(chunks2[shared], str):
        return 1
    return 0
Compares two version string, returning {-1|0|1} just as cmp(). (-1: ver1 < ver2, 0: ver1==ver2, 1: ver1 > ver2) >>> compare("0.1.1", "0.1.2") -1 >>> compare("0.1.2", "0.1.1") 1 >>> compare("0.1", "0.1.1") 0 >>> compare("0.1.1rc1", "0.1.1a") 1 >>> compare("0.1.1rc1", "0.1.1") -1
def register(self, app, options): url_prefix = options.get('url_prefix', self.url_prefix) # Routes for future in self.routes: # attach the blueprint name to the handler so that it can be # prefixed properly in the router future.handler.__blueprintname__ = self.name # Prepend the blueprint URI prefix if available uri = url_prefix + future.uri if url_prefix else future.uri app.route( uri=uri[1:] if uri.startswith('//') else uri, methods=future.methods, host=future.host or self.host, strict_slashes=future.strict_slashes, stream=future.stream )(future.handler) # Middleware for future in self.middlewares: if future.args or future.kwargs: app.middleware(*future.args, **future.kwargs)(future.middleware) else: app.middleware(future.middleware) # Exceptions for future in self.exceptions: app.exception(*future.args, **future.kwargs)(future.handler) # Static Files for future in self.statics: # Prepend the blueprint URI prefix if available uri = url_prefix + future.uri if url_prefix else future.uri app.static(uri, future.file_or_directory, *future.args, **future.kwargs) # Event listeners for event, listeners in self.listeners.items(): for listener in listeners: app.listener(event)(listener)
Register the blueprint to the mach9 app.
def add_route(self, handler, uri, methods=frozenset({'GET'}), host=None,
              strict_slashes=False):
    """Create a blueprint route from a function.

    :param handler: function for handling uri requests. Accepts function,
                    or class instance with a view_class method.
    :param uri: endpoint at which the route will be accessible.
    :param methods: list of acceptable HTTP methods.
    :return: function or class instance
    """
    # Handle HTTPMethodView differently: derive the methods from the
    # lower-cased verb methods defined on the view class.
    if hasattr(handler, 'view_class'):
        methods = {
            verb for verb in ('GET', 'POST', 'PUT', 'HEAD',
                              'OPTIONS', 'PATCH', 'DELETE')
            if getattr(handler.view_class, verb.lower(), None)
        }
    # handle composition view differently
    if isinstance(handler, self._composition_view_class):
        methods = handler.handlers.keys()
    self.route(uri=uri, methods=methods, host=host,
               strict_slashes=strict_slashes)(handler)
    return handler
Create a blueprint route from a function. :param handler: function for handling uri requests. Accepts function, or class instance with a view_class method. :param uri: endpoint at which the route will be accessible. :param methods: list of acceptable HTTP methods. :return: function or class instance
def static(self, uri, file_or_directory, *args, **kwargs):
    """Create a blueprint static route from a decorated function.

    :param uri: endpoint at which the route will be accessible.
    :param file_or_directory: Static asset.
    """
    self.statics.append(FutureStatic(uri, file_or_directory, args, kwargs))
Create a blueprint static route from a decorated function. :param uri: endpoint at which the route will be accessible. :param file_or_directory: Static asset.
def remove_boards_gui(hwpack=''):
    """remove boards by GUI."""
    if not hwpack:
        # Ask which hardware package to use when more than one is
        # installed; otherwise pick the only one.
        if len(hwpack_names()) > 1:
            hwpack = psidialogs.choice(hwpack_names(),
                                       'select hardware package to select board from!',
                                       title='select')
        else:
            hwpack = hwpack_names()[0]
        print('%s selected' % hwpack)
    if not hwpack:
        # User cancelled the package dialog.
        return
    sel = psidialogs.multi_choice(board_names(hwpack),
                                  'select boards to remove from %s!' % boards_txt(hwpack),
                                  title='remove boards')
    print('%s selected' % sel)
    if not sel:
        return
    for x in sel:
        remove_board(x)
        print('%s was removed' % x)
remove boards by GUI.
def group_activities(queryset):
    """Group a queryset of activity objects by their target.

    Returns an OrderedDict keyed by each activity's target; each value is
    a dict with:

    item: the first activity seen for the target
    actors: a list of all actor names who acted upon the target
    actor_count: zero-indexed count of actors (useful for
        "Joe and {{ actor_count }} others have...")
    verb: the activity's verb string (or its override_string), to avoid
        extra lookups
    last_modified: the most recent timestamp among the target's
        activities
    current_item: True when the first activity seen is within the past
        day
    """
    grouped = OrderedDict()
    for activity in queryset:
        # "Current" is defined as acted upon within the past day.
        is_current = (activity.timestamp >=
                      datetime.datetime.now() - datetime.timedelta(days=1))
        entry = grouped.get(activity.target)
        if entry is None:
            grouped[activity.target] = {
                'item': activity,
                'actors': [activity.actor_name],
                'actor_count': 0,
                'verb': activity.override_string if activity.override_string else activity.verb,
                'last_modified': activity.timestamp,
                'current_item': is_current,
            }
        else:
            # Target already seen: fold in the new actor and timestamp.
            if activity.actor_name not in entry['actors']:
                entry['actors'].append(activity.actor_name)
                entry['actor_count'] += 1
            if entry['last_modified'] < activity.timestamp:
                entry['last_modified'] = activity.timestamp
    return grouped
Given a queryset of activity objects, will group them by actors and return an OrderedDict including: item: The original target item being acted upon (activity.content_object) actors: a list of all actors who have acted upon the target. actor_count: zero-indexed count of actors. Useful for "Joe and {{ actor_count }} others have..." verb: the item's verb string, to avoid extra lookups last_modified: the time the target was last acted upon. The string version of the target is also available as the dict key.
def single(C, namespace=None):
    """An element maker with a single namespace that uses that namespace
    as the default.
    """
    if namespace is None:
        maker = C()
    else:
        maker = C(default=namespace, _=namespace)
    return maker._
An element maker with a single namespace that uses that namespace as the default
def intersect(self, **kwargs):
    """Intersect Point and Bathymetry.

    Expects ``end_point`` in kwargs; returns True when the particle's
    end point lies below the bathymetry at that location.
    """
    end_point = kwargs.pop('end_point')
    depth = self.get_depth(location=end_point)
    # Bathymetry and a particle's depth are both negative down.
    return depth < 0 and depth > end_point.depth
Intersect Point and Bathymetry returns bool
def react(self, **kwargs):
    """Dispatch a bathymetry reaction by type.

    The time of reaction is ignored here and should be handled by
    whatever called this function.
    """
    react_type = kwargs.get("type", self._type)
    if react_type == 'hover':
        return self.__hover(**kwargs)
    if react_type == 'stick':
        return None
    if react_type == 'reverse':
        return self.__reverse(**kwargs)
    raise ValueError("Bathymetry interaction type not supported")
The time of reaction is ignored here and should be handled by whatever called this function.
def __hover(self, **kwargs):
    """Hover the particle 1m above the bathymetry WHERE IT WOULD HAVE
    ENDED UP.

    This is WRONG and we need to compute the location that it actually
    hit the bathymetry and hover 1m above THAT.
    """
    end_point = kwargs.pop('end_point')
    # The location argument here should be the point that intersected
    # the bathymetry, not the end_point that is "through" the bathymetry.
    bottom = self.get_depth(location=end_point)
    return Location4D(latitude=end_point.latitude,
                      longitude=end_point.longitude,
                      depth=bottom + 1.)
This hovers the particle 1m above the bathymetry WHERE IT WOULD HAVE ENDED UP. This is WRONG and we need to compute the location that it actually hit the bathymetry and hover 1m above THAT.
def __reverse(self, **kwargs):
    """If we hit the bathymetry, set the location back to where we came
    from.
    """
    origin = kwargs.pop('start_point')
    return Location4D(latitude=origin.latitude,
                      longitude=origin.longitude,
                      depth=origin.depth)
If we hit the bathymetry, set the location to where we came from.
def main():
    # type: () -> typing.Any
    """Parse the command line options and launch the requested command.

    If the command is 'help' then print the help message for the
    subcommand; if no subcommand is given, print the standard help
    message.
    """
    colorama.init(wrap=six.PY3)
    doc = usage.get_primary_command_usage()
    allow_subcommands = '<command>' in doc
    args = docopt(doc, version=settings.version,
                  options_first=allow_subcommands)
    # Only install our excepthook if nothing else has replaced it.
    if sys.excepthook is sys.__excepthook__:
        sys.excepthook = log.excepthook
    try:
        log.enable_logging(log.get_log_level(args))
        # Arguments after the subcommand name (or after the program name
        # when no subcommand was given).
        default_args = sys.argv[2 if args.get('<command>') else 1:]
        if (args.get('<command>') == 'help' and
                None not in settings.subcommands):
            subcommand = next(iter(args.get('<args>', default_args)), None)
            return usage.get_help_usage(subcommand)
        argv = [args.get('<command>')] + args.get('<args>', default_args)
        return _run_command(argv)
    except exc.InvalidCliValueError as e:
        return str(e)
Parse the command line options and launch the requested command. If the command is 'help' then print the help message for the subcommand; if no subcommand is given, print the standard help message.
def _get_subcommand(name):
    # type: (str) -> config.RcliEntryPoint
    """Return the function for the specified subcommand.

    Args:
        name: The name of a subcommand.

    Returns:
        The loadable object from the entry point represented by the
        subcommand.

    Raises:
        ValueError: if *name* is not a registered subcommand.
    """
    _LOGGER.debug('Accessing subcommand "%s".', name)
    if name in settings.subcommands:
        return settings.subcommands[name]
    raise ValueError(
        '"{subcommand}" is not a {command} command. \'{command} help -a\' '
        'lists all available subcommands.'.format(
            command=settings.command, subcommand=name)
    )
Return the function for the specified subcommand. Args: name: The name of a subcommand. Returns: The loadable object from the entry point represented by the subcommand.
def _run_command(argv):
    # type: (typing.List[str]) -> typing.Any
    """Run the command with the given CLI options and exit.

    Command functions are expected to have a __doc__ string that is
    parseable by docopt.

    Args:
        argv: The list of command line arguments supplied for a command.
            The first argument is expected to be the name of the command
            to be run. Note that this is different than the full
            arguments parsed by docopt for the entire program.

    Raises:
        ValueError: Raised if the user attempted to run an invalid
            command.
    """
    name, remaining = _get_command_and_argv(argv)
    _LOGGER.info('Running command "%s %s" with args: %s',
                 settings.command, name, remaining)
    entry_point = _get_subcommand(name)
    func = call.get_callable(entry_point)
    parsed = _get_parsed_args(name, usage.format_usage(entry_point.__doc__),
                              remaining)
    # A command returning a falsy value is treated as success (exit 0).
    return call.call(func, parsed) or 0
Run the command with the given CLI options and exit. Command functions are expected to have a __doc__ string that is parseable by docopt. Args: argv: The list of command line arguments supplied for a command. The first argument is expected to be the name of the command to be run. Note that this is different than the full arguments parsed by docopt for the entire program. Raises: ValueError: Raised if the user attempted to run an invalid command.
def _get_command_and_argv(argv): # type: (typing.List[str]) -> typing.Tuple[str, typing.List[str]] command_name = argv[0] if not command_name: argv = argv[1:] elif command_name == settings.command: argv.remove(command_name) return command_name, argv
Extract the command name and arguments to pass to docopt. Args: argv: The argument list being used to run the command. Returns: A tuple containing the name of the command and the arguments to pass to docopt.
def _get_parsed_args(command_name, doc, argv):
    # type: (str, str, typing.List[str]) -> typing.Dict[str, typing.Any]
    """Parse the docstring with docopt.

    Args:
        command_name: The name of the subcommand to parse.
        doc: A docopt-parseable string.
        argv: The list of arguments to pass to docopt during parsing.

    Returns:
        The docopt results dictionary. If the subcommand has the same
        name as the primary command, the subcommand value will be added
        to the dictionary.
    """
    _LOGGER.debug('Parsing docstring: """%s""" with arguments %s.', doc, argv)
    parsed = docopt(doc, argv=argv)
    if command_name == settings.command:
        parsed[command_name] = True
    return parsed
Parse the docstring with docopt. Args: command_name: The name of the subcommand to parse. doc: A docopt-parseable string. argv: The list of arguments to pass to docopt during parsing. Returns: The docopt results dictionary. If the subcommand has the same name as the primary command, the subcommand value will be added to the dictionary.
def trace(msg):
    """Print a trace message to stderr if environment variable is set."""
    if os.environ.get('JARN_TRACE') != '1':
        return
    print('TRACE:', msg, file=sys.stderr)
Print a trace message to stderr if environment variable is set.
def Efn(Ms, eccs):
    """works for -2pi < Ms < 2pi, e <= 0.97"""
    Ms = np.atleast_1d(Ms)
    eccs = np.atleast_1d(eccs)
    # Fold M into [0, 2pi) for the solver, remembering which revolution
    # each value came from so it can be restored afterwards.
    revolutions = np.floor(Ms / (2 * np.pi))
    Es = EFN(Ms % (2 * np.pi), eccs)
    return Es + revolutions * (2 * np.pi)
works for -2pi < Ms < 2pi, e <= 0.97
def import_module(modulefn):
    """
    <Purpose>
      Imports a seash module with the specified modulename.  The seash
      module is treated as a python package.

    <Arguments>
      modulefn: The name of the main modules file.

    <Side Effects>
      The commands found in modulename, alongside the helptext for the
      module, will be imported and returned.

    <Exceptions>
      ModuleImportError: the module does not define the expected
      moduledata dictionary.

    <Return>
      A dictionary containing the command_dict and the helptext, e.g.:
        {'command_dict': {'command1': ..., 'command2': ...},
         'help_text': 'This is the module helpstring'}
    """
    # We can't import python modules by specifying the full path to the
    # module, so temporarily prepend the modules folder to the pythonpath.
    sys.path = [MODULES_FOLDER_PATH] + sys.path
    moduleobj = __import__(modulefn)
    try:
        _attach_module_identifier(moduleobj.moduledata['command_dict'], modulefn)
        return moduleobj.moduledata
    except (NameError, KeyError):
        raise seash_exceptions.ModuleImportError("Module '" + modulefn + "' is not well defined")
    finally:
        # Remove the module path from the pythonpath because we don't need it anymore
        sys.path = sys.path[1:]
<Purpose> Imports a seash module with the specified modulename. The seash module is treated as a python package <Arguments> modulefn: The name of the main modules file. <Side Effects> The commands found in modulename, alongside the helptext for the module will be imported and returned. <Exceptions> ImportError <Return> A dictionary containing the command_dict and the helptext. An example: { 'command_dict': {'command1': ...,'command2':...}, 'help_text': 'This is the module helpstring' }
def import_all_modules():
    """
    <Purpose>
      Imports all modules within the modules folder.  This should only be
      called once throughout the entire execution of seash.

    <Side Effects>
      Modules that import cleanly have their commanddicts and helptexts
      cached in module_data.  Import problems (including duplicate
      imports) are reported on stdout instead of raised.

    <Exceptions>
      None; ModuleImportError is caught and printed.

    <Return>
      None
    """
    for module_folder in get_installed_modules():
        try:
            # Guard against importing the same module twice.
            if module_folder in module_data:
                raise seash_exceptions.ModuleImportError("Module already imported")
            module_data[module_folder] = import_module(module_folder)
        except seash_exceptions.ModuleImportError, e:
            print str(e)
<Purpose>
  Imports all modules within the modules folder.  This should only be
  called once throughout the entire execution of seash.
<Side Effects>
  Modules that don't have collisions will have their commanddicts and
  helptexts loaded into the module data cache.
<Exceptions>
  ImportError: There is an existing module with the same name already
  imported.
<Return>
  None.  The imported command data is stored in the module-level cache.
def ensure_no_conflicts_in_commanddicts(originaldict, comparedict): """ Child nodes are identical if they all of the following are identical: helptext/callback/summary. There are 3 cases we have to worry about. > Shared child node. > Child nodes are identical. Check grandchildren. > Only one is defined. Check grandchildren. > Both child nodes are defined and are not identical. Reject. > Node is not shared. Accept. """ for child in comparedict.keys(): # Node not shared. if child not in originaldict: continue # Shared node comparechild_defined = is_commanddictnode_defined(comparedict[child]) originalchild_defined = is_commanddictnode_defined(originaldict[child]) # Only one is defined, or; # both are defined and they are identical if ((comparechild_defined ^ originalchild_defined) or (comparechild_defined and originalchild_defined and _are_cmd_nodes_same(originaldict[child], comparedict[child]))): try: ensure_no_conflicts_in_commanddicts(comparedict[child]['children'], originaldict[child]['children']) except seash_exceptions.ModuleConflictError, e: # Reconstruct the full command recursively raise seash_exceptions.ModuleConflictError(child + " " + str(e) + " ("+module_name+")") continue # Not identical. Conflict found. # Also include which module the conflicting module was found from. if 'module' in originaldict[child]: module_name = originaldict['module'][child] else: module_name = "default" raise seash_exceptions.ModuleConflictError(child + ' ('+module_name+')')
<Purpose> Recursively compares two commanddicts to see if they have conflicting commands. <Arguments> originaldict: A commanddict to compare. comparedict: A commanddict to compare. <Side Effects> None <Exceptions> ModuleConflictError - A command was conflicting. The error detail is the problematic command. <Returns> None
def _are_cmd_nodes_same(node1, node2): # Everything in node1 should be in node2 for propertytype in node1: if (not propertytype in node2 or node1[propertytype] != node2[propertytype]): return False return True
Checks to see if two cmddnodes are the same. Two cmdnodes are defined to be the same if they have the same callbacks/ helptexts/summaries.
def are_cmddicts_same(dict1, dict2):
    """Checks to see if two cmddicts are the same.

    Two cmddicts are defined to be the same if they have the same
    callbacks/helptexts/children/summaries for all nodes.
    """
    # If the sets of keys differ, the dicts cannot be the same.
    if set(dict1.keys()) != set(dict2.keys()):
        return False

    for name, node in dict1.items():
        other = dict2[name]
        # Every property (including 'children', by equality) must match.
        if any(prop not in other or node[prop] != other[prop]
               for prop in node):
            return False
        # Children are additionally checked recursively.
        if not are_cmddicts_same(node['children'], other['children']):
            return False
    return True
Checks to see if two cmddicts are the same. Two cmddicts are defined to be the same if they have the same callbacks/ helptexts/children/summaries for all nodes.
def merge_commanddict_recursive(originaldict, mergedict):
    """
    <Purpose>
      Recursively merge mergedict into originaldict.  We assume that
      there are no conflicting modules here; be sure to check that there
      aren't any collisions first!

      We do not handle the case where a shared node is defined on both
      sides; that check is done by ensure_no_conflicts_in_commanddict().
      We deep-copy mergedict's subtrees to make the deletion case easier.

    <Arguments>
      originaldict: The commanddict to merge to.
      mergedict: The commanddict to merge from.

    <Side Effects>
      Originaldict will contain all command entries in mergedict.

    <Exceptions>
      There shouldn't be any...

    <Return>
      None
    """
    for node in mergedict:
        if node not in originaldict:
            # Trivial case: the node is new; copy the whole subtree over.
            originaldict[node] = deepcopy(mergedict[node])
            continue
        # Shared node exists in original but is not defined: replace
        # its properties if they exist, then merge over the children.
        if not is_commanddictnode_defined(originaldict[node]):
            for prop in mergedict[node]:
                if prop not in ['children', 'module']:
                    originaldict[node][prop] = mergedict[node][prop]
        merge_commanddict_recursive(originaldict[node]['children'],
                                    mergedict[node]['children'])
<Purpose> Recursively merge mergedict into originaldict. We assume that there are no conflicting modules here. Be sure to check that there aren't any collisions! <Arguments> originaldict: The commanddict to merge to. mergedict: The commanddict to merge from. <Side Effects> Originaldict will contain all command entries in mergedict. <Exceptions> There shouldn't be any... <Return> None
def remove_commanddict(originaldict, removedict):
    """
    <Purpose>
      Removes all commands found in a command dictionary from another
      command dictionary.  Remember to perform the same call under the
      help node so that the help command works as expected, e.g.:
        remove_commanddict(seashcommanddict, mycommanddict)
        remove_commanddict(seashcommanddict['help']['children'], mycommanddict)

    <Arguments>
      originaldict: The commanddict to remove from.
      removedict: The commanddict containing the commands to remove.

    <Side Effects>
      All commands in removedict will be removed from originaldict.
      A node will not be removed while there are children under that
      node.  However, if a parent node is undefined and the last defined
      child is removed, that parent node will be removed as well.

    <Exceptions>
      None

    <Returns>
      None
    """
    for child in removedict:
        if child in originaldict:
            # Recursively remove all children specified
            remove_commanddict(originaldict[child]['children'], removedict[child]['children'])
            # Remove the definition as well if it is defined in removedict
            if is_commanddictnode_defined(removedict[child]):
                # Remove everything except for children.  We remove those recursively.
                for propertytype in removedict[child]:
                    # Not all properties (i.e. module) will be defined in the original
                    # dictionary.  We may raise an exception when trying to delete one
                    # such property.
                    if (propertytype != 'children' and
                            propertytype in originaldict[child]):
                        del originaldict[child][propertytype]
            # Remove this node if this node is no longer defined, and if there are no
            # remaining child nodes.
            if not (is_commanddictnode_defined(originaldict[child]) or originaldict[child]['children']):
                del originaldict[child]
<Purpose> Removes all commands found in a command dictionary from another command dictionary. Remember to perform the same call on under the help node so that the help command works as expected. e.g. remove_commanddict(seashcommanddict, mycommanddict) remove_commanddict(seashcommanddict['help']['children'], mycommanddict) <Arguments> originaldict: The commanddict to remove from. removedict: The commanddict containing the commands to remove. <Side Effects> All commands in cmddict_merge will be removed from originaldict. A node will not be removed while there are children under that node. However, if a parent node is undefined and the last defined child is removed, that parent node will be removed as well. <Exceptions> None <Returns> None
def enable(commanddict, modulename):
    """
    <Purpose>
      Enables a module and imports its commands into the seash
      commanddict.

    <Arguments>
      commanddict: The seash commanddict to merge the module's commands
        into.
      modulename: The module to import.

    <Side Effects>
      All commands inside the specified module will be inserted into the
      seash commanddict if possible.  The file modulename.disabled will
      be removed from /modules/ indicating that this module has been
      enabled, and the module's initialize routine is run.

    <Exceptions>
      UserError: the module is not installed, or is already enabled.
      InitializeError: the module's initialize routine failed.
      Exceptions raised by merge_commanddict()

    <Returns>
      None
    """
    # Is this an installed module?
    if not modulename in module_data:
        raise seash_exceptions.UserError("Error, module '"+modulename+"' is not installed")

    if _is_module_enabled(modulename):
        raise seash_exceptions.UserError("Module is already enabled.")

    merge_commanddict(commanddict, module_data[modulename]['command_dict'])
    try:
        # We mark this module as enabled by deleting the modulename.disabled file
        os.remove(MODULES_FOLDER_PATH + os.sep + modulename + ".disabled")
    except OSError, e:
        # If the file was deleted before we were able to delete it, it should not
        # be a problem.
        if not "cannot find the file" in str(e):
            raise
    try:
        initialize(modulename)
    except seash_exceptions.InitializeError, e:
        raise seash_exceptions.InitializeError(e)
<Purpose> Enables a module and imports its commands into the seash commanddict. <Arguments> modulename: The module to import. <Side Effects> All commands inside the specified module will be inserted into the seash commanddict if possible. The file modulename.disabled will be removed from /modules/ indicating that this module has been enabled. <Exceptions> Exceptions raised by merge_commanddict() <Returns> None
def disable(commanddict, modulename):
    """
    <Purpose>
      Disables a module and removes its commands from the seash
      commanddict.

    <Arguments>
      commanddict: The seash commanddict to remove the module's commands
        from.
      modulename: The module to disable.

    <Side Effects>
      All commands inside the specified module will be removed from the
      seash commanddict.  A file (modulename.disabled) will be created
      under /modules/ indicating that this module has been disabled.

    <Exceptions>
      UserError: the module is not installed, or is not enabled.
      Exceptions raised by remove_commanddict()

    <Returns>
      None
    """
    # Is this an installed module?
    if not modulename in module_data:
        raise seash_exceptions.UserError("Error, module '"+modulename+"' is not installed")

    # Is this module enabled?
    if not _is_module_enabled(modulename):
        raise seash_exceptions.UserError("Module is not enabled.")

    remove_commanddict(commanddict, module_data[modulename]['command_dict'])
    cleanup(modulename)

    # We mark this module as disabled by adding a modulename.disabled file.
    # BUGFIX: close the marker file instead of leaking the open handle.
    open(MODULES_FOLDER_PATH + os.sep + modulename + ".disabled", 'w').close()
<Purpose> Disables a module and removes its commands from the seash commanddict. <Arguments> modulename: The module to disable. <Side Effects> All commands inside the specified module will be removed from the seash commanddict. A file (modulename.disabled) will be created under /modules/ indicating that this module has been disabled. <Exceptions> Exceptions raised by merge_commanddict() <Returns> None
def get_enabled_modules():
    """
    <Purpose>
      Returns all enabled modules.

    <Arguments>
      None

    <Side Effects>
      None

    <Exceptions>
      None

    <Return>
      The list of all enabled modules.
    """
    # A module is enabled when no "<name>.disabled" marker file exists.
    marker_files = os.listdir(MODULES_FOLDER_PATH)
    return [name for name in get_installed_modules()
            if not name + '.disabled' in marker_files]
<Purpose> Returns all enabled modules. <Arguments> None <Side Effects> None <Exceptions> None <Return> The list of all enabled modules.
def enable_modules_from_last_session(seashcommanddict):
    """
    Enable every module that isn't marked as disabled in the modules
    folder.  This function is meant to be called when seash is
    initializing and nowhere else.  A module is marked as disabled when
    there is a modulename.disabled file.
    """
    successfully_enabled_modules = []
    modules_to_enable = get_enabled_modules()
    for modulename in modules_to_enable:
        # There are no bad side effects to seash's state when we do this
        # The only thing that should happen is that the modulename.disabled file
        # gets created (temporarily)
        disable(seashcommanddict, modulename)
        try:
            enable(seashcommanddict, modulename)
            successfully_enabled_modules.append(modulename)
        except seash_exceptions.ModuleConflictError, e:
            print "Failed to enable the '"+modulename+"' module due to the following conflicting command:"
            print str(e)
            # We mark this module as disabled by adding a modulename.disabled file.
            open(MODULES_FOLDER_PATH + os.sep + modulename + ".disabled", 'w')
        except seash_exceptions.InitializeError, e:
            print "Failed to enable the '"+modulename+"' module."
            disable(seashcommanddict, modulename)
    successfully_enabled_modules.sort()
    print 'Enabled modules:', ', '.join(successfully_enabled_modules), '\n'
Enable every module that isn't marked as disabled in the modules folder. This function is meant to be called when seash is initializing and nowhere else. A module is marked as disabled when there is a modulename.disabled file.
def tab_complete(input_list):
    """
    <Purpose>
      Gets the list of all valid tab-complete strings from all enabled
      modules.

    <Arguments>
      input_list: The list of words the user entered.

    <Side Effects>
      None

    <Exceptions>
      None

    <Returns>
      A list of valid tab-complete strings
    """
    completions = []
    for module in get_enabled_modules():
        # Only some modules supply a tab completer.
        if 'tab_completer' in module_data[module]:
            completions += module_data[module]['tab_completer'](input_list)
    return completions
<Purpose> Gets the list of all valid tab-complete strings from all enabled modules. <Arguments> input_list: The list of words the user entered. <Side Effects> None <Exceptions> None <Returns> A list of valid tab-complete strings
def preprocess_input(userinput):
    """
    <Purpose>
      Preprocess the raw command line input string.

    <Arguments>
      userinput: The raw command line input string.  We assume it is
        pre-stripped.

    <Side Effects>
      The string will be processed by each module that has a defined
      preprocessor.

    <Exceptions>
      None

    <Returns>
      The preprocessed string.
    """
    for module in get_enabled_modules():
        handlers = module_data[module]
        # Not every module has a preprocessor...
        if 'input_preprocessor' in handlers:
            userinput = handlers['input_preprocessor'](userinput)
    return userinput
<Purpose> Preprocess the raw command line input string. <Arguments> The raw command line input string. We assume it is pre-stripped. <Side Effects> The string will be processed by each module that has a defined preprocessor. <Exceptions> None <Returns> The preprocessed string.
def _attach_module_identifier(command_dict, modulefn): for command in command_dict: command_dict[command]['module'] = modulefn _attach_module_identifier(command_dict[command]['children'], modulefn)
Attaches a 'module': modulename entry to each node in the dictionary. This is used by the help printer so that the user can tell if a command was included by default or via a module.