def result(self, timeout=None, do_raise=True):
    with self._lock:
        self.wait(timeout, do_raise=do_raise)
        if self._exc_info:
            if not do_raise:
                return None
            # It's more important to re-raise the exception from the worker.
            self._exc_retrieved = True
            reraise(*self._exc_info)
        if self._cancelled:
            if not do_raise:
                return None
            raise self.Cancelled()
        return self._result
Retrieve the result of the future, waiting at most *timeout* seconds for it to complete.

:param timeout: The maximum number of seconds to wait for the result.
:param do_raise: Set to False to prevent any of the exceptions below from being raised and to return :const:`None` instead.
:raise Cancelled: If the future has been cancelled.
:raise Timeout: If the *timeout* has been exceeded.
:raise BaseException: Anything the worker has raised.
:return: Whatever the worker bound to the future returned.
def exception(self, timeout=None, do_raise=True):
    with self._lock:
        self.wait(timeout, do_raise=do_raise)
        if not self._exc_info:
            return None
        self._exc_retrieved = True
        if self._cancelled:
            raise self.Cancelled()
        return self._exc_info[1]
Return the exception raised by the future's worker, or :const:`None`.

:param timeout: The maximum number of seconds to wait for the future to complete.
:param do_raise: Set to False to prevent the exceptions below from being raised.
:raise Cancelled: If the future has been cancelled.
:raise Timeout: If the *timeout* has been exceeded.
:return: :const:`None` or the exception raised by the worker.
def cancel(self, mark_completed_as_cancelled=False):
    with self._lock:
        if not self._completed or mark_completed_as_cancelled:
            self._cancelled = True
        callbacks = self._prepare_done_callbacks()
    callbacks()
Cancel the future. If the future has not been started yet, it will never start running. If the future is already running, it will run until the worker function exits. The worker function can check if the future has been cancelled using the :meth:`cancelled` method.

If the future has already completed, it will not be marked as cancelled unless you set *mark_completed_as_cancelled* to :const:`True`.

:param mark_completed_as_cancelled: If this is :const:`True` and the future has already completed, it will be marked as cancelled anyway.
def set_result(self, result):
    with self._lock:
        if self._enqueued:
            raise RuntimeError('can not set result of enqueued Future')
        self._result = result
        self._completed = True
        callbacks = self._prepare_done_callbacks()
    callbacks()
Set the result of the future without requiring the future to actually be executed. This can be used if the result is available before the future is run, allowing you to keep the future as the interface for retrieving the result data.

:param result: The result of the future.
:raise RuntimeError: If the future is already enqueued.
def set_exception(self, exc_info):
    if not isinstance(exc_info, tuple):
        if not isinstance(exc_info, BaseException):
            raise TypeError('expected BaseException instance')
        try:
            # TODO: Fill the traceback so it appears as if the exception
            #       was actually raised by the caller? (Not sure if possible)
            raise exc_info
        except BaseException:
            exc_info = sys.exc_info()
    exc_info = (exc_info[0], exc_info[1], exc_info[2])
    with self._lock:
        if self._enqueued:
            raise RuntimeError('can not set exception of enqueued Future')
        self._exc_info = exc_info
        self._completed = True
        callbacks = self._prepare_done_callbacks()
    callbacks()
Set an exception in the future without requiring that exception to be raised from the future's worker. This method can be called on an unbound future.

:param exc_info: Either an exception info tuple or an exception value. In the latter case, the traceback will be automatically generated from the parent frame.
:raise RuntimeError: If the future is already enqueued.
def wait(self, timeout=None, do_raise=False):
    if timeout is not None:
        timeout = float(timeout)
        start = time.monotonic()  # time.clock() was removed in Python 3.8
    with self._lock:
        while not self._completed and not self._cancelled:
            if timeout is not None:
                time_left = timeout - (time.monotonic() - start)
            else:
                time_left = None
            if time_left is not None and time_left <= 0.0:
                if do_raise:
                    raise self.Timeout()
                else:
                    return False
            self._lock.wait(time_left)
    return True
Wait for the future to complete. If *timeout* is specified, it must be a floating point number representing the maximum number of seconds to wait.

:param timeout: The maximum number of seconds to wait for the future to complete.
:param do_raise: Raise :class:`Timeout` when a timeout occurred.
:raise Timeout: If a timeout occurred and *do_raise* was True.
:return: :const:`True` if the future completed, :const:`False` if a timeout occurred and *do_raise* was set to False.
def enqueue(self, future):
    future.enqueue()
    with self._lock:
        if self._shutdown:
            raise RuntimeError('ThreadPool has been shut down and can no '
                               'longer accept futures.')
        self._queue.append(future)
        if len(self._running) == len(self._workers):
            self._new_worker()
        self._lock.notify_all()
Enqueue a future to be processed by one of the threads in the pool. The future must be bound to a worker and not have been started yet.
def submit(self, __fun, *args, **kwargs):
    future = Future().bind(__fun, *args, **kwargs)
    self.enqueue(future)
    return future
Creates a new future and enqueues it. Returns the future.
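A minimal usage sketch of the pool and future API above (the `ThreadPool()` constructor arguments are assumptions; only `submit()`, `result()` and `shutdown()` are taken from this section):

import time

def compute(x):
    time.sleep(0.1)
    return x * 2

pool = ThreadPool()                 # assumed: default constructor
future = pool.submit(compute, 21)   # returns a Future bound to compute(21)
print(future.result(timeout=5.0))   # 42, or raises Timeout after 5 seconds
pool.shutdown(wait=True)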
def cancel(self, cancel_running=True, mark_completed_as_cancelled=False):
    with self._lock:
        for future in self._queue:
            future.cancel(mark_completed_as_cancelled)
        if cancel_running:
            for future in self._running:
                future.cancel(mark_completed_as_cancelled)
        self._queue.clear()
Cancel all futures queued in the pool. If *cancel_running* is True, futures that are currently running in the pool are cancelled as well.
def shutdown(self, wait=True):
    with self._lock:
        self._shutdown = True
        self._lock.notify_all()
    if wait:
        self.wait()
Shut down the pool. If *wait* is True, block until all futures are completed. Alternatively, you can use the #wait() method, which also supports a timeout.
def wait(self, timeout=None):
    tbegin = _get_timeout_begin(timeout)
    with self._lock:
        while self._queue or self._running:
            remainder = _get_timeout_remainder(tbegin, timeout)
            if remainder is not None and remainder <= 0.0:
                return False  # timeout
            self._lock.wait(remainder)
        if self._shutdown:
            for worker in self._workers:
                worker.join()
    return True
Wait until all futures are completed. You should call this method only after calling #shutdown(). Returns #True if all futures completed, #False if some are still queued or running (i.e. the timeout was exceeded).
def timeit(func):
    @wraps(func)
    def timer_wrapper(*args, **kwargs):
        """Inner function that uses the Timer context object."""
        with Timer() as timer:
            result = func(*args, **kwargs)
        return result, timer
    return timer_wrapper
Returns the number of seconds that a function took along with the result
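For example (the attribute that holds the elapsed seconds on the `Timer` context object is an assumption, written here as `timer.elapsed`):

@timeit
def busy():
    return sum(range(1_000_000))

result, timer = busy()
print(result, timer.elapsed)  # timer.elapsed is an assumed Timer attribute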
def timeout(seconds):
    def _timeout_error(signum, frame):
        raise TimeoutError(
            "Operation did not finish within {} seconds".format(seconds))

    def timeout_decorator(func):
        @wraps(func)
        def timeout_wrapper(*args, **kwargs):
            signal.signal(signal.SIGALRM, _timeout_error)
            signal.alarm(seconds)
            try:
                return func(*args, **kwargs)
            finally:
                signal.alarm(0)
        return timeout_wrapper

    return timeout_decorator
Raises a TimeoutError if a function does not terminate within specified seconds.
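A usage sketch; note that `signal.SIGALRM` is Unix-only and the alarm must be set from the main thread:

import time

@timeout(2)
def slow():
    time.sleep(5)

try:
    slow()
except TimeoutError as e:
    print(e)  # Operation did not finish within 2 seconds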
def create(location: str, extensions_found: List[str] = None):
    # -> ObjectPresentMultipleTimesOnFileSystemError
    if not extensions_found:
        return ObjectPresentMultipleTimesOnFileSystemError(
            'Object : ' + location + ' is present multiple times on the file system.')
    else:
        return ObjectPresentMultipleTimesOnFileSystemError(
            'Object : ' + location + ' is present multiple times on the file system, '
            'with extensions : ' + str(extensions_found) + '. Only one version of each '
            'object should be provided. If you need multiple files to create this '
            'object, you should create a multifile object instead (with each file '
            'having its own name and a shared prefix)')
Helper method provided because we can't put this logic in the constructor: doing so triggers a bug in Nose tests (https://github.com/nose-devs/nose/issues/725).

:param location:
:param extensions_found:
:return:
def create(location: str, simpleobjects_found=None, complexobject_attributes_found=None):
    # -> ObjectNotFoundOnFileSystemError
    # Guard against the None defaults, which len() cannot handle.
    simpleobjects_found = simpleobjects_found or []
    complexobject_attributes_found = complexobject_attributes_found or []
    if len(complexobject_attributes_found) > 0 or len(simpleobjects_found) > 0:
        return ObjectNotFoundOnFileSystemError(
            'Mandatory object : ' + location + ' could not be found on the file '
            'system, either as a multifile or as a singlefile with any extension, '
            'but it seems that this is because you have left the extension in the '
            'location name. Please remove the file extension from the location name '
            'and try again')
    else:
        return ObjectNotFoundOnFileSystemError(
            'Mandatory object : ' + location + ' could not be found on the file '
            'system, either as a multifile or as a singlefile with any extension.')
Helper method provided because we can't put this logic in the constructor: doing so triggers a bug in Nose tests (https://github.com/nose-devs/nose/issues/725).

:param location:
:return:
def find_multifile_object_children(self, parent_location: str, no_errors: bool = False) -> Dict[str, str]:
    pass
Implementing classes should return a dictionary of <item_name>, <item_location> containing the named elements in this multifile object.

:param parent_location: the absolute file prefix of the parent item.
:return: a dictionary of {item_name: item_prefix}
def get_pretty_location(self, blank_parent_part: bool = False, append_file_ext: bool = True,
                        compact_file_ext: bool = False):
    if append_file_ext:
        if compact_file_ext:
            suffix = self.ext if self.is_singlefile else ''
        else:
            suffix = ' (' + self.get_pretty_file_ext() + ')'
    else:
        suffix = ''
    if blank_parent_part:
        # TODO sep should be replaced with the appropriate separator in flat mode
        idx = self.location.rfind(sep)
        return (' ' * (idx - 1 - len(sep))) + '|--' + self.location[(idx + 1):] + suffix
    else:
        return self.location + suffix
Utility method to return a string representing the location, mode and extension of this file. :return:
def get_pretty_child_location(self, child_name, blank_parent_part: bool = False):
    if blank_parent_part:
        idx = len(self.location)
        return (' ' * (idx - 3)) + '|--' + child_name
    else:
        # TODO sep should be replaced with the appropriate separator in flat mode
        return self.location + sep + child_name
Utility method to return a string representation of the location of a child :param child_name: :param blank_parent_part: :return:
def create_persisted_object(self, location: str, logger: Logger) -> PersistedObject:
    logger.debug('Checking all files under [{loc}]'.format(loc=location))
    obj = FileMappingConfiguration.RecursivePersistedObject(location=location,
                                                            file_mapping_conf=self,
                                                            logger=logger)
    logger.debug('File checks done')
    return obj
Creates a PersistedObject representing the object at location 'location', and recursively creates all of its children :param location: :param logger: :return:
def find_multifile_object_children(self, parent_location, no_errors: bool = False) -> Dict[str, str]:
    # (1) Assert that parent_location is a folder
    if not isdir(parent_location):
        if no_errors:
            return dict()
        else:
            raise ValueError('Cannot find a multifileobject at location \'' + parent_location
                             + '\' : location is not a valid folder')
    else:
        # (2) List folders (multifile objects or collections)
        all_subfolders = [dir_ for dir_ in listdir(parent_location)
                          if isdir(join(parent_location, dir_))]
        items = {item_name: join(parent_location, item_name) for item_name in all_subfolders}

        # (3) List singlefiles *without* their extension
        items.update({
            item_name: join(parent_location, item_name)
            for item_name in [file_name[0:file_name.rindex(EXT_SEPARATOR)]
                              for file_name in listdir(parent_location)
                              if isfile(join(parent_location, file_name))
                              and EXT_SEPARATOR in file_name]
        })

        # (4) Return all
        return items
Implementation of the parent abstract method. In this mode, root_path should be a valid folder, and each item is a subfolder (multifile) or a file (singlefile):

location/
|- singlefile_sub_item1.<ext>
|- singlefile_sub_item2.<ext>
|- multifile_sub_item3/
   |- ...

:param parent_location: the absolute file prefix of the parent item. It may be a folder (non-flat mode) or a folder plus a file name prefix (flat mode).
:param no_errors: a boolean used in internal recursive calls in order to catch errors. Should not be changed by users.
:return: a dictionary of {item_name: item_prefix}
def is_multifile_object_without_children(self, location: str) -> bool:
    return isdir(location) and len(self.find_multifile_object_children(location)) == 0
Returns True if an item with this location is present as a multifile object without children. For this implementation, this means that there is a folder without any files in it :param location: :return:
def get_multifile_object_child_location(self, parent_item_prefix: str, child_name: str) -> str:
    check_var(parent_item_prefix, var_types=str, var_name='parent_item_prefix')
    check_var(child_name, var_types=str, var_name='item_name')

    # Assert that parent_item_prefix is a folder.
    if not isdir(parent_item_prefix):
        raise ValueError('Cannot get attribute item in non-flat mode, parent item path is '
                         'not a folder : ' + parent_item_prefix)
    return join(parent_item_prefix, child_name)
Implementation of the parent abstract method. In this mode the attribute is a file inside the parent object folder :param parent_item_prefix: the absolute file prefix of the parent item. :return: the file prefix for this attribute
def is_multifile_object_without_children(self, location: str) -> bool:
    # (1) Find the base directory and base name
    if isdir(location):
        # Special case: location is the root folder where all the files are.
        return len(self.find_multifile_object_children(location)) == 0
    else:
        # TODO same comment as in find_multifile_object_children
        if exists(location):
            # location is a file without extension. We can accept that as
            # being a multifile object without children.
            return True
        else:
            return False
Returns True if an item with this location is present as a multifile object without children. For this implementation, this means that there is a file with the appropriate name but without extension :param location: :return:
def get_multifile_object_child_location(self, parent_location: str, child_name: str):
    check_var(parent_location, var_types=str, var_name='parent_path')
    check_var(child_name, var_types=str, var_name='item_name')

    # A child location is built by putting the separator between the parent
    # location and the child name.
    return parent_location + self.separator + child_name
Implementation of the parent abstract method. In this mode the attribute is a file with the same prefix, separated from the parent object name by the character sequence <self.separator> :param parent_location: the absolute file prefix of the parent item. :param child_name: :return: the file prefix for this attribute
def find_simpleobject_file_occurrences(self, location) -> Dict[str, str]:
    parent_dir = dirname(location)
    if parent_dir == '':
        parent_dir = '.'
    base_prefix = basename(location)

    # Trick: if the separator is a dot, we have to take into account that
    # there is also a dot for the extension.
    min_sep_count = (1 if self.separator == EXT_SEPARATOR else 0)
    possible_object_files = {
        object_file[len(base_prefix):]: join(parent_dir, object_file)
        for object_file in listdir(parent_dir)
        if isfile(join(parent_dir, object_file))
        # file must be named base_prefix.something
        and object_file.startswith(base_prefix)
        and object_file != base_prefix
        and object_file[len(base_prefix)] == EXT_SEPARATOR
        and (object_file[len(base_prefix):]).count(EXT_SEPARATOR) == 1
        # and no other item separator should be present in the something
        and (object_file[len(base_prefix):]).count(self.separator) == min_sep_count
    }
    return possible_object_files
Implementation of the parent abstract method.

:param location:
:return: a dictionary {ext: file_path}
def list2html(text):
    '''
    Very simple replacement for lists, no nesting, not even two lists in the
    same 'text'... (yet sufficient for the current regulations)

    Assumes the list is in a paragraph.
    '''
    match = r'- (.+)\n'
    replace = r'<li>\1</li>\n'
    text = re.sub(match, replace, text)
    # Set start of list
    text = text.replace('<li>', '</p><ul><li>', 1)
    # Set end of list
    tmp = text.rsplit('</li>', 1)
    return '</li></ul><p>'.join(tmp)
Very simple replacement for lists, no nesting, not even two lists in the same 'text'... (yet sufficient for the current regulations) Assumes list is in a paragraph.
def link2html(text):
    '''Turns md links to html.'''
    match = r'\[([^\]]+)\]\(([^)]+)\)'
    replace = r'<a href="\2">\1</a>'
    return re.sub(match, replace, text)
Turns md links to html
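For example:

>>> link2html('See [the docs](https://example.com) for details.')
'See <a href="https://example.com">the docs</a> for details.'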
def simple_md2html(text, urls):
    '''Convert a text from md to html.'''
    retval = special_links_replace(text, urls)
    # Create a paragraph break for double newlines
    retval = re.sub(r'\n\n', r'</p><p>', retval)
    # Create a visual br for every new line
    retval = re.sub(r'\n', r'<br />\n', retval)
    # Do we really need this? Helps reduce the diff to only '\n' diffs.
    retval = re.sub(r'"', r'&quot;', retval)
    retval = list2html(retval)
    return link2html(retval)
Convert a text from md to html
def generate_ul(self, a_list):
    '''Determines if we should generate the 'ul' around the list 'a_list'.'''
    return len(a_list) > 0 and isinstance(a_list[0], (Rule, LabelDecl))
Determines if we should generate the 'ul' around the list 'a_list'.
def get_version_info():
    version_info = {}
    with open(os.path.join("refcycle", "version.py"), 'r') as f:
        version_code = compile(f.read(), "version.py", 'exec')
        exec(version_code, version_info)
    return version_info
Extract version information as a dictionary from version.py.
def div_filter(key: str, value: list, format: str, meta: Any) -> Optional[list]:
    if key != "Div" or format != "latex":
        return None

    [[_, classes, _], contents] = value
    try:
        alert_type = [name.split("-")[1] for name in classes if "-" in name][0]
    except IndexError:
        return None
    if alert_type not in ALLOWED_ALERT_TYPES.__members__:
        return None

    filtered = [RawBlock("latex", rf"\begin{{{alert_type}box}}")]
    filtered.extend(contents)
    filtered.append(RawBlock("latex", rf"\end{{{alert_type}box}}"))
    return filtered
Filter the JSON ``value`` for alert divs.

Arguments
---------
key
    Key of the structure
value
    Values in the structure
format
    Output format of the processing
meta
    Meta information
def convert_div(text: str, format: Optional[str] = None) -> "applyJSONFilters":
    return applyJSONFilters([div_filter], text, format=format)
Apply the `div_filter` action to the text.
def raw_html_filter(key: str, value: list, format: str, meta: Any) -> Optional[list]:
    if key == "RawInline" and format == "latex" and value[0] == "html":
        if value[1] == "<sup>":
            filtered = [RawInline("latex", r"\textsuperscript{")]
        elif value[1] == "</sup>":
            filtered = [RawInline("latex", "}")]
        elif value[1] == "<sub>":
            filtered = [RawInline("latex", r"\textsubscript{")]
        elif value[1] == "</sub>":
            filtered = [RawInline("latex", "}")]
        else:
            return None
        return filtered
    return None
Filter the JSON ``value`` for raw html to convert to LaTeX.

Arguments
---------
key
    Key of the structure
value
    Values in the structure
format
    Output format of the processing
meta
    Meta information
def convert_raw_html(text: str, format: Optional[str] = None) -> "applyJSONFilters":
    return applyJSONFilters([raw_html_filter], text, format=format)
Apply the `raw_html_filter` action to the text.
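These helpers expect the JSON AST that `pandoc -t json` emits. A sketch of wiring them into a pipeline (the file names are placeholders):

import subprocess

# Markdown -> pandoc JSON AST -> rewrite alert divs and raw inline HTML
# for LaTeX output -> let pandoc render the transformed AST.
ast = subprocess.run(["pandoc", "-t", "json", "input.md"],
                     capture_output=True, text=True, check=True).stdout
ast = convert_div(ast, format="latex")
ast = convert_raw_html(ast, format="latex")
subprocess.run(["pandoc", "-f", "json", "-o", "output.tex"],
               input=ast, text=True, check=True)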
def add(self, element):
    key = self._transform(element)
    if key not in self._elements:
        self._elements[key] = element
Add an element to this set.
def discard(self, element):
    key = self._transform(element)
    if key in self._elements:
        del self._elements[key]
Remove an element. Do not raise an exception if absent.
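The two methods above imply a set-like container keyed by a transform of each element. A self-contained sketch of such a class (the class name and constructor are hypothetical, inferred from `_transform` and `_elements`):

class TransformSet:
    # Set-like container that deduplicates elements by a key function.
    def __init__(self, transform=lambda e: e):
        self._transform = transform
        self._elements = {}

    def add(self, element):
        key = self._transform(element)
        if key not in self._elements:
            self._elements[key] = element

    def discard(self, element):
        key = self._transform(element)
        if key in self._elements:
            del self._elements[key]

names = TransformSet(transform=str.lower)
names.add("Alice")
names.add("ALICE")      # ignored: same key after lowercasing
names.discard("alice")  # removes the stored "Alice"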
def add_items_to_message(msg, log_dict):
    out = msg
    for key, value in log_dict.items():
        out += " {}={}".format(key, value)
    return out
Utility function to add dictionary items to a log message.
def log_event(event, logger=root_logger, **log_dict):
    msg = "event={}".format(event)
    msg = add_items_to_message(msg, log_dict)
    log_dict.update({'event': event})
    logger.info(msg, extra=log_dict)
Utility function for logging an event (e.g. for metric analysis). If no logger is given, fallback to the root logger.
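For example (assuming the module-level `root_logger` fallback prints INFO records once logging is configured):

import logging

logging.basicConfig(level=logging.INFO)
log_event("user_login", user_id=42, source="web")
# emits a record like: event=user_login user_id=42 source=web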
def metric(cls, name, count, elapsed):
    if name is None:
        warnings.warn("Ignoring unnamed metric", stacklevel=3)
        return

    with cls.lock:
        if not cls.instances:
            # First call: start with a clean output directory.
            shutil.rmtree(cls.outdir, ignore_errors=True)
            os.makedirs(cls.outdir)
            if cls.dump_atexit:
                atexit.register(cls.dump)
        try:
            self = cls.instances[name]
        except KeyError:
            self = cls.instances[name] = cls(name)
        self.writer.writerow((count, "%f" % elapsed))
A metric function that writes multiple CSV files.

:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds
def dump(cls):
    with cls.lock:
        if not cls.instances:
            return
        atexit.unregister(cls.dump)
        for self in cls.instances.values():
            self.fh.close()
Output all recorded metrics
def metric(self, name, count, elapsed):
    if name is None:
        warnings.warn("Ignoring unnamed metric", stacklevel=3)
        return

    with self.lock:
        self.writer.writerow((name, count, "%f" % elapsed))
A metric function that writes a single CSV file.

:arg str name: name of the metric
:arg int count: number of items
:arg float elapsed: time in seconds
def dump(self):
    with self.lock:
        atexit.unregister(self.dump)
        self.fh.close()
Output all recorded metrics
def read(parts):
    cur_dir = os.path.abspath(os.path.dirname(__file__))
    with codecs.open(os.path.join(cur_dir, *parts), "rb", "utf-8") as f:
        return f.read()
Build an absolute path from the parts array and return the contents of the resulting file. Assume UTF-8 encoding.
def find_meta(meta):
    meta_match = re.search(
        r"^__{meta}__ = ['\"]([^'\"]*)['\"]".format(meta=meta),
        META_FILE, re.M
    )
    if meta_match:
        return meta_match.group(1)
    raise RuntimeError("Unable to find __{meta}__ string.".format(meta=meta))
Extract __*meta*__ from META_FILE.
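Typical packaging usage, combining it with the `read()` helper above (the package path is a placeholder):

META_FILE = read(("mypackage", "__init__.py"))  # placeholder path

version = find_meta("version")  # matches a line like: __version__ = "1.2.3"
author = find_meta("author")    # matches a line like: __author__ = "Jane Doe"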
def ensure_clean_git(operation='operation'):
    if os.system('git diff-index --quiet HEAD --'):
        print("Unstaged or uncommitted changes detected. {} aborted.".format(
            operation.capitalize()))
        sys.exit()
Verify that git has no uncommitted changes
def load(self):
    if self._defaults_file is not None:
        if not os.path.exists(self._defaults_file):
            msg = "Unable to find defaults file: {}".format(self._defaults_file)
            LOGGER.error(msg)
            raise RuntimeError(msg)
        with open(self._defaults_file, 'r') as handle:
            self._defaults = json.load(handle)
        self.update(self._defaults)

    if self._settings_file is None:
        msg = "No context file has been provided"
        LOGGER.error(msg)
        raise RuntimeError(msg)
    if not os.path.exists(self._settings_file):
        msg = "Unable to find settings file: {}".format(self._settings_file)
        LOGGER.error(msg)
        raise RuntimeError(msg)
    with open(self._settings_file, 'r') as handle:
        settings = json.load(handle)
    update(self, settings)
    return
Load the defaults file if specified, then overlay the settings JSON file on top of it.
def hasmethod(obj, meth):
    if hasattr(obj, meth):
        return callable(getattr(obj, meth))
    return False
Check whether the object *obj* has a callable method *meth*. Returns True or False.
def hasvar(obj, var):
    if hasattr(obj, var):
        return not callable(getattr(obj, var))
    return False
Check whether the object *obj* has a non-callable variable *var*. Returns True or False.
def getmethattr(obj, meth):
    if hasmethod(obj, meth):
        return getattr(obj, meth)()
    elif hasvar(obj, meth):
        return getattr(obj, meth)
    return None
Return either the variable's value or the result of invoking the method.
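For example:

class Greeter:
    name = "world"

    def greet(self):
        return "hello"

obj = Greeter()
getmethattr(obj, "greet")    # -> "hello" (method is invoked)
getmethattr(obj, "name")     # -> "world" (plain attribute value)
getmethattr(obj, "missing")  # -> None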
def assure_obj_child_dict(obj, var):
    if var not in obj or not isinstance(obj[var], dict):
        obj[var] = {}
    return obj
Assure the object has the specified child dict
def warmup(f):
    @wraps(f)
    def wrapped(self, *args, **kwargs):
        if not self.warmed_up:
            self.warmup()
        return f(self, *args, **kwargs)
    return wrapped
Decorator to run warmup before running a command
def install_required(f):
    @wraps(f)
    def wrapped(self, *args, **kwargs):
        if self.directory.new:
            raise SprinterException("Namespace %s is not yet installed!" % self.namespace)
        return f(self, *args, **kwargs)
    return wrapped
Raise an exception if the namespace is not already installed.
def install(self):
    self.phase = PHASE.INSTALL
    if not self.directory.new:
        self.logger.info("Namespace %s directory already exists!" % self.namespace)
        self.source = load_manifest(self.directory.manifest_path)
        return self.update()
    try:
        self.logger.info("Installing environment %s..." % self.namespace)
        self.directory.initialize()
        self.install_sandboxes()
        self.instantiate_features()
        self.grab_inputs()
        self._specialize()
        for feature in self.features.run_order:
            self.run_action(feature, 'sync')
        self.inject_environment_config()
        self._finalize()
    except Exception:
        self.logger.debug("", exc_info=sys.exc_info())
        self.logger.info("An error occurred during installation!")
        if not self.ignore_errors:
            self.clear_all()
            self.logger.info("Removing installation %s..." % self.namespace)
            self.directory.remove()
            et, ei, tb = sys.exc_info()
            reraise(et, ei, tb)
Install the environment
def update(self, reconfigure=False):
    try:
        self.phase = PHASE.UPDATE
        self.logger.info("Updating environment %s..." % self.namespace)
        self.install_sandboxes()
        self.instantiate_features()
        # We don't grab inputs on update; inputs are only grabbed on install
        # or on demand, unless a reconfigure is requested.
        if reconfigure:
            self.grab_inputs(reconfigure=True)
        else:
            self._copy_source_to_target()
        self._specialize(reconfigure=reconfigure)
        for feature in self.features.run_order:
            self.run_action(feature, 'sync')
        self.inject_environment_config()
        self._finalize()
    except Exception:
        self.logger.debug("", exc_info=sys.exc_info())
        et, ei, tb = sys.exc_info()
        reraise(et, ei, tb)
update the environment
def remove(self):
    try:
        self.phase = PHASE.REMOVE
        self.logger.info("Removing environment %s..." % self.namespace)
        self.instantiate_features()
        self._specialize()
        for feature in self.features.run_order:
            try:
                self.run_action(feature, 'sync')
            except FormulaException:
                # Continue trying to remove any remaining features.
                pass
        self.clear_all()
        self.directory.remove()
        self.injections.commit()
        if self.error_occured:
            self.logger.error(warning_template)
            self.logger.error(REMOVE_WARNING)
    except Exception:
        self.logger.debug("", exc_info=sys.exc_info())
        et, ei, tb = sys.exc_info()
        reraise(et, ei, tb)
remove the environment
def deactivate(self):
    try:
        self.phase = PHASE.DEACTIVATE
        self.logger.info("Deactivating environment %s..." % self.namespace)
        self.directory.rewrite_config = False
        self.instantiate_features()
        self._specialize()
        for feature in self.features.run_order:
            self.logger.info("Deactivating %s..." % feature[0])
            self.run_action(feature, 'deactivate')
        self.clear_all()
        self._finalize()
    except Exception:
        self.logger.debug("", exc_info=sys.exc_info())
        et, ei, tb = sys.exc_info()
        reraise(et, ei, tb)
deactivate the environment
def validate(self):
    self.phase = PHASE.VALIDATE
    self.logger.info("Validating %s..." % self.namespace)
    self.instantiate_features()
    context_dict = {}
    if self.target:
        for s in self.target.formula_sections():
            context_dict["%s:root_dir" % s] = self.directory.install_directory(s)
        context_dict['config:root_dir'] = self.directory.root_dir
        context_dict['config:node'] = system.NODE
        self.target.add_additional_context(context_dict)
    for feature in self.features.run_order:
        self.run_action(feature, 'validate', run_if_error=True)
Validate the target environment
def clear_all(self):
    self.injections.clear_all()
    for config_file in CONFIG_FILES:
        self.injections.clear(os.path.join("~", config_file))
clear all files that were to be injected
def write_debug_log(self, file_path):
    with open(file_path, "wb+") as fh:
        fh.write(system.get_system_info().encode('utf-8'))
        # Write out the contents of the debug stream.
        self._debug_stream.seek(0)
        fh.write(self._debug_stream.read().encode('utf-8'))
        fh.write("The following errors occurred:\n".encode('utf-8'))
        for error in self._errors:
            fh.write((error + "\n").encode('utf-8'))
        for k, v in self._error_dict.items():
            if len(v) > 0:
                fh.write(("Error(s) in %s with formula %s:\n" % k).encode('utf-8'))
                for error in v:
                    fh.write((error + "\n").encode('utf-8'))
Write the debug log to a file
def write_manifest(self):
    if os.path.exists(self.directory.manifest_path):
        with open(self.directory.manifest_path, "w+") as manifest_file:
            self.main_manifest.write(manifest_file)
Write the manifest to the file
def message_failure(self):
    if not isinstance(self.main_manifest, Manifest):
        return None
    return self.main_manifest.get('config', 'message_failure', default=None)
return a failure message, if one exists
def warmup(self):
    self.logger.debug("Warming up...")
    try:
        if not isinstance(self.source, Manifest) and self.source:
            self.source = load_manifest(self.source)
        if not isinstance(self.target, Manifest) and self.target:
            self.target = load_manifest(self.target)
        self.main_manifest = self.target or self.source
    except lib.BadCredentialsException:
        e = sys.exc_info()[1]
        self.logger.error(str(e))
        raise SprinterException("Fatal error! Bad credentials to grab manifest!")

    if not getattr(self, 'namespace', None):
        if self.target:
            self.namespace = self.target.namespace
        elif not self.namespace and self.source:
            self.namespace = self.source.namespace
        else:
            raise SprinterException("No environment name has been specified!")

    self.directory_root = self.custom_directory_root

    if not self.directory:
        if not self.directory_root:
            self.directory_root = os.path.join(self.root, self.namespace)
        self.directory = Directory(self.directory_root,
                                   shell_util_path=self.shell_util_path)

    if not self.injections:
        self.injections = Injections(wrapper="%s_%s" % (self.sprinter_namespace.upper(),
                                                        self.namespace),
                                     override="SPRINTER_OVERRIDES")
    if not self.global_injections:
        self.global_injections = Injections(wrapper="%s" % self.sprinter_namespace.upper() + "GLOBALS",
                                            override="SPRINTER_OVERRIDES")

    # Prepend the bin path, in case sandboxes are necessary to execute
    # commands further down the sprinter lifecycle.
    os.environ['PATH'] = self.directory.bin_path() + ":" + os.environ['PATH']
    self.warmed_up = True
initialize variables necessary to perform a sprinter action
def _inject_config_source(self, source_filename, files_to_inject):
    # This construction (one format() call with a repeated placeholder,
    # rather than "[ -r %s ] && . %s" % (src_path, src_path)) is necessary
    # to avoid failing tests(!)
    src_exec = "[ -r {0} ] && . {0}".format(
        os.path.join(self.directory.root_dir, source_filename))

    for config_file in files_to_inject:
        config_path = os.path.expanduser(os.path.join("~", config_file))
        if os.path.exists(config_path):
            self.injections.inject(config_path, src_exec)
            break
    else:
        config_file = files_to_inject[0]
        config_path = os.path.expanduser(os.path.join("~", config_file))
        self.logger.info("No config files found to source %s, creating ~/%s!"
                         % (source_filename, config_file))
        self.injections.inject(config_path, src_exec)

    return (config_file, config_path)
Inject existing environmental config with namespace sourcing. Returns a tuple of the first file name and path found.
def _finalize(self):
    self.logger.info("Finalizing...")
    self.write_manifest()

    if self.directory.rewrite_config:
        # Always ensure .rc is written (sourcing .env).
        self.directory.add_to_rc('')
        # Prepend brew for global installs.
        if system.is_osx() and self.main_manifest.is_affirmative(
                'config', 'use_global_packagemanagers'):
            self.directory.add_to_env('__sprinter_prepend_path "%s" PATH' % '/usr/local/bin')
        self.directory.add_to_env('__sprinter_prepend_path "%s" PATH' % self.directory.bin_path())
        self.directory.add_to_env('__sprinter_prepend_path "%s" LIBRARY_PATH' % self.directory.lib_path())
        self.directory.add_to_env('__sprinter_prepend_path "%s" C_INCLUDE_PATH' % self.directory.include_path())
        self.directory.finalize()

    self.injections.commit()
    self.global_injections.commit()

    if not os.path.exists(os.path.join(self.root, ".global")):
        self.logger.debug("Global directory doesn't exist! creating...")
        os.makedirs(os.path.join(self.root, ".global"))

    self.logger.debug("Writing shell util file...")
    with open(self.shell_util_path, 'w+') as fh:
        fh.write(shell_utils_template)

    if self.error_occured:
        raise SprinterException("Error occurred!")

    if self.message_success():
        self.logger.info(self.message_success())

    self.logger.info("Done!")
    self.logger.info("NOTE: Please remember to open new shells/terminals to use the modified environment")
command to run at the end of sprinter's run
def _build_logger(self, level=logging.INFO):
    self._debug_stream = StringIO()
    logger = logging.getLogger('sprinter')
    # stdout log
    out_hdlr = logging.StreamHandler(sys.stdout)
    out_hdlr.setLevel(level)
    logger.addHandler(out_hdlr)
    # debug log
    debug_hdlr = logging.StreamHandler(self._debug_stream)
    debug_hdlr.setFormatter(logging.Formatter('%(asctime)s %(message)s'))
    debug_hdlr.setLevel(logging.DEBUG)
    logger.addHandler(debug_hdlr)
    logger.setLevel(logging.DEBUG)
    return logger
Return a logger that writes to stdout at *level* and also records a full debug log in an in-memory stream.
def run_action(self, feature, action, run_if_error=False, raise_exception=True):
    if len(self._error_dict[feature]) > 0 and not run_if_error:
        return

    error = None
    instance = self.features[feature]
    try:
        getattr(instance, action)()
    # Catch a generic exception within a feature.
    except Exception as e:
        e = sys.exc_info()[1]
        self.logger.info("An exception occurred with action %s in feature %s!"
                         % (action, feature))
        self.logger.debug("Exception", exc_info=sys.exc_info())
        error = str(e)
        self.log_feature_error(feature, str(e))

    # Any error in a feature should fail immediately, unless it occurred
    # during the remove() phase, in which case continue the rest of the
    # feature removal from there.
    if error is not None and raise_exception:
        exception_msg = "%s action failed for feature %s: %s" % (action, feature, error)
        if self.phase == PHASE.REMOVE:
            raise FormulaException(exception_msg)
        else:
            raise SprinterException(exception_msg)
    return error
Run an action, and log its output in case of errors.
def _specialize(self, reconfigure=False):
    # Add the 'root_dir' directories to the context dictionaries.
    for manifest in [self.source, self.target]:
        context_dict = {}
        if manifest:
            for s in manifest.formula_sections():
                context_dict["%s:root_dir" % s] = self.directory.install_directory(s)
            context_dict['config:root_dir'] = self.directory.root_dir
            context_dict['config:node'] = system.NODE
            manifest.add_additional_context(context_dict)
    self._validate_manifest()
    for feature in self.features.run_order:
        if not reconfigure:
            self.run_action(feature, 'resolve')
        # If a target doesn't exist, there is no need to prompt.
        instance = self.features[feature]
        if instance.target:
            self.run_action(feature, 'prompt')
Add variables and specialize contexts
def _copy_source_to_target(self):
    if self.source and self.target:
        for k, v in self.source.items('config'):
            # Always have source override target.
            self.target.set_input(k, v)
copy source user configuration to target
def grab_inputs(self, reconfigure=False):
    self._copy_source_to_target()
    if self.target:
        self.target.grab_inputs(force=reconfigure)
Resolve the source and target config section
def parse(ignore_file='.gitignore', git_dir='.git', additional_files=(),
          global_=True, root_dir=None, defaults=True):
    result = IgnoreListCollection()

    if root_dir is None:
        if git_dir is None:
            raise ValueError("root_dir or git_dir must be specified")
        root_dir = os.path.dirname(os.path.abspath(git_dir))

    def parse(filename, root=None):
        if os.path.isfile(filename):
            if root is None:
                root = os.path.dirname(os.path.abspath(filename))
            with open(filename) as fp:
                result.parse(fp, root)

    result.append(IgnoreList(root_dir))
    if ignore_file is not None:
        parse(ignore_file)
    for filename in additional_files:
        parse(filename)
    if git_dir is not None:
        parse(os.path.join(git_dir, 'info', 'exclude'), root_dir)
    if global_:
        # TODO: Read the core.excludesFile configuration value.
        parse(os.path.expanduser('~/.gitignore'), root_dir)
    if defaults:
        result.append(get_defaults(root_dir))
    return result
Collects a list of all ignore patterns configured in a local Git repository as specified in the Git documentation. See https://git-scm.com/docs/gitignore#_description The returned #IgnoreListCollection is guaranteed to contain at least one #IgnoreList with #IgnoreList.root pointing to the specified *root_dir* (which defaults to the parent directory of *git_dir*) as the first element.
def walk(patterns, dirname):
    join = os.path.join
    for root, dirs, files in os.walk(dirname, topdown=True):
        dirs[:] = [d for d in dirs
                   if patterns.match(join(root, d), True) != MATCH_IGNORE]
        files[:] = [f for f in files
                    if patterns.match(join(root, f), False) != MATCH_IGNORE]
        yield root, dirs, files
Like #os.walk(), but filters the files and directories that are excluded by the specified *patterns*.

# Arguments
patterns (IgnoreList, IgnoreListCollection): Can also be any object that implements the #IgnoreList.match() interface.
dirname (str): The directory to walk.
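Putting `parse()` and `walk()` together (a sketch; assumes the current working directory is the root of a Git checkout):

import os

patterns = parse()  # .gitignore, .git/info/exclude, ~/.gitignore, defaults
for root, dirs, files in walk(patterns, "."):
    for name in files:
        print(os.path.join(root, name))  # only paths not ignored by Git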
def parse(self, lines):
    if isinstance(lines, str):
        lines = lines.split('\n')

    sub = _re.sub
    for line in lines:
        if line.endswith('\n'):
            line = line[:-1]
        line = line.lstrip()
        if not line.startswith('#'):
            invert = False
            if line.startswith('!'):
                line = line[1:]
                invert = True
            while line.endswith(' ') and line[-2:] != '\\ ':
                line = line[:-1]
            line = sub(r'\\([!# ])', r'\1', line)
            if '/' in line and not line.startswith('/'):
                # Patterns with a slash can only be matched absolute.
                line = '/' + line
            self.patterns.append(Pattern(line, invert))
Parses the `.gitignore` file represented by the *lines*.
def match(self, filename, isdir):
    fnmatch = _fnmatch.fnmatch
    ignored = False
    filename = self.convert_path(filename)
    basename = os.path.basename(filename)

    for pattern in self.patterns:
        if pattern.dir_only and not isdir:
            continue
        if (not ignored or pattern.invert) and pattern.match(filename):
            if pattern.invert:
                # This file is definitely NOT ignored, no matter what
                # other patterns match.
                return MATCH_INCLUDE
            ignored = True

    if ignored:
        return MATCH_IGNORE
    else:
        return MATCH_DEFAULT
Match the specified *filename*. If *isdir* is False, directory-only patterns will be ignored. Returns one of

- #MATCH_DEFAULT
- #MATCH_IGNORE
- #MATCH_INCLUDE
def parse(self, lines, root):
    lst = IgnoreList(root)
    lst.parse(lines)
    self.append(lst)
Shortcut for #IgnoreList.parse() and #IgnoreListCollection.append().
def match(self, filename, isdir=False):
    for lst in self:
        result = lst.match(filename, isdir)
        if result != MATCH_DEFAULT:
            return result
    return MATCH_DEFAULT
Match all the #IgnoreList#s in this collection. Returns one of

- #MATCH_DEFAULT
- #MATCH_IGNORE
- #MATCH_INCLUDE
def reply(self, message, message_type):
    if message_type == MULTIPART:
        raise Exception("Unsupported reply type")
    super(Replier, self).send(message, message_type)
Send a reply message of the given type.

Args:
- message: the message to publish
- message_type: the type of message being sent
def parse_domain(url):
    domain_match = lib.DOMAIN_REGEX.match(url)
    if domain_match:
        return domain_match.group()
parse the domain from the url
def get_credentials(options, environment):
    if options['--username'] or options['--auth']:
        if not options['--username']:
            options['<username>'] = lib.prompt("Please enter the username for %s..." % environment)
        if not options['--password']:
            options['<password>'] = lib.prompt("Please enter the password for %s..." % environment,
                                               secret=True)
    return options
Get credentials or prompt for them from options
def check_type(self, value):
    if self.null and value is None:
        return
    if self.type is not None and not isinstance(value, self.type):
        msg = '{0!r} expected type {1}'
        raise TypeError(msg.format(self.full_name, self.type.__name__))
Raises a #TypeError if *value* is not an instance of the field's #type.
def get_default(self):
    if self.default is not NotImplemented:
        return self.default
    elif self.default_factory is not None:
        return self.default_factory()
    else:
        raise RuntimeError('{0!r} has no default value'.format(self.full_name))
Return the default value of the field. Returns either #default, the return value of #default_factory or raises a #RuntimeError if the field has no default value.
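The intended semantics, sketched with a hypothetical `Field` constructor (only `default`, `default_factory` and `get_default()` are taken from this section):

count = Field(type=int, default=0)             # hypothetical constructor
tags = Field(type=list, default_factory=list)  # fresh list per call

count.get_default()  # -> 0
tags.get_default()   # -> [] (a new list each time)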
def full_name(self):
    entity = self.entity.__name__ if self.entity is not None else None
    name = self.name if self.name is not None else None
    if entity and name:
        return entity + '.' + name
    elif entity:
        return entity + '.<unnamed>'
    elif name:
        return '<unbound>.' + name
    else:
        return '<unbound>.<unnamed>'
The full name of the field. This is the field's entity name concatenated with the field's name. If the field is unnamed or not bound to an entity, the respective part reads '<unnamed>' or '<unbound>'.
def type_name(self):
    res = self.type.__name__
    if self.type.__module__ not in ('__builtin__', 'builtins'):
        res = self.type.__module__ + '.' + res
    return res
Returns the full type identifier of the field.
def get_subclass_from_module(module, parent_class):
    try:
        r = __recursive_import(module)
        member_dict = dict(inspect.getmembers(r))
        sprinter_class = parent_class
        for v in member_dict.values():
            if inspect.isclass(v) and issubclass(v, parent_class) and v != parent_class:
                if sprinter_class is parent_class:
                    sprinter_class = v
        if sprinter_class is None:
            raise SprinterException("No subclass %s that extends %s exists in classpath!"
                                    % (module, str(parent_class)))
        return sprinter_class
    except ImportError:
        e = sys.exc_info()[1]
        raise e
Get a subclass of *parent_class* from the module at *module*. get_subclass_from_module performs reflection to find the first class in the module path that extends the parent_class, and returns it.
def __recursive_import(module_name):
    names = module_name.split(".")
    path = None
    module = None
    while len(names) > 0:
        if module:
            path = module.__path__
        name = names.pop(0)
        (module_file, pathname, description) = imp.find_module(name, path)
        module = imp.load_module(name, module_file, pathname, description)
    return module
Recursively looks for and imports the names, returning the module desired.

>>> __recursive_import("sprinter.formula.unpack")  # doctest: +ELLIPSIS
<module 'unpack' from '...'>

Currently, modules with relative imports don't work.
def err_exit(msg, rc=1):
    print(msg, file=sys.stderr)
    sys.exit(rc)
Print msg to stderr and exit with rc.
def popen(self, cmd):
    process = Popen(cmd, shell=True, stdout=PIPE, env=self.env)
    stdoutdata, stderrdata = process.communicate()
    return process.returncode, stdoutdata
Execute an external command and return (rc, output).
def read_file(self, infile):
    try:
        with open(infile, 'rt') as file:
            return file.read()
    except UnicodeDecodeError as e:
        err_exit('Error reading %s: %s' % (infile, e))
    except (IOError, OSError) as e:
        err_exit('Error reading %s: %s' % (infile, e.strerror or e))
Read a reST file into a string.
def write_file(self, html, outfile):
    try:
        with open(outfile, 'wt') as file:
            file.write(html)
    except (IOError, OSError) as e:
        err_exit('Error writing %s: %s' % (outfile, e.strerror or e))
Write an HTML string to a file.
def convert_string(self, rest):
    try:
        html = publish_string(rest, writer_name='html')
    except SystemExit as e:
        err_exit('HTML conversion failed with error: %s' % e.code)
    else:
        if sys.version_info[0] >= 3:
            return html.decode('utf-8')
        return html
Convert a reST string to an HTML string.
def apply_styles(self, html, styles):
    index = html.find('</head>')
    if index >= 0:
        return ''.join((html[:index], styles, html[index:]))
    return html
Insert style information into the HTML string.
def publish_string(self, rest, outfile, styles=''):
    html = self.convert_string(rest)
    html = self.strip_xml_header(html)
    html = self.apply_styles(html, styles)
    self.write_file(html, outfile)
    return outfile
Render a reST string as HTML.
def publish_file(self, infile, outfile, styles=''):
    rest = self.read_file(infile)
    return self.publish_string(rest, outfile, styles)
Render a reST file as HTML.
def upgrade(self):
    warn('Upgrading ' + self.filename)
    if self.backup_config(self.filename):
        return self.write_default_config(self.filename)
    return False
Upgrade the config file.
def backup_config(self, filename):
    backup_name = filename + '-' + self.version
    warn('Moving current configuration to ' + backup_name)
    try:
        shutil.copy2(filename, backup_name)
        return True
    except (IOError, OSError) as e:
        print('Error copying %s: %s' % (filename, e.strerror or e), file=sys.stderr)
        return False
Backup the current config file.
def write_default_config(self, filename):
    try:
        with open(filename, 'wt') as file:
            file.write(DEFAULT_CONFIG)
        return True
    except (IOError, OSError) as e:
        print('Error writing %s: %s' % (filename, e.strerror or e), file=sys.stderr)
        return False
Write the default config file.
def set_defaults(self, config_file):
    self.defaults = Defaults(config_file)
    self.python = Python()
    self.setuptools = Setuptools()
    self.docutils = Docutils()
    self.styles = self.defaults.styles
    self.browser = self.defaults.browser
    self.list = False
Set defaults.
def reset_defaults(self, config_file):
    if not exists(config_file):
        err_exit('No such file: %(config_file)s' % locals())
    if not isfile(config_file):
        err_exit('Not a file: %(config_file)s' % locals())
    if not os.access(config_file, os.R_OK):
        err_exit('File cannot be read: %(config_file)s' % locals())
    self.set_defaults(config_file)
Reset defaults.
def write_defaults(self):
    self.defaults.write()
    self.reset_defaults(self.defaults.filename)
Create default config file and reload.
def upgrade_defaults(self):
    self.defaults.upgrade()
    self.reset_defaults(self.defaults.filename)
Upgrade config file and reload.
def parse_options(self, args, depth=0):
    style_names = tuple(self.defaults.known_styles)
    style_opts = tuple('--' + x for x in style_names)
    try:
        options, remaining_args = getopt.gnu_getopt(
            args, 'b:c:hls:v',
            ('help', 'style=', 'version', 'list-styles', 'browser=',
             'config-file=') + style_names)
    except getopt.GetoptError as e:
        err_exit('viewdoc: %s\n%s' % (e.msg, USAGE))

    for name, value in options:
        if name in ('-s', '--style'):
            self.styles = self.defaults.known_styles.get(value, '')
        elif name in style_opts:
            self.styles = self.defaults.known_styles.get(name[2:], '')
        elif name in ('-b', '--browser'):
            self.browser = value
        elif name in ('-l', '--list-styles'):
            self.list = True
        elif name in ('-h', '--help'):
            msg_exit(HELP)
        elif name in ('-v', '--version'):
            msg_exit(VERSION)
        elif name in ('-c', '--config-file') and depth == 0:
            self.reset_defaults(expanduser(value))
            return self.parse_options(args, depth + 1)

    if len(remaining_args) > 1:
        err_exit('viewdoc: too many arguments\n%s' % USAGE)
    if not isfile(self.defaults.filename) and depth == 0:
        self.write_defaults()
        return self.parse_options(args, depth + 1)
    if self.defaults.version < CONFIG_VERSION and depth == 0:
        self.upgrade_defaults()
        return self.parse_options(args, depth + 1)
    if self.list:
        self.list_styles()
    return remaining_args
Parse command line options.