response
stringlengths
1
33.1k
instruction
stringlengths
22
582k
``['$XONSH_SYS_CONFIG_DIR/xonshrc', '$XONSH_CONFIG_DIR/xonsh/rc.xsh', '~/.xonshrc']``
def default_xonshrc(env) -> "tuple[str, ...]":
    """``['$XONSH_SYS_CONFIG_DIR/xonshrc', '$XONSH_CONFIG_DIR/xonsh/rc.xsh', '~/.xonshrc']``"""
    rc_paths = (
        os.path.join(xonsh_sys_config_dir(env), "xonshrc"),
        os.path.join(xonsh_config_dir(env), "rc.xsh"),
        os.path.expanduser("~/.xonshrc"),
    )
    # Warn users still carrying the deprecated static-config file around.
    legacy_cfg = xonshconfig(env)
    if os.path.isfile(legacy_cfg):
        print(
            "WARNING! old style configuration ("
            + legacy_cfg
            + ") is no longer supported. "
            + "Please migrate to xonshrc."
        )
    return rc_paths
``['$XONSH_SYS_CONFIG_DIR/rc.d', '$XONSH_CONFIG_DIR/rc.d']``
def default_xonshrcdir(env):
    """Default directories searched for run-control scripts:
    ``['$XONSH_SYS_CONFIG_DIR/rc.d', '$XONSH_CONFIG_DIR/rc.d']``
    """
    # get_config_paths resolves the same relative name against both the
    # system and the user config directory.
    return get_config_paths(env, "rc.d")
By default, the following paths are searched. 1. ``$XONSH_CONFIG_DIR/completions`` - user level completions 2. ``$XONSH_SYS_CONFIG_DIR/completions`` - system level completions 3. ``$XONSH_DATA_DIR/generated_completions`` - auto generated completers from man pages 4. ``$XDG_DATA_DIRS/xonsh/vendor_completions`` - completers from other programs can be placed here. Other than this, Python package namespace ``xompletions`` can be used to put completer modules as well.
def default_completer_dirs(env):
    """By default, the following paths are searched.
    1. ``$XONSH_CONFIG_DIR/completions`` - user level completions
    2. ``$XONSH_SYS_CONFIG_DIR/completions`` - system level completions
    3. ``$XONSH_DATA_DIR/generated_completions`` - auto generated completers from man pages
    4. ``$XDG_DATA_DIRS/xonsh/vendor_completions`` - completers from other programs can be placed here.

    Other than this, Python package namespace ``xompletions`` can be used to put completer modules as well.
    """
    # layout inspired by fish:
    # https://fishshell.com/docs/current/completions.html#where-to-put-completions
    user_dirs = [
        os.path.join(env["XONSH_CONFIG_DIR"], "completions"),
        os.path.join(env["XONSH_SYS_CONFIG_DIR"], "completions"),
        os.path.join(env["XONSH_DATA_DIR"], "generated_completions"),
    ]
    vendor_dirs = [
        os.path.join(base, "xonsh", "vendor_completions")
        for base in env["XDG_DATA_DIRS"]
    ]
    return user_dirs + vendor_dirs
Appends a newline if we are in interactive mode
def xonsh_append_newline(env):
    """Default: append a trailing newline only when running interactively."""
    interactive = env.get("XONSH_INTERACTIVE", False)
    return interactive
Gets a default instance of LsColors
def default_lscolors(env):
    """Gets a default instance of LsColors.

    If the process inherited ``LS_COLORS`` from its parent environment, that
    value is parsed; otherwise the dircolors defaults are used.  (Fixes the
    "instanse" typo in the original docstring.)
    """
    inherited_lscolors = os_environ.get("LS_COLORS", None)
    if inherited_lscolors is None:
        lsc = LsColors.fromdircolors()
    else:
        lsc = LsColors.fromstring(inherited_lscolors)
    # have to place this in the env, so it is applied
    env["LS_COLORS"] = lsc
    return lsc
``xonsh.prompt.PROMPT_FIELDS``
def default_prompt_fields(env):
    """``xonsh.prompt.PROMPT_FIELDS``"""
    # TODO: generate documentation for all default fields
    return prompt.PromptFields(XSH)
Locates an executable on the file system.
def locate_binary(name):
    """Locates an executable on the file system via the session's commands cache."""
    cache = XSH.commands_cache
    return cache.locate_binary(name)
Attempts to read in all xonshrc files (and search xonshrc directories), and returns the list of rc file paths successfully loaded, in the order of loading.
def xonshrc_context(
    rcfiles=None, rcdirs=None, execer=None, ctx=None, env=None, login=True
):
    """Attempts to read in all xonshrc files (and search xonshrc directories),
    and returns the list of rc file paths successfully loaded, in the order
    of loading.
    """
    loaded = []
    if ctx is None:
        ctx = {}
    # Run rc files with threaded subprocesses disabled; restore the previous
    # setting afterwards unless an rc file changed it itself.
    orig_thread = env.get("THREAD_SUBPROCS")
    env["THREAD_SUBPROCS"] = None

    def _source(path):
        # Record the path only when the script ran successfully.
        ok = xonsh_script_run_control(path, ctx, env, execer=execer, login=login)
        if ok:
            loaded.append(path)

    for rcfile in rcfiles or ():
        if os.path.isfile(rcfile):
            _source(rcfile)
    for rcdir in rcdirs or ():
        for rcfile in sorted(dict(scan_dir_for_source_files(rcdir))):
            _source(rcfile)

    if env["THREAD_SUBPROCS"] is None:
        env["THREAD_SUBPROCS"] = orig_thread
    return loaded
Environment fixes for Windows. Operates in-place.
def windows_foreign_env_fixes(ctx):
    """Environment fixes for Windows. Operates in-place."""
    # remove these bash variables which only cause problems.
    for name in ("HOME", "OLDPWD"):
        ctx.pop(name, None)
    # Override path-related bash variables; on Windows bash uses
    # /c/Windows/System32 syntax instead of C:\\Windows\\System32
    # which messes up these environment variables for xonsh.
    for name in ("PATH", "TEMP", "TMP"):
        if name in os_environ:
            ctx[name] = os_environ[name]
        elif name in ctx:
            del ctx[name]
    ctx["PWD"] = _get_cwd() or ""
Environment fixes for all operating systems
def foreign_env_fixes(ctx):
    """Environment fixes for all operating systems"""
    # Other shells' PROMPT definitions generally don't work in xonsh.
    ctx.pop("PROMPT", None)
Loads a xonsh file and applies it as a run control. Any exceptions are logged here, returns boolean indicating success.
def xonsh_script_run_control(filename, ctx, env, execer=None, login=True):
    """Loads a xonsh file and applies it as a run control.
    Any exceptions are logged here, returns boolean indicating success.
    """
    if execer is None:
        return False
    updates = {"__file__": filename, "__name__": os.path.abspath(filename)}
    rc_dir = _RcPath(os.path.dirname(filename))
    sys.path.append(rc_dir)
    with swap_values(ctx, updates):
        try:
            exc_info = run_script_with_cache(filename, execer, ctx)
        except SyntaxError:
            exc_info = sys.exc_info()
        if exc_info == (None, None, None):
            loaded = True
        else:
            err_type, err, _ = exc_info
            loaded = False
            if err_type is SyntaxError:
                msg = "syntax error in xonsh run control file {0!r}: {1!s}"
            else:
                msg = "error running xonsh run control file {0!r}: {1!s}"
            print_exception(msg.format(filename, err), exc_info=exc_info)
    # Remove the rc dir we appended; identity comparison so an identical
    # path the user added themselves survives.
    sys.path = [p for p in sys.path if p is not rc_dir]
    return loaded
Constructs a default xonsh environment.
def default_env(env=None):
    """Constructs a default xonsh environment."""
    # in order of increasing precedence
    ctx = {
        "BASH_COMPLETIONS": list(DEFAULT_VARS["BASH_COMPLETIONS"].default),
        "PROMPT_FIELDS": DEFAULT_VARS["PROMPT_FIELDS"].default(env),
        "XONSH_VERSION": XONSH_VERSION,
    }
    ctx.update(os_environ)
    ctx["PWD"] = _get_cwd() or ""
    # These can cause problems for programs (#2543)
    for name in ("LINES", "COLUMNS"):
        ctx.pop(name, None)
    # other shells' PROMPT definitions generally don't work in XONSH:
    ctx.pop("PROMPT", None)
    # increment $SHLVL
    old_shlvl = to_shlvl(ctx.get("SHLVL", None))
    ctx["SHLVL"] = adjust_shlvl(old_shlvl, 1)
    # finalize env
    if env is not None:
        ctx.update(env)
    return ctx
Makes a dictionary containing the $ARGS and $ARG<N> environment variables. If the supplied ARGS is None, then sys.argv is used.
def make_args_env(args=None):
    """Makes a dictionary containing the $ARGS and $ARG<N> environment
    variables. If the supplied ARGS is None, then sys.argv is used.
    """
    if args is None:
        args = sys.argv
    env = {f"ARG{i}": arg for i, arg in enumerate(args)}
    # make a copy so we don't interfere with original variable
    env["ARGS"] = list(args)
    return env
Extracts data from a foreign (non-xonsh) shells. Currently this gets the environment, aliases, and functions but may be extended in the future. Parameters ---------- shell : str The name of the shell, such as 'bash' or '/bin/sh'. interactive : bool, optional Whether the shell should be run in interactive mode. login : bool, optional Whether the shell should be a login shell. envcmd : str or None, optional The command to generate environment output with. aliascmd : str or None, optional The command to generate alias output with. extra_args : tuple of str, optional Additional command line options to pass into the shell. currenv : tuple of items or None, optional Manual override for the current environment. safe : bool, optional Flag for whether or not to safely handle exceptions and other errors. prevcmd : str, optional A command to run in the shell before anything else, useful for sourcing and other commands that may require environment recovery. postcmd : str, optional A command to run after everything else, useful for cleaning up any damage that the prevcmd may have caused. funcscmd : str or None, optional This is a command or script that can be used to determine the names and locations of any functions that are native to the foreign shell. This command should print *only* a JSON object that maps function names to the filenames where the functions are defined. If this is None, then a default script will attempted to be looked up based on the shell name. Callable wrappers for these functions will be returned in the aliases dictionary. sourcer : str or None, optional How to source a foreign shell file for purposes of calling functions in that shell. If this is None, a default value will attempt to be looked up based on the shell name. use_tmpfile : bool, optional This specifies if the commands are written to a tmp file or just parsed directly to the shell tmpfile_ext : str or None, optional If tmpfile is True this sets specifies the extension used. 
runcmd : str or None, optional Command line switches to use when running the script, such as -c for Bash and /C for cmd.exe. seterrprevcmd : str or None, optional Command that enables exit-on-error for the shell that is run at the start of the script. For example, this is "set -e" in Bash. To disable exit-on-error behavior, simply pass in an empty string. seterrpostcmd : str or None, optional Command that enables exit-on-error for the shell that is run at the end of the script. For example, this is "if errorlevel 1 exit 1" in cmd.exe. To disable exit-on-error behavior, simply pass in an empty string. show : bool, optional Whether or not to display the script that will be run. dryrun : bool, optional Whether or not to actually run and process the command. files : tuple of str, optional Paths to source. Returns ------- env : dict Dictionary of shell's environment. (None if the subproc command fails) aliases : dict Dictionary of shell's aliases, this includes foreign function wrappers.(None if the subproc command fails)
def foreign_shell_data(
    shell,
    interactive=True,
    login=False,
    envcmd=None,
    aliascmd=None,
    extra_args=(),
    currenv=None,
    safe=True,
    prevcmd="",
    postcmd="",
    funcscmd=None,
    sourcer=None,
    use_tmpfile=False,
    tmpfile_ext=None,
    runcmd=None,
    seterrprevcmd=None,
    seterrpostcmd=None,
    show=False,
    dryrun=False,
    files=(),
):
    """Extracts data from a foreign (non-xonsh) shells. Currently this gets
    the environment, aliases, and functions but may be extended in the future.

    Parameters
    ----------
    shell : str
        The name of the shell, such as 'bash' or '/bin/sh'.
    interactive : bool, optional
        Whether the shell should be run in interactive mode.
    login : bool, optional
        Whether the shell should be a login shell.
    envcmd : str or None, optional
        The command to generate environment output with.
    aliascmd : str or None, optional
        The command to generate alias output with.
    extra_args : tuple of str, optional
        Additional command line options to pass into the shell.
    currenv : tuple of items or None, optional
        Manual override for the current environment.
    safe : bool, optional
        Flag for whether or not to safely handle exceptions and other errors.
    prevcmd : str, optional
        A command to run in the shell before anything else, useful for
        sourcing and other commands that may require environment recovery.
    postcmd : str, optional
        A command to run after everything else, useful for cleaning up any
        damage that the prevcmd may have caused.
    funcscmd : str or None, optional
        This is a command or script that can be used to determine the names
        and locations of any functions that are native to the foreign shell.
        This command should print *only* a JSON object that maps function
        names to the filenames where the functions are defined.  If this is
        None, then a default script will attempted to be looked up based on
        the shell name.  Callable wrappers for these functions will be
        returned in the aliases dictionary.
    sourcer : str or None, optional
        How to source a foreign shell file for purposes of calling functions
        in that shell.  If this is None, a default value will attempt to be
        looked up based on the shell name.
    use_tmpfile : bool, optional
        This specifies if the commands are written to a tmp file or just
        parsed directly to the shell.
    tmpfile_ext : str or None, optional
        If tmpfile is True this sets specifies the extension used.
    runcmd : str or None, optional
        Command line switches to use when running the script, such as
        -c for Bash and /C for cmd.exe.
    seterrprevcmd : str or None, optional
        Command that enables exit-on-error for the shell that is run at the
        start of the script.  For example, this is "set -e" in Bash.  To
        disable exit-on-error behavior, simply pass in an empty string.
    seterrpostcmd : str or None, optional
        Command that enables exit-on-error for the shell that is run at the
        end of the script.  For example, this is "if errorlevel 1 exit 1" in
        cmd.exe.  To disable exit-on-error behavior, simply pass in an
        empty string.
    show : bool, optional
        Whether or not to display the script that will be run.
    dryrun : bool, optional
        Whether or not to actually run and process the command.
    files : tuple of str, optional
        Paths to source.

    Returns
    -------
    env : dict
        Dictionary of shell's environment. (None if the subproc command fails)
    aliases : dict
        Dictionary of shell's aliases, this includes foreign function
        wrappers. (None if the subproc command fails)
    """
    cmd = [shell]
    cmd.extend(extra_args)  # needs to come here for GNU long options
    if interactive:
        cmd.append("-i")
    if login:
        cmd.append("-l")
    shkey = CANON_SHELL_NAMES[shell]
    # Fill in per-shell defaults for any command the caller did not supply.
    envcmd = DEFAULT_ENVCMDS.get(shkey, "env") if envcmd is None else envcmd
    aliascmd = DEFAULT_ALIASCMDS.get(shkey, "alias") if aliascmd is None else aliascmd
    funcscmd = DEFAULT_FUNCSCMDS.get(shkey, "echo {}") if funcscmd is None else funcscmd
    tmpfile_ext = (
        DEFAULT_TMPFILE_EXT.get(shkey, "sh") if tmpfile_ext is None else tmpfile_ext
    )
    runcmd = DEFAULT_RUNCMD.get(shkey, "-c") if runcmd is None else runcmd
    seterrprevcmd = (
        DEFAULT_SETERRPREVCMD.get(shkey, "") if seterrprevcmd is None else seterrprevcmd
    )
    seterrpostcmd = (
        DEFAULT_SETERRPOSTCMD.get(shkey, "") if seterrpostcmd is None else seterrpostcmd
    )
    # Render the full script that the foreign shell will execute.
    command = COMMAND.format(
        envcmd=envcmd,
        aliascmd=aliascmd,
        prevcmd=prevcmd,
        postcmd=postcmd,
        funcscmd=funcscmd,
        seterrprevcmd=seterrprevcmd,
        seterrpostcmd=seterrpostcmd,
    ).strip()
    if show:
        print(command)
    if dryrun:
        return None, None
    cmd.append(runcmd)
    if not use_tmpfile:
        cmd.append(command)
    else:
        # delete=False so the subprocess can open it; removed in the finally.
        tmpfile = tempfile.NamedTemporaryFile(suffix=tmpfile_ext, delete=False)
        tmpfile.write(command.encode("utf8"))
        tmpfile.close()
        cmd.append(tmpfile.name)
    if currenv is None and XSH.env:
        currenv = XSH.env.detype()
    elif currenv is not None:
        currenv = dict(currenv)
    try:
        s = subprocess.check_output(
            cmd,
            stderr=subprocess.PIPE,
            env=currenv,
            # start new session to avoid hangs
            # (doesn't work on Cygwin though)
            start_new_session=((not ON_CYGWIN) and (not ON_MSYS)),
            text=True,
        )
    except (subprocess.CalledProcessError, FileNotFoundError):
        # safe mode: report failure via (None, None) instead of raising
        if not safe:
            raise
        return None, None
    finally:
        if use_tmpfile:
            os.remove(tmpfile.name)
    env = parse_env(s)
    aliases = parse_aliases(
        s,
        shell=shell,
        sourcer=sourcer,
        files=files,
        extra_args=extra_args,
    )
    funcs = parse_funcs(
        s,
        shell=shell,
        sourcer=sourcer,
        files=files,
        extra_args=extra_args,
    )
    aliases.update(funcs)
    return env, aliases
Parses the environment portion of string into a dict.
def parse_env(s):
    """Parses the environment portion of string into a dict."""
    match = ENV_RE.search(s)
    if match is None:
        return {}
    body = match.group(1)
    # drop a single trailing newline before splitting into pairs
    if body.endswith("\n"):
        body = body[:-1]
    return dict(ENV_SPLIT_RE.findall(body))
Parses the aliases portion of string into a dict.
def parse_aliases(s, shell, sourcer=None, files=(), extra_args=()):
    """Parses the aliases portion of string into a dict."""
    m = ALIAS_RE.search(s)
    if m is None:
        return {}
    # rejoin continuation lines before splitting into alias definitions
    block = m.group(1).replace("\\\n", " ")
    aliases = {}
    for line in block.splitlines():
        if not line.startswith("alias ") or "=" not in line:
            continue
        key, value = line.split("=", 1)
        try:
            key = key[6:]  # lstrip 'alias '
            # undo bash's weird quoting of single quotes (sh_single_quote)
            value = value.replace("'\\''", "'")
            # strip one single quote at the start and end of value
            if value[0] == "'" and value[-1] == "'":
                value = value[1:-1]
            # now compute actual alias
            if FS_EXEC_ALIAS_RE.search(value) is None:
                # simple list of args alias
                value = shlex.split(value)
            else:
                # alias is more complex, use ExecAlias, but via shell
                value = ForeignShellExecAlias(
                    src=value,
                    shell=shell,
                    sourcer=sourcer,
                    files=files,
                    extra_args=extra_args,
                )
        except ValueError as exc:
            warnings.warn(
                f'could not parse alias "{key}": {exc!r}',
                RuntimeWarning,
                stacklevel=2,
            )
            continue
        aliases[key] = value
    return aliases
Parses the funcs portion of a string into a dict of callable foreign function wrappers.
def parse_funcs(s, shell, sourcer=None, files=(), extra_args=()):
    """Parses the funcs portion of a string into a dict of callable foreign
    function wrappers.
    """
    m = FUNCS_RE.search(s)
    if m is None:
        return {}
    names = m.group(1)
    if ON_WINDOWS:
        # normalize path separators in the shell's output
        names = names.replace(os.sep, os.altsep)
    funcs = {}
    for funcname in names.split():
        if funcname.startswith("_"):
            continue  # skip private functions and invalid files
        funcs[funcname] = ForeignShellFunctionAlias(
            funcname=funcname,
            shell=shell,
            sourcer=sourcer,
            files=files,
            extra_args=extra_args,
        )
    return funcs
Ensures that a mapping follows the shell specification.
def ensure_shell(shell):
    """Ensures that a mapping follows the shell specification.

    Normalizes every recognized key in-place (strings via ensure_string,
    flags via to_bool, currenv to a tuple of pairs) and raises KeyError on
    unknown keys.  Returns the normalized mapping.
    """
    if not isinstance(shell, cabc.MutableMapping):
        shell = dict(shell)
    shell_keys = set(shell.keys())
    if not (shell_keys <= VALID_SHELL_PARAMS):
        raise KeyError(f"unknown shell keys: {shell_keys - VALID_SHELL_PARAMS}")
    # 'shell' itself is mandatory; everything below is optional.
    shell["shell"] = ensure_string(shell["shell"]).lower()
    if "interactive" in shell_keys:
        shell["interactive"] = to_bool(shell["interactive"])
    if "login" in shell_keys:
        shell["login"] = to_bool(shell["login"])
    if "envcmd" in shell_keys:
        shell["envcmd"] = (
            None if shell["envcmd"] is None else ensure_string(shell["envcmd"])
        )
    if "aliascmd" in shell_keys:
        shell["aliascmd"] = (
            None if shell["aliascmd"] is None else ensure_string(shell["aliascmd"])
        )
    if "extra_args" in shell_keys and not isinstance(shell["extra_args"], tuple):
        shell["extra_args"] = tuple(map(ensure_string, shell["extra_args"]))
    if "currenv" in shell_keys and not isinstance(shell["currenv"], tuple):
        # accept either a mapping or a sequence of (key, value) pairs
        ce = shell["currenv"]
        if isinstance(ce, cabc.Mapping):
            ce = tuple((ensure_string(k), v) for k, v in ce.items())
        elif isinstance(ce, cabc.Sequence):
            ce = tuple((ensure_string(k), v) for k, v in ce)
        else:
            raise RuntimeError("unrecognized type for currenv")
        shell["currenv"] = ce
    if "safe" in shell_keys:
        shell["safe"] = to_bool(shell["safe"])
    if "prevcmd" in shell_keys:
        shell["prevcmd"] = ensure_string(shell["prevcmd"])
    if "postcmd" in shell_keys:
        shell["postcmd"] = ensure_string(shell["postcmd"])
    if "funcscmd" in shell_keys:
        shell["funcscmd"] = (
            None if shell["funcscmd"] is None else ensure_string(shell["funcscmd"])
        )
    if "sourcer" in shell_keys:
        shell["sourcer"] = (
            None if shell["sourcer"] is None else ensure_string(shell["sourcer"])
        )
    if "seterrprevcmd" in shell_keys:
        shell["seterrprevcmd"] = (
            None
            if shell["seterrprevcmd"] is None
            else ensure_string(shell["seterrprevcmd"])
        )
    if "seterrpostcmd" in shell_keys:
        shell["seterrpostcmd"] = (
            None
            if shell["seterrpostcmd"] is None
            else ensure_string(shell["seterrpostcmd"])
        )
    return shell
Loads environments from foreign shells. Parameters ---------- shells : sequence of dicts An iterable of dicts that can be passed into foreign_shell_data() as keyword arguments. Returns ------- env : dict A dictionary of the merged environments.
def load_foreign_envs(shells):
    """Loads environments from foreign shells.

    Parameters
    ----------
    shells : sequence of dicts
        An iterable of dicts that can be passed into foreign_shell_data() as
        keyword arguments.

    Returns
    -------
    env : dict
        A dictionary of the merged environments.
    """
    merged = {}
    for spec in shells:
        shenv, _ = foreign_shell_data(**ensure_shell(spec))
        if shenv:
            merged.update(shenv)
    return merged
Loads aliases from foreign shells. Parameters ---------- shells : sequence of dicts An iterable of dicts that can be passed into foreign_shell_data() as keyword arguments. Returns ------- aliases : dict A dictionary of the merged aliases.
def load_foreign_aliases(shells):
    """Loads aliases from foreign shells.

    Parameters
    ----------
    shells : sequence of dicts
        An iterable of dicts that can be passed into foreign_shell_data() as
        keyword arguments.

    Returns
    -------
    aliases : dict
        A dictionary of the merged aliases.
    """
    aliases = {}
    xonsh_aliases = XSH.aliases
    for shell in shells:
        shell = ensure_shell(shell)
        _, shaliases = foreign_shell_data(**shell)
        # foreign_shell_data returns None when the subprocess fails; normalize
        # unconditionally.  Previously this was only done on the
        # no-override branch, so a failed shell with $FOREIGN_ALIASES_OVERRIDE
        # set crashed on aliases.update(None).
        shaliases = {} if shaliases is None else shaliases
        if not XSH.env.get("FOREIGN_ALIASES_OVERRIDE"):
            # xonsh aliases win: drop any foreign alias that would shadow one
            for alias in set(shaliases) & set(xonsh_aliases):
                del shaliases[alias]
                if XSH.env.get("XONSH_DEBUG") >= 1:
                    print(
                        f"aliases: ignoring alias {alias!r} of shell {shell['shell']!r} "
                        "which tries to override xonsh alias.",
                        file=sys.stderr,
                    )
        aliases.update(shaliases)
    return aliases
Finds the source encoding given bytes representing a file by checking a special comment at either the first or second line of the source file. https://docs.python.org/3/howto/unicode.html#unicode-literals-in-python-source-code If no encoding is found, UTF-8 codec with BOM signature will be returned as it skips an optional UTF-8 encoded BOM at the start of the data and is otherwise the same as UTF-8 https://docs.python.org/3/library/codecs.html#module-encodings.utf_8_sig
def find_source_encoding(src):
    """Finds the source encoding given bytes representing a file by
    checking a special comment at either the first or second line of the
    source file.
    https://docs.python.org/3/howto/unicode.html#unicode-literals-in-python-source-code

    If no encoding is found, UTF-8 codec with BOM signature will be returned
    as it skips an optional UTF-8 encoded BOM at the start of the data
    and is otherwise the same as UTF-8
    https://docs.python.org/3/library/codecs.html#module-encodings.utf_8_sig
    """
    fallback = "utf-8-sig"
    line_one, _, remainder = src.partition(b"\n")
    line_two = remainder.partition(b"\n")[0]
    # PEP 263 allows the coding comment on either of the first two lines.
    for line in (line_one, line_two):
        match = ENCODING_LINE.match(line)
        if match is not None:
            return match.group(1).decode(fallback)
    return fallback
Figures out if we should dispatch to a load event
def _should_dispatch_xonsh_import_event_loader():
    """Figures out if we should dispatch to a load event"""
    hooks = (
        events.on_import_pre_create_module,
        events.on_import_post_create_module,
        events.on_import_pre_exec_module,
        events.on_import_post_exec_module,
    )
    # dispatch only when at least one import event has a handler registered
    return any(len(hook) > 0 for hook in hooks)
Install Xonsh import hooks in ``sys.meta_path`` in order for ``.xsh`` files to be importable and import events to be fired. Can safely be called many times, will be no-op if xonsh import hooks are already present.
def install_import_hooks(execer=ARG_NOT_PRESENT):
    """Install Xonsh import hooks in ``sys.meta_path`` in order for ``.xsh`` files
    to be importable and import events to be fired.

    Can safely be called many times, will be no-op if xonsh import hooks are
    already present.
    """
    if execer is ARG_NOT_PRESENT:
        print_warning(
            "No execer was passed to install_import_hooks. "
            "This will become an error in future."
        )
        execer = XSH.execer
        if execer is None:
            execer = Execer()
            XSH.load(execer=execer)
    found_imp = found_event = False
    for hook in sys.meta_path:
        if isinstance(hook, XonshImportHook):
            found_imp = True
        elif isinstance(hook, XonshImportEventHook):
            found_event = True
    if not found_imp:
        sys.meta_path.append(XonshImportHook(execer))
    if not found_event:
        # the event hook must run before the regular import machinery
        sys.meta_path.insert(0, XonshImportEventHook())
Make an object info dict with all fields present.
def object_info(**kw):
    """Make an object info dict with all fields present."""
    # start with every known field mapped to None, then overlay the kwargs
    info = dict(itertools.zip_longest(info_fields, [None]))
    info.update(kw)
    return info
Get encoding for python source file defining obj Returns None if obj is not defined in a sourcefile.
def get_encoding(obj):
    """Get encoding for python source file defining obj

    Returns None if obj is not defined in a sourcefile.
    """
    ofile = find_file(obj)
    # Only read real text files on disk: skip objects with no locatable
    # source and binary extension modules.
    if ofile is None:
        return None
    if ofile.endswith((".so", ".dll", ".pyd")):
        return None
    if not os.path.isfile(ofile):
        return None
    with open(ofile, "rb") as buf:
        encoding, _ = detect_encoding(buf.readline)
    return encoding
Stable wrapper around inspect.getdoc. This can't crash because of attribute problems. It also attempts to call a getdoc() method on the given object. This allows objects which provide their docstrings via non-standard mechanisms (like Pyro proxies) to still be inspected by ipython's ? system.
def getdoc(obj):
    """Stable wrapper around inspect.getdoc.

    This can't crash because of attribute problems.

    It also attempts to call a getdoc() method on the given object.  This
    allows objects which provide their docstrings via non-standard mechanisms
    (like Pyro proxies) to still be inspected by ipython's ? system.
    """
    # Allow objects to offer customized documentation via a getdoc method:
    try:
        custom = obj.getdoc()
    except Exception:  # pylint:disable=broad-except
        pass
    else:
        # if we get extra info, we add it to the normal docstring.
        if isinstance(custom, str):
            return inspect.cleandoc(custom)
    try:
        docstr = inspect.getdoc(obj)
        encoding = get_encoding(obj)
        return cast_unicode(docstr, encoding=encoding)
    except Exception:  # pylint:disable=broad-except
        # NOTE(review): the original comment said this hardens against
        # SWIG-wrapped-extension inspect failures, yet the handler
        # re-raises -- behavior kept as-is; confirm the intent upstream.
        raise
Wrapper around inspect.getsource. This can be modified by other projects to provide customized source extraction. Inputs: - obj: an object whose source code we will attempt to extract. Optional inputs: - is_binary: whether the object is known to come from a binary source. This implementation will skip returning any output for binary objects, but custom extractors may know how to meaningfully process them.
def getsource(obj, is_binary=False):
    """Wrapper around inspect.getsource.

    This can be modified by other projects to provide customized source
    extraction.

    Inputs:

    - obj: an object whose source code we will attempt to extract.

    Optional inputs:

    - is_binary: whether the object is known to come from a binary source.
      This implementation will skip returning any output for binary objects,
      but custom extractors may know how to meaningfully process them."""
    if is_binary:
        return None
    # get source if obj was decorated with @decorator
    if hasattr(obj, "__wrapped__"):
        obj = obj.__wrapped__
    try:
        src = inspect.getsource(obj)
    except TypeError:
        # for instances, fall back to the defining class
        if hasattr(obj, "__class__"):
            src = inspect.getsource(obj.__class__)
    encoding = get_encoding(obj)
    return cast_unicode(src, encoding=encoding)
True if obj is a function ()
def is_simple_callable(obj):
    """True if obj is a function ()"""
    if inspect.isfunction(obj) or inspect.ismethod(obj):
        return True
    return isinstance(obj, (_builtin_func_type, _builtin_meth_type))
Wrapper around :func:`inspect.getfullargspec` on Python 3, and :func:inspect.getargspec` on Python 2. In addition to functions and methods, this can also handle objects with a ``__call__`` attribute.
def getargspec(obj):
    """Wrapper around :func:`inspect.getfullargspec` on Python 3, and
    :func:inspect.getargspec` on Python 2.

    In addition to functions and methods, this can also handle objects with a
    ``__call__`` attribute.
    """
    target = obj
    # Non-simple callables (class instances etc.) are inspected via __call__.
    if safe_hasattr(target, "__call__") and not is_simple_callable(target):
        target = target.__call__
    return inspect.getfullargspec(target)
Format argspect, convenience wrapper around inspect's. This takes a dict instead of ordered arguments and calls inspect.format_argspec with the arguments in the necessary order.
def format_argspec(argspec):
    """Format argspec, convenience wrapper around inspect's.

    This takes a dict instead of ordered arguments and renders it the way
    ``inspect.formatargspec`` used to, e.g. ``"(a, b=1, *args, **kw)"``.

    ``inspect.formatargspec`` was deprecated in Python 3.5 and removed in
    3.11, so the string is assembled manually here (the original call
    crashes with AttributeError on modern Pythons).
    """
    args = list(argspec["args"])
    defaults = argspec["defaults"] or ()
    # defaults align with the *last* len(defaults) positional arguments
    n_plain = len(args) - len(defaults)
    parts = args[:n_plain]
    parts += [
        f"{name}={default!r}"
        for name, default in zip(args[n_plain:], defaults)
    ]
    if argspec["varargs"] is not None:
        parts.append("*" + argspec["varargs"])
    if argspec["varkw"] is not None:
        parts.append("**" + argspec["varkw"])
    return "(" + ", ".join(parts) + ")"
Extract call tip data from an oinfo dict. Parameters ---------- oinfo : dict format_call : bool, optional If True, the call line is formatted and returned as a string. If not, a tuple of (name, argspec) is returned. Returns ------- call_info : None, str or (str, dict) tuple. When format_call is True, the whole call information is formatted as a single string. Otherwise, the object's name and its argspec dict are returned. If no call information is available, None is returned. docstring : str or None The most relevant docstring for calling purposes is returned, if available. The priority is: call docstring for callable instances, then constructor docstring for classes, then main object's docstring otherwise (regular functions).
def call_tip(oinfo, format_call=True):
    """Extract call tip data from an oinfo dict.

    Parameters
    ----------
    oinfo : dict
    format_call : bool, optional
        If True, the call line is formatted and returned as a string.
        If not, a tuple of (name, argspec) is returned.

    Returns
    -------
    call_info : None, str or (str, dict) tuple.
        When format_call is True, the whole call information is formatted as a
        single string.  Otherwise, the object's name and its argspec dict are
        returned.  If no call information is available, None is returned.
    docstring : str or None
        The most relevant docstring for calling purposes is returned, if
        available.  The priority is: call docstring for callable instances,
        then constructor docstring for classes, then main object's docstring
        otherwise (regular functions).
    """
    # Get call definition
    argspec = oinfo.get("argspec")
    if argspec is None:
        call_line = None
    else:
        # Callable objects will have 'self' as their first argument, prune
        # it out if it's there for clarity (since users do *not* pass an
        # extra first argument explicitly).
        try:
            leading_self = argspec["args"][0] == "self"
        except (KeyError, IndexError):
            pass
        else:
            if leading_self:
                argspec["args"] = argspec["args"][1:]
        call_line = oinfo["name"] + format_argspec(argspec)
    # Now get docstring.
    # The priority is: call docstring, constructor docstring, main one.
    for key in ("call_docstring", "init_docstring"):
        doc = oinfo.get(key)
        if doc is not None:
            break
    else:
        doc = oinfo.get("docstring", "")
    return call_line, doc
Find the absolute path to the file where an object was defined. This is essentially a robust wrapper around `inspect.getabsfile`. Returns None if no file can be found. Parameters ---------- obj : any Python object Returns ------- fname : str The absolute path to the file where the object was defined.
def find_file(obj):
    """Find the absolute path to the file where an object was defined.

    This is essentially a robust wrapper around `inspect.getabsfile`.

    Returns None if no file can be found.

    Parameters
    ----------
    obj : any Python object

    Returns
    -------
    fname : str
        The absolute path to the file where the object was defined.
    """
    # unwrap decorated objects to reach the original definition
    if safe_hasattr(obj, "__wrapped__"):
        obj = obj.__wrapped__
    fname = None
    try:
        fname = inspect.getabsfile(obj)
    except TypeError:
        # For an instance, the file that matters is where its class was
        # declared.
        if hasattr(obj, "__class__"):
            try:
                fname = inspect.getabsfile(obj.__class__)
            except TypeError:
                # Can happen for builtins
                pass
    except Exception:
        pass
    return cast_unicode(fname)
Find the line number in a file where an object was defined. This is essentially a robust wrapper around `inspect.getsourcelines`. Returns None if no file can be found. Parameters ---------- obj : any Python object Returns ------- lineno : int The line number where the object definition starts.
def find_source_lines(obj):
    """Return the line number where ``obj``'s definition starts.

    A robust wrapper around `inspect.getsourcelines`; returns None when
    no source can be located.
    """
    # Follow decorator chains back to the original function.
    if safe_hasattr(obj, "__wrapped__"):
        obj = obj.__wrapped__

    try:
        try:
            return inspect.getsourcelines(obj)[1]
        except TypeError:
            # For instances, try the class object like getsource() does.
            if hasattr(obj, "__class__"):
                return inspect.getsourcelines(obj.__class__)[1]
            return None
    except Exception:
        return None
Context manager that replaces a thread's task queue and job dictionary with those of the main thread This allows another thread (e.g. the commands jobs, disown, and bg) to handle the main thread's job control.
def use_main_jobs():
    """Context manager that swaps this thread's task queue and job table
    for the main thread's.

    This lets another thread (e.g. the commands jobs, disown, and bg)
    handle the main thread's job control.
    """
    saved_tasks = get_tasks()
    saved_jobs = get_jobs()
    try:
        _jobs_thread_local.tasks = _tasks_main
        _jobs_thread_local.jobs = XSH.all_jobs
        yield
    finally:
        # Always restore the thread's own queue/table, even on error.
        _jobs_thread_local.tasks = saved_tasks
        _jobs_thread_local.jobs = saved_jobs
Safely call wait_for_active_job()
def _safe_wait_for_active_job(last_task=None, backgrounded=False):
    """Call wait_for_active_job(), retrying as long as it reports a
    ChildProcessError (either raised or returned)."""
    while True:
        try:
            result = wait_for_active_job(
                last_task=last_task, backgrounded=backgrounded, return_error=True
            )
        except ChildProcessError as e:
            result = e
        if not isinstance(result, ChildProcessError):
            return result
Get the next active task and put it on top of the queue
def get_next_task():
    """Return the next active (foreground, running) task after moving it
    to the front of the task queue; return None when there is none."""
    tasks = get_tasks()
    _clear_dead_jobs()
    for tid in tasks:
        info = get_task(tid)
        if not info["bg"] and info["status"] == "running":
            # Promote the selected task to the top of the queue.
            tasks.remove(tid)
            tasks.appendleft(tid)
            return get_task(tid)
    return None
Print a line describing job number ``num``.
def print_one_job(num, outfile=sys.stdout, format="dict"):
    """Print a single line describing job number ``num`` to ``outfile``."""
    line = format_job_string(num, format)
    if line:
        print(line, file=outfile)
Get the lowest available unique job number (for the next job created).
def get_next_job_number():
    """Return the lowest job number not currently in use (for the next
    job to be created)."""
    _clear_dead_jobs()
    active = get_jobs()
    num = 1
    while num in active:
        num += 1
    return num
Add a new job to the jobs dictionary.
def add_job(info):
    """Register ``info`` as a new job: timestamp it, mark it running, and
    push it onto the task queue."""
    num = get_next_job_number()
    info["started"] = time.time()
    info["status"] = "running"
    get_tasks().appendleft(num)
    get_jobs()[num] = info
    # Announce background jobs immediately in interactive sessions.
    if info["bg"] and XSH.env.get("XONSH_INTERACTIVE"):
        print_one_job(num)
Clean up jobs for exiting shell In non-interactive mode, send SIGHUP to all jobs. In interactive mode, check for suspended or background jobs, print a warning if any exist, and return False. Otherwise, return True.
def clean_jobs():
    """Clean up jobs for exiting shell.

    In non-interactive mode, send SIGHUP to all jobs.

    In interactive mode, check for suspended or background jobs, print a
    warning if any exist, and return False.  Otherwise, return True.
    """
    jobs_clean = True
    if XSH.env["XONSH_INTERACTIVE"]:
        _clear_dead_jobs()

        if get_jobs():
            global _last_exit_time
            hist = XSH.history
            # Timestamp of the most recent command, if history is tracked.
            if hist is not None and len(hist.tss) > 0:
                last_cmd_start = hist.tss[-1][0]
            else:
                last_cmd_start = None

            if _last_exit_time and last_cmd_start and _last_exit_time > last_cmd_start:
                # Exit occurred after last command started, so it was called as
                # part of the last command and is now being called again
                # immediately. Kill jobs and exit without reminder about
                # unfinished jobs in this case.
                hup_all_jobs()
            else:
                if len(get_jobs()) > 1:
                    msg = "there are unfinished jobs"
                else:
                    msg = "there is an unfinished job"

                if XSH.env["SHELL_TYPE"] != "prompt_toolkit":
                    # The Ctrl+D binding for prompt_toolkit already inserts a
                    # newline
                    print()

                print(f"xonsh: {msg}", file=sys.stderr)
                print("-" * 5, file=sys.stderr)
                jobs([], stdout=sys.stderr)
                print("-" * 5, file=sys.stderr)
                print(
                    'Type "exit" or press "ctrl-d" again to force quit.',
                    file=sys.stderr,
                )
                # Remember when we warned so an immediate second exit
                # attempt (see the branch above) force-quits.
                jobs_clean = False
                _last_exit_time = time.time()
    else:
        hup_all_jobs()

    return jobs_clean
Send SIGHUP to all child processes (called when exiting xonsh).
def hup_all_jobs():
    """Send SIGHUP to every live child process (called when exiting xonsh)."""
    _clear_dead_jobs()
    for info in get_jobs().values():
        _hup(info)
xonsh command: jobs Display a list of all current jobs.
def jobs(args, stdin=None, stdout=sys.stdout, stderr=None):
    """xonsh command: jobs

    Display a list of all current jobs.
    """
    _clear_dead_jobs()
    fmt = "posix" if "--posix" in args else "dict"
    for tid in get_tasks():
        print_one_job(tid, outfile=stdout, format=fmt)
    return None, None
used by fg and bg to resume a job either in the foreground or in the background.
def resume_job(args, wording: tp.Literal["fg", "bg"]):
    """Shared implementation of ``fg`` and ``bg``: resume a suspended job
    either in the foreground or in the background.

    ``args`` is empty, or contains a single job id, "+", or "-".
    Returns an (out, err) tuple on error, None on success.
    """
    _clear_dead_jobs()
    tasks = get_tasks()
    if len(tasks) == 0:
        return "", "There are currently no suspended jobs"

    if len(args) == 0:
        tid = tasks[0]  # take the last manipulated task by default
    elif len(args) == 1:
        try:
            if args[0] == "+":  # take the last manipulated task
                tid = tasks[0]
            elif args[0] == "-":  # take the second to last manipulated task
                tid = tasks[1]
            else:
                tid = int(args[0])
        except (ValueError, IndexError):
            # non-numeric argument, or "-" with fewer than two tasks
            return "", f"Invalid job: {args[0]}\n"

        if tid not in get_jobs():
            return "", f"Invalid job: {args[0]}\n"
    else:
        return "", f"{wording} expects 0 or 1 arguments, not {len(args)}\n"

    # Put this one on top of the queue
    tasks.remove(tid)
    tasks.appendleft(tid)

    job = get_task(tid)
    job["bg"] = False
    job["status"] = "running"
    if XSH.env.get("XONSH_INTERACTIVE"):
        print_one_job(tid)
    pipeline = job["pipeline"]
    # Only fg re-attaches the job's output to the terminal.
    pipeline.resume(job, tee_output=(wording == "fg"))
xonsh command: fg Bring the currently active job to the foreground, or, if a single number is given as an argument, bring that job to the foreground. Additionally, specify "+" for the most recent job and "-" for the second most recent job.
def fg(args, stdin=None):
    """xonsh command: fg

    Bring the currently active job to the foreground, or, if a single
    number is given as an argument, bring that job to the foreground.
    "+" selects the most recent job and "-" the second most recent job.
    """
    return resume_job(args, wording="fg")
xonsh command: bg Resume execution of the currently active job in the background, or, if a single number is given as an argument, resume that job in the background.
def bg(args, stdin=None):
    """xonsh command: bg

    Resume execution of the currently active job in the background, or,
    if a single number is given as an argument, resume that job in the
    background.
    """
    err = resume_job(args, wording="bg")
    if err is not None:
        return err
    # Success: the resumed job is now at the front of the queue; mark it
    # as background and send it SIGCONT.
    task = get_task(get_tasks()[0])
    task["bg"] = True
    _continue(task)
Return currently running jobs ids
def job_id_completer(xsh, **_):
    """Yield completions for the ids of currently tracked jobs."""
    for jid in get_jobs():
        yield RichCompletion(str(jid), description=format_job_string(jid))
Remove the specified jobs from the job table; the shell will no longer report their status, and will not complain if you try to exit an interactive shell with them running or stopped. If the jobs are currently stopped and the $AUTO_CONTINUE option is not set ($AUTO_CONTINUE = False), a warning is printed containing information about how to make them continue after they have been disowned. Parameters ---------- job_ids Jobs to act on or none to disown the current job force_auto_continue : -c, --continue Automatically continue stopped jobs when they are disowned, equivalent to setting $AUTO_CONTINUE=True
def disown_fn(
    job_ids: Annotated[
        tp.Sequence[int], Arg(type=int, nargs="*", completer=job_id_completer)
    ],
    force_auto_continue=False,
):
    """Remove the specified jobs from the job table; the shell will no longer
    report their status, and will not complain if you try to exit an
    interactive shell with them running or stopped.

    If the jobs are currently stopped and the $AUTO_CONTINUE option is not set
    ($AUTO_CONTINUE = False), a warning is printed containing information about
    how to make them continue after they have been disowned.

    Parameters
    ----------
    job_ids
        Jobs to act on or none to disown the current job
    force_auto_continue : -c, --continue
        Automatically continue stopped jobs when they are disowned, equivalent
        to setting $AUTO_CONTINUE=True
    """
    tasks = get_tasks()
    if len(tasks) == 0:
        return "", "There are no active jobs"

    messages = []
    # if args.job_ids is empty, use the active task
    for tid in job_ids or [tasks[0]]:
        try:
            current_task = get_task(tid)
        except KeyError:
            return "", f"'{tid}' is not a valid job ID"

        auto_cont = XSH.env.get("AUTO_CONTINUE", False)
        if auto_cont or force_auto_continue:
            # Continue stopped jobs so they don't hang untracked.
            _continue(current_task)
        elif current_task["status"] == "stopped":
            messages.append(
                f"warning: job is suspended, use "
                f"'kill -CONT -{current_task['pids'][-1]}' "
                f"to resume\n"
            )

        # Stop tracking this task
        tasks.remove(tid)
        del get_jobs()[tid]
        messages.append(f"Removed job {tid} ({current_task['status']})")

    if messages:
        return "".join(messages)
JSON serializer for xonsh custom data structures. This is only called when another normal JSON types are not found.
def serialize_xonsh_json(val):
    """JSON fallback serializer for xonsh's custom data structures.

    Invoked by the JSON encoder only for objects it cannot serialize
    natively; renders them via ``str()``.
    """
    return str(val)
Decorator for constructing lazy objects from a function.
def lazyobject(f: tp.Callable[..., RT]) -> RT:
    """Decorator for constructing lazy objects from a function.

    Evaluation of ``f`` is deferred until the object is first used.
    """
    return LazyObject(f, f.__globals__, f.__name__)
Decorator for constructing lazy dicts from a function.
def lazydict(f):
    """Decorator for constructing lazy dicts from a function; entries are
    materialized on first access."""
    return LazyDict(f, f.__globals__, f.__name__)
Decorator for constructing lazy booleans from a function.
def lazybool(f):
    """Decorator for constructing lazy booleans from a function; ``f`` is
    only evaluated when the truth value is first needed."""
    return LazyBool(f, f.__globals__, f.__name__)
Entry point for loading modules in background thread. Parameters ---------- name : str Module name to load in background thread. package : str or None, optional Package name, has the same meaning as in importlib.import_module(). debug : str, optional Debugging symbol name to look up in the environment. env : Mapping or None, optional Environment. This will default to __xonsh__.env, if available, and os.environ otherwise. replacements : Mapping or None, optional Dictionary mapping fully qualified module names (eg foo.bar.baz) that import the lazily loaded module, with the variable name in that module. For example, suppose that foo.bar imports module a as b, this dict is then {'foo.bar': 'b'}. Returns ------- module : ModuleType This is either the original module that is found in sys.modules or a proxy module that will delay attribute access until the module is fully loaded.
def load_module_in_background(
    name, package=None, debug="DEBUG", env=None, replacements=None
):
    """Entry point for loading modules in background thread.

    Parameters
    ----------
    name : str
        Module name to load in background thread.
    package : str or None, optional
        Package name, has the same meaning as in importlib.import_module().
    debug : str, optional
        Debugging symbol name to look up in the environment.
    env : Mapping or None, optional
        Environment.  This will default to __xonsh__.env, if available, and
        os.environ otherwise.
    replacements : Mapping or None, optional
        Dictionary mapping fully qualified module names (eg foo.bar.baz)
        that import the lazily loaded module, with the variable name in that
        module. For example, suppose that foo.bar imports module a as b,
        this dict is then {'foo.bar': 'b'}.

    Returns
    -------
    module : ModuleType
        This is either the original module that is found in sys.modules or
        a proxy module that will block until delay attribute access until
        the module is fully loaded.
    """
    modname = importlib.util.resolve_name(name, package)
    # Already imported: nothing to do, return the real module.
    if modname in sys.modules:
        return sys.modules[modname]
    if env is None:
        xonsh_obj = getattr(builtins, "__xonsh__", None)
        env = os.environ if xonsh_obj is None else getattr(xonsh_obj, "env", os.environ)
    if env.get(debug, None):
        # In debug mode, import synchronously so errors surface immediately.
        mod = importlib.import_module(name, package=package)
        return mod
    # Install a blocking proxy in sys.modules and kick off the real import
    # on a background thread.
    proxy = sys.modules[modname] = BackgroundModuleProxy(modname)
    BackgroundModuleLoader(name, package, replacements or {})
    return proxy
Creates an index for a JSON file.
def index(obj, sort_keys=False):
    """Serialize ``obj`` to JSON and build its index.

    Returns the JSON string together with a dict holding the "offsets"
    and "sizes" maps produced during serialization.
    """
    s, offsets, _, sizes = _to_json_with_size(obj, sort_keys=sort_keys)
    return s, {"offsets": offsets, "sizes": sizes}
Dumps an object to JSON with an index.
def dumps(obj, sort_keys=False):
    """Dump ``obj`` to an indexed-JSON string built from JSON_FORMAT."""
    data, idx = index(obj, sort_keys=sort_keys)
    jdx = json.dumps(idx, sort_keys=sort_keys)
    # Fixed offsets into JSON_FORMAT's header: the index presumably starts
    # at byte 69 and the data 11 bytes after the index ends — these must
    # match the JSON_FORMAT template exactly (TODO: confirm against it).
    iloc = 69
    ilen = len(jdx)
    dloc = iloc + ilen + 11
    dlen = len(data)
    return JSON_FORMAT.format(
        index=jdx, data=data, iloc=iloc, ilen=ilen, dloc=dloc, dlen=dlen
    )
Dumps an object to JSON file.
def ljdump(obj, fp, sort_keys=False):
    """Write ``obj`` as indexed JSON to the open file object ``fp``."""
    fp.write(dumps(obj, sort_keys=sort_keys))
Mapping from ``tokenize`` tokens (or token types) to PLY token types. If a simple one-to-one mapping from ``tokenize`` to PLY exists, the lexer will look it up here and generate a single PLY token of the given type. Otherwise, it will fall back to handling that token using one of the handlers in ``special_handlers``.
def token_map():
    """Mapping from ``tokenize`` tokens (or token types) to PLY token types.

    If a simple one-to-one mapping from ``tokenize`` to PLY exists, the
    lexer will look it up here and generate a single PLY token of the given
    type.  Otherwise, it will fall back to handling that token using one of
    the handlers in ``special_handlers``.

    Keys are either ``(OP, op_string)`` pairs or bare token types.
    """
    tm = {}
    # operators
    _op_map = {
        # punctuation
        ",": "COMMA",
        ".": "PERIOD",
        ";": "SEMI",
        ":": "COLON",
        "...": "ELLIPSIS",
        # basic operators
        "+": "PLUS",
        "-": "MINUS",
        "*": "TIMES",
        "@": "AT",
        "/": "DIVIDE",
        "//": "DOUBLEDIV",
        "%": "MOD",
        "**": "POW",
        "|": "PIPE",
        "~": "TILDE",
        "^": "XOR",
        "<<": "LSHIFT",
        ">>": "RSHIFT",
        "<": "LT",
        "<=": "LE",
        ">": "GT",
        ">=": "GE",
        "==": "EQ",
        "!=": "NE",
        "->": "RARROW",
        # assignment operators
        "=": "EQUALS",
        "+=": "PLUSEQUAL",
        "-=": "MINUSEQUAL",
        "*=": "TIMESEQUAL",
        "@=": "ATEQUAL",
        "/=": "DIVEQUAL",
        "%=": "MODEQUAL",
        "**=": "POWEQUAL",
        "<<=": "LSHIFTEQUAL",
        ">>=": "RSHIFTEQUAL",
        "&=": "AMPERSANDEQUAL",
        "^=": "XOREQUAL",
        "|=": "PIPEEQUAL",
        "//=": "DOUBLEDIVEQUAL",
        # extra xonsh operators
        "?": "QUESTION",
        "??": "DOUBLE_QUESTION",
        "@$": "ATDOLLAR",
        "&": "AMPERSAND",
        ":=": "COLONEQUAL",
    }
    for op, typ in _op_map.items():
        tm[(OP, op)] = typ
    # whole token types mapped one-to-one
    tm[IOREDIRECT1] = "IOREDIRECT1"
    tm[IOREDIRECT2] = "IOREDIRECT2"
    tm[STRING] = "STRING"
    tm[DOLLARNAME] = "DOLLAR_NAME"
    tm[NUMBER] = "NUMBER"
    tm[SEARCHPATH] = "SEARCHPATH"
    tm[NEWLINE] = "NEWLINE"
    tm[INDENT] = "INDENT"
    tm[DEDENT] = "DEDENT"
    # python 3.10 (backwards and name token compatible) tokens
    tm[MATCH] = "MATCH"
    tm[CASE] = "CASE"
    return tm
Function for handling name tokens
def handle_name(state, token):
    """Function for handling name tokens.

    In Python mode, keywords (plus soft keywords ``match``/``case``) get
    their own uppercased token type; in subprocess mode only whitespace-
    preceded ``and``/``or`` keep operator meaning, everything else is NAME.
    """
    typ = "NAME"
    state["last"] = token
    needs_whitespace = token.string in NEED_WHITESPACE
    # Check the character just before the token for required whitespace.
    has_whitespace = needs_whitespace and RE_NEED_WHITESPACE.match(
        token.line[max(0, token.start[1] - 1) :]
    )
    if state["pymode"][-1][0]:
        if needs_whitespace and not has_whitespace:
            # keyword glued to surrounding text: treat as a plain NAME
            pass
        elif token.string in kwmod.kwlist + ["match", "case"]:
            typ = token.string.upper()
        yield _new_token(typ, token.string, token.start)
    else:
        if has_whitespace and token.string == "and":
            yield _new_token("AND", token.string, token.start)
        elif has_whitespace and token.string == "or":
            yield _new_token("OR", token.string, token.start)
        else:
            yield _new_token("NAME", token.string, token.start)
Function for handling ``)``
def handle_rparen(state, token):
    """Function for handling ``)``; emits ERRORTOKEN on an unmatched
    delimiter unless the lexer is tolerant."""
    err = _end_delimiter(state, token)
    if err is not None and not state["tolerant"]:
        yield _new_token("ERRORTOKEN", err, token.start)
    else:
        state["last"] = token
        yield _new_token("RPAREN", ")", token.start)
Function for handling ``}``
def handle_rbrace(state, token):
    """Function for handling ``}``; emits ERRORTOKEN on an unmatched
    delimiter unless the lexer is tolerant."""
    err = _end_delimiter(state, token)
    if err is not None and not state["tolerant"]:
        yield _new_token("ERRORTOKEN", err, token.start)
    else:
        state["last"] = token
        yield _new_token("RBRACE", "}", token.start)
Function for handling ``]``
def handle_rbracket(state, token):
    """Function for handling ``]``; emits ERRORTOKEN on an unmatched
    delimiter unless the lexer is tolerant."""
    err = _end_delimiter(state, token)
    if err is not None and not state["tolerant"]:
        yield _new_token("ERRORTOKEN", err, token.start)
    else:
        state["last"] = token
        yield _new_token("RBRACKET", "]", token.start)
Function for handling special whitespace characters in subprocess mode
def handle_error_space(state, token):
    """Function for handling special whitespace characters in subprocess
    mode; in Python mode they are dropped entirely."""
    if state["pymode"][-1][0]:
        return
    state["last"] = token
    yield _new_token("WS", token.string, token.start)
Function for handling special line continuations as whitespace characters in subprocess mode.
def handle_error_linecont(state, token):
    """Function for handling special line continuations as whitespace
    characters in subprocess mode.
    """
    if state["pymode"][-1][0]:
        return
    # Only a continuation directly adjacent to the previous token counts;
    # one separated by whitespace is ignored.
    if state["last"].end != token.start:
        return
    state["last"] = token
    yield _new_token("WS", "\\", token.start)
Function for handling error tokens
def handle_error_token(state, token):
    """Function for handling error tokens: ``!`` becomes BANG, unknown
    text becomes NAME in subprocess mode and ERRORTOKEN in Python mode."""
    state["last"] = token
    if token.string == "!":
        kind = "BANG"
    elif state["pymode"][-1][0]:
        kind = "ERRORTOKEN"
    else:
        kind = "NAME"
    yield _new_token(kind, token.string, token.start)
Function for handling tokens that should be ignored
def handle_ignore(state, token):
    """Function for handling tokens that should be ignored; yields nothing."""
    yield from ()
Mapping from ``tokenize`` tokens (or token types) to the proper function for generating PLY tokens from them. In addition to yielding PLY tokens, these functions may manipulate the Lexer's state.
def special_handlers():
    """Mapping from ``tokenize`` tokens (or token types) to the proper
    function for generating PLY tokens from them.  In addition to
    yielding PLY tokens, these functions may manipulate the Lexer's state.

    Keys are ``(type, string)`` pairs or bare token types; values are
    generator functions taking ``(state, token)``.
    """
    sh = {
        # tokens that produce no output
        NL: handle_ignore,
        COMMENT: handle_ignore,
        ENCODING: handle_ignore,
        ENDMARKER: handle_ignore,
        NAME: handle_name,
        ERRORTOKEN: handle_error_token,
        # I/O redirection operators
        LESS: handle_redirect,
        GREATER: handle_redirect,
        RIGHTSHIFT: handle_redirect,
        IOREDIRECT1: handle_redirect,
        IOREDIRECT2: handle_redirect,
        (OP, "<"): handle_redirect,
        (OP, ">"): handle_redirect,
        (OP, ">>"): handle_redirect,
        # closing delimiters (must pop the matcher state)
        (OP, ")"): handle_rparen,
        (OP, "}"): handle_rbrace,
        (OP, "]"): handle_rbracket,
        (OP, "&&"): handle_double_amps,
        (OP, "||"): handle_double_pipe,
        # subprocess-mode whitespace and line continuations
        (ERRORTOKEN, " "): handle_error_space,
        (ERRORTOKEN, "\\\n"): handle_error_linecont,
        (ERRORTOKEN, "\\\r\n"): handle_error_linecont,
    }
    # opening delimiters; the third argument is whether the interior is
    # lexed in Python mode
    _make_matcher_handler("(", "LPAREN", True, ")", sh)
    _make_matcher_handler("[", "LBRACKET", True, "]", sh)
    _make_matcher_handler("{", "LBRACE", True, "}", sh)
    _make_matcher_handler("$(", "DOLLAR_LPAREN", False, ")", sh)
    _make_matcher_handler("$[", "DOLLAR_LBRACKET", False, "]", sh)
    _make_matcher_handler("${", "DOLLAR_LBRACE", True, "}", sh)
    _make_matcher_handler("!(", "BANG_LPAREN", False, ")", sh)
    _make_matcher_handler("![", "BANG_LBRACKET", False, "]", sh)
    _make_matcher_handler("@(", "AT_LPAREN", True, ")", sh)
    _make_matcher_handler("@$(", "ATDOLLAR_LPAREN", False, ")", sh)
    return sh
General-purpose token handler. Makes use of ``token_map`` or ``special_map`` to yield one or more PLY tokens from the given input. Parameters ---------- state The current state of the lexer, including information about whether we are in Python mode or subprocess mode, which changes the lexer's behavior. Also includes the stream of tokens yet to be considered. token The token (from ``tokenize``) currently under consideration
def handle_token(state, token):
    """General-purpose token handler.  Makes use of ``token_map`` or
    ``special_map`` to yield one or more PLY tokens from the given input.

    Parameters
    ----------
    state
        The current state of the lexer, including information about whether
        we are in Python mode or subprocess mode, which changes the lexer's
        behavior.  Also includes the stream of tokens yet to be considered.
    token
        The token (from ``tokenize``) currently under consideration
    """
    typ = token.type
    st = token.string
    pymode = state["pymode"][-1][0]
    if not pymode:
        # In subprocess mode, whitespace between tokens is significant:
        # emit a WS token for any same-line gap since the previous token.
        if state["last"] is not None and state["last"].end != token.start:
            cur = token.start
            old = state["last"].end
            if cur[0] == old[0] and cur[1] > old[1]:
                yield _new_token("WS", token.line[old[1] : cur[1]], old)
    # Dispatch order matters: exact (type, string) pairs take precedence
    # over bare token types, and special handlers over the plain map.
    if (typ, st) in special_handlers:
        yield from special_handlers[(typ, st)](state, token)
    elif (typ, st) in token_map:
        state["last"] = token
        yield _new_token(token_map[(typ, st)], st, token.start)
    elif typ in special_handlers:
        yield from special_handlers[typ](state, token)
    elif typ in token_map:
        state["last"] = token
        yield _new_token(token_map[typ], st, token.start)
    else:
        m = f"Unexpected token: {token}"
        yield _new_token("ERRORTOKEN", m, token.start)
Given a string containing xonsh code, generates a stream of relevant PLY tokens using ``handle_token``.
def get_tokens(s, tolerant, pymode=True, tokenize_ioredirects=True):
    """Given a string containing xonsh code, generates a stream of
    relevant PLY tokens using ``handle_token``.
    """
    state = {
        "indents": [0],
        "last": None,
        # stack of (in_python_mode, opener, closer, position) frames
        "pymode": [(pymode, "", "", (0, 0))],
        "stream": tokenize(
            io.BytesIO(s.encode("utf-8")).readline, tolerant, tokenize_ioredirects
        ),
        "tolerant": tolerant,
    }
    while True:
        try:
            token = next(state["stream"])
            yield from handle_token(state, token)
        except StopIteration:
            # a leftover pymode frame means an opener was never closed
            if len(state["pymode"]) > 1 and not tolerant:
                pm, o, m, p = state["pymode"][-1]
                l, c = p
                e = 'Unmatched "{}" at line {}, column {}'
                yield _new_token("ERRORTOKEN", e.format(o, l, c), (0, 0))
            break
        except TokenError as e:
            # this is recoverable in single-line mode (from the shell)
            # (e.g., EOF while scanning string literal)
            yield _new_token("ERRORTOKEN", e.args[0], (0, 0))
            break
        except IndentationError as e:
            # this is never recoverable
            yield _new_token("ERRORTOKEN", e, (0, 0))
            break
Gets a sysctl value by name. If return_str is true, this will return a string representation, else it will return the raw value.
def sysctlbyname(name, return_str=True):
    """Look up a sysctl value by name via libc.

    If return_str is true, this will return a string representation,
    else it will return the raw value.
    """
    # adapted from https://gist.github.com/pudquick/581a71425439f2cf8f09
    size = c_uint(0)
    # First call with a NULL buffer: ask only for the required size.
    LIBC.sysctlbyname(name, None, byref(size), None, 0)
    buf = create_string_buffer(size.value)
    # Second call: fill the freshly allocated buffer with the value.
    LIBC.sysctlbyname(name, buf, byref(size), None, 0)
    return buf.value if return_str else buf.raw
Proxy function for loading process title
def get_setproctitle():
    """Return setproctitle.setproctitle if the package is installed,
    otherwise None."""
    try:
        from setproctitle import setproctitle
    except ImportError:
        return None
    return setproctitle
Return a path only if the path is actually legal (file or directory) This is very similar to argparse.FileType, except that it doesn't return an open file handle, but rather simply validates the path.
def path_argument(s):
    """Return a path only if the path is actually legal (file or directory).

    This is very similar to argparse.FileType, except that it doesn't
    return an open file handle, but rather simply validates the path.
    Tilde expressions are expanded and the absolute path is returned.
    """
    path = os.path.abspath(os.path.expanduser(s))
    if os.path.exists(path):
        return path
    raise argparse.ArgumentTypeError(
        f"{path!r} must be a valid path to a file or directory"
    )
Starts up the essential services in the proper order. This returns the environment instance as a convenience.
def start_services(shell_kwargs, args, pre_env=None):
    """Starts up the essential services in the proper order.
    This returns the environment instance as a convenience.

    Order matters: the execer and builtins must exist before import hooks,
    xontribs, and rc files run, and the shell is created last.
    """
    if pre_env is None:
        pre_env = {}
    # create execer, which loads builtins
    ctx = shell_kwargs.get("ctx", {})
    debug = to_bool_or_int(os.getenv("XONSH_DEBUG", "0"))
    events.on_timingprobe.fire(name="pre_execer_init")
    execer = Execer(
        filename="<stdin>",
        debug_level=debug,
        scriptcache=shell_kwargs.get("scriptcache", True),
        cacheall=shell_kwargs.get("cacheall", False),
    )
    XSH.load(ctx=ctx, execer=execer, inherit_env=shell_kwargs.get("inherit_env", True))
    events.on_timingprobe.fire(name="post_execer_init")

    install_import_hooks(execer)

    env = XSH.env
    # apply caller-supplied environment overrides before rc files run
    for k, v in pre_env.items():
        env[k] = v

    _autoload_xontribs(env)
    _load_rc_files(shell_kwargs, args, env, execer, ctx)

    # create shell
    XSH.shell = Shell(execer=execer, **shell_kwargs)
    ctx["__name__"] = "__main__"
    return env
Setup for main xonsh entry point. Returns parsed arguments.
def premain(argv=None):
    """Setup for main xonsh entry point.  Returns parsed arguments."""
    if argv is None:
        argv = sys.argv[1:]
    setup_timings(argv)
    setproctitle = get_setproctitle()
    if setproctitle is not None:
        setproctitle(" ".join(["xonsh"] + argv))
    args = parser.parse_args(argv)
    if args.help:
        parser.print_help()
        parser.exit()
    shell_kwargs = {
        "shell_type": args.shell_type,
        "completer": False,
        "login": False,
        "scriptcache": args.scriptcache,
        "cacheall": args.cacheall,
        "ctx": XSH.ctx,
    }
    # a leading "-" in argv[0] marks a login shell (login(1) convention)
    if args.login or sys.argv[0].startswith("-"):
        args.login = True
        shell_kwargs["login"] = True
    if args.norc:
        shell_kwargs["norc"] = True
    elif args.rc:
        shell_kwargs["rc"] = args.rc
    shell_kwargs["inherit_env"] = args.inherit_env
    sys.displayhook = _pprint_displayhook
    # determine the run mode: -c command, script file, piped stdin, or
    # a fully interactive shell (the fallback)
    if args.command is not None:
        args.mode = XonshMode.single_command
        shell_kwargs["shell_type"] = "none"
    elif args.file is not None:
        args.mode = XonshMode.script_from_file
        shell_kwargs["shell_type"] = "none"
    elif not sys.stdin.isatty() and not args.force_interactive:
        args.mode = XonshMode.script_from_stdin
        shell_kwargs["shell_type"] = "none"
    else:
        args.mode = XonshMode.interactive
        shell_kwargs["completer"] = True
        shell_kwargs["login"] = True
    pre_env = {
        "XONSH_LOGIN": shell_kwargs["login"],
        "XONSH_INTERACTIVE": args.force_interactive
        or (args.mode == XonshMode.interactive),
    }
    env = start_services(shell_kwargs, args, pre_env=pre_env)
    # -D NAME=VALUE definitions from the command line
    if args.defines is not None:
        env.update([x.split("=", 1) for x in args.defines])
    return args
Main entry point for xonsh cli.
def main_xonsh(args):
    """Main entry point for xonsh cli.

    Runs the mode selected by premain() (interactive loop, single command,
    script file, or stdin script) and returns the process exit code.
    """
    if not ON_WINDOWS:

        def func_sig_ttin_ttou(n, f):
            pass

        signal.signal(signal.SIGTTIN, func_sig_ttin_ttou)
        signal.signal(signal.SIGTTOU, func_sig_ttin_ttou)

    events.on_post_init.fire()
    env = XSH.env
    shell = XSH.shell
    history = XSH.history
    exit_code = 0

    if shell and not env["XONSH_INTERACTIVE"]:
        shell.ctx.update({"exit": sys.exit})

    # store a sys.exc_info() tuple to record any exception that might occur
    # in the user code that we are about to execute; if this does not change,
    # no exceptions were thrown.  Otherwise, print a traceback that does not
    # expose xonsh internals
    exc_info = None, None, None

    try:
        if args.mode == XonshMode.interactive:
            # enter the shell
            # Set again here because it is possible to call main_xonsh()
            # without calling premain(), namely in the tests.
            env["XONSH_INTERACTIVE"] = True
            ignore_sigtstp()
            if (
                env["XONSH_INTERACTIVE"]
                and not any(os.path.isfile(i) for i in env["XONSHRC"])
                and not any(os.path.isdir(i) for i in env["XONSHRC_DIR"])
            ):
                # no rc files at all: this looks like a first run
                print_welcome_screen()
            events.on_pre_cmdloop.fire()
            try:
                shell.shell.cmdloop()
            finally:
                events.on_post_cmdloop.fire()
        elif args.mode == XonshMode.single_command:
            # run a single command and exit
            exc_info = run_code_with_cache(
                args.command.lstrip(),
                "<string>",
                shell.execer,
                glb=shell.ctx,
                mode="single",
            )
            if history is not None and history.last_cmd_rtn is not None:
                exit_code = history.last_cmd_rtn
        elif args.mode == XonshMode.script_from_file:
            # run a script contained in a file
            path = os.path.abspath(os.path.expanduser(args.file))
            if os.path.isfile(path):
                sys.argv = [args.file] + args.args
                env.update(make_args_env())  # $ARGS is not sys.argv
                env["XONSH_SOURCE"] = path
                shell.ctx.update({"__file__": args.file, "__name__": "__main__"})
                exc_info = run_script_with_cache(
                    args.file, shell.execer, glb=shell.ctx, loc=None, mode="exec"
                )
            else:
                print(f"xonsh: {args.file}: No such file or directory.")
                exit_code = 1
        elif args.mode == XonshMode.script_from_stdin:
            # run a script given on stdin
            code = sys.stdin.read()
            exc_info = run_code_with_cache(
                code, "<stdin>", shell.execer, glb=shell.ctx, loc=None, mode="exec"
            )
    except SyntaxError:
        exit_code = 1
        debug_level = env.get("XONSH_DEBUG", 0)
        if debug_level == 0:
            # print error without traceback
            display_error_message(sys.exc_info())
        else:
            # pass error to finally clause
            exc_info = sys.exc_info()
    finally:
        if exc_info != (None, None, None):
            err_type, err, _ = exc_info
            # let SystemExit propagate so sys.exit() behaves normally
            if err_type is SystemExit:
                raise err
            print_exception(None, exc_info)
            exit_code = 1
        events.on_exit.fire()
    postmain(args)
    return exit_code
Teardown for main xonsh entry point, accepts parsed arguments.
def postmain(args=None):
    """Teardown for main xonsh entry point.

    ``args`` (the parsed arguments) is accepted for symmetry with
    premain() but is not used.
    """
    XSH.unload()
    XSH.shell = None
Generator that runs pre- and post-main() functions. This has two iterations. The first yields the shell. The second returns None but cleans up the shell.
def main_context(argv=None):
    """Generator that runs pre- and post-main() functions.

    The first iteration yields the shell; resuming the generator cleans
    the shell up.
    """
    parsed = premain(argv)
    yield XSH.shell
    postmain(parsed)
Starts up a new xonsh shell. Calling this function in another package's ``__init__.py`` will allow xonsh to be fully used in the package in headless or headed mode. This function is primarily intended to make starting up xonsh for 3rd party packages easier. Here is example of using this at the top of an ``__init__.py``:: from xonsh.main import setup setup() del setup Parameters ---------- ctx : dict-like or None, optional The xonsh context to start with. If None, an empty dictionary is provided. shell_type : str, optional The type of shell to start. By default this is 'none', indicating we should start in headless mode. env : dict-like, optional Environment to update the current environment with after the shell has been initialized. aliases : dict-like, optional Aliases to add after the shell has been initialized. xontribs : iterable of str, optional Xontrib names to load. threadable_predictors : dict-like, optional Threadable predictors to start up with. These override the defaults.
def setup(
    ctx=None,
    shell_type="none",
    env=(("RAISE_SUBPROC_ERROR", True),),
    aliases=(),
    xontribs=(),
    threadable_predictors=(),
):
    """Starts up a new xonsh shell. Calling this function in another
    package's ``__init__.py`` will allow xonsh to be fully used in the
    package in headless or headed mode. This function is primarily intended
    to make starting up xonsh for 3rd party packages easier.

    Here is an example of using this at the top of an ``__init__.py``::

        from xonsh.main import setup
        setup()
        del setup

    Parameters
    ----------
    ctx : dict-like or None, optional
        The xonsh context to start with. If None, an empty dictionary
        is provided.
    shell_type : str, optional
        The type of shell to start. By default this is 'none', indicating
        we should start in headless mode.
    env : dict-like, optional
        Environment to update the current environment with after the shell
        has been initialized.
    aliases : dict-like, optional
        Aliases to add after the shell has been initialized.
    xontribs : iterable of str, optional
        Xontrib names to load.
    threadable_predictors : dict-like, optional
        Threadable predictors to start up with. These override the defaults.
    """
    ctx = {} if ctx is None else ctx
    # setup xonsh ctx and execer; only initialize once per process --
    # ``builtins.__xonsh__`` marks that a session already exists.
    if not hasattr(builtins, "__xonsh__"):
        execer = Execer(filename="<stdin>")
        XSH.load(ctx=ctx, execer=execer)
        XSH.shell = Shell(execer, ctx=ctx, shell_type=shell_type)
    # These updates run even for an existing session.
    XSH.env.update(env)
    install_import_hooks(XSH.execer)
    XSH.aliases.update(aliases)
    if xontribs:
        xontribs_load(xontribs)
    if threadable_predictors:
        XSH.commands_cache.threadable_predictors.update(threadable_predictors)
Converts a bytes string with python source code to unicode. Unicode strings are passed through unchanged. Byte strings are checked for the python source file encoding cookie to determine encoding. txt can be either a bytes buffer or a string containing the source code.
def source_to_unicode(txt, errors="replace", skip_encoding_cookie=True):
    """Converts a bytes string with python source code to unicode.
    Unicode strings are passed through unchanged. Byte strings are checked
    for the python source file encoding cookie to determine encoding.
    txt can be either a bytes buffer or a string containing the source
    code.
    """
    # Already decoded -- nothing to do.
    if isinstance(txt, str):
        return txt
    stream = io.BytesIO(txt) if isinstance(txt, bytes) else txt
    try:
        encoding, _ = detect_encoding(stream.readline)
    except SyntaxError:
        # Malformed cookie: fall back to a conservative default.
        encoding = "ascii"
    stream.seek(0)
    wrapper = io.TextIOWrapper(stream, encoding, errors=errors, line_buffering=True)
    wrapper.mode = "r"
    if skip_encoding_cookie:
        return "".join(strip_encoding_cookie(wrapper))
    return wrapper.read()
Generator to pull lines from a text-mode file, skipping the encoding cookie if it is found in the first two lines.
def strip_encoding_cookie(filelike):
    """Generator to pull lines from a text-mode file, skipping the encoding
    cookie if it is found in the first two lines.
    """
    lines = iter(filelike)
    # Only the first two lines may legally carry the cookie (PEP 263).
    for _ in range(2):
        try:
            line = next(lines)
        except StopIteration:
            return
        if not cookie_comment_re.match(line):
            yield line
    yield from lines
Read a Python file, using the encoding declared inside the file. Parameters ---------- filename : str The path to the file to read. skip_encoding_cookie : bool If True (the default), and the encoding declaration is found in the first two lines, that line will be excluded from the output - compiling a unicode string with an encoding declaration is a SyntaxError in Python 2. Returns ------- A unicode string containing the contents of the file.
def read_py_file(filename, skip_encoding_cookie=True):
    """Read a Python file, using the encoding declared inside the file.

    Parameters
    ----------
    filename : str
        The path to the file to read.
    skip_encoding_cookie : bool
        If True (the default), and the encoding declaration is found in the
        first two lines, that line will be excluded from the output -
        compiling a unicode string with an encoding declaration is a
        SyntaxError in Python 2.

    Returns
    -------
    A unicode string containing the contents of the file.
    """
    # tokopen is the tokenize-aware open() defined in this module.
    with tokopen(filename) as fobj:
        if not skip_encoding_cookie:
            return fobj.read()
        return "".join(strip_encoding_cookie(fobj))
Read a Python file from a URL, using the encoding declared inside the file. Parameters ---------- url : str The URL from which to fetch the file. errors : str How to handle decoding errors in the file. Options are the same as for bytes.decode(), but here 'replace' is the default. skip_encoding_cookie : bool If True (the default), and the encoding declaration is found in the first two lines, that line will be excluded from the output - compiling a unicode string with an encoding declaration is a SyntaxError in Python 2. Returns ------- A unicode string containing the contents of the file.
def read_py_url(url, errors="replace", skip_encoding_cookie=True):
    """Read a Python file from a URL, using the encoding declared inside the file.

    Parameters
    ----------
    url : str
        The URL from which to fetch the file.
    errors : str
        How to handle decoding errors in the file. Options are the same
        as for bytes.decode(), but here 'replace' is the default.
    skip_encoding_cookie : bool
        If True (the default), and the encoding declaration is found in the
        first two lines, that line will be excluded from the output -
        compiling a unicode string with an encoding declaration is a
        SyntaxError in Python 2.

    Returns
    -------
    A unicode string containing the contents of the file.
    """
    # Deferred import for faster start. The old Python 2 fallback
    # (``from urllib import urlopen``) was removed: urllib.request is
    # always available on Python 3, which this file already requires.
    from urllib.request import urlopen

    response = urlopen(url)
    buf = io.BytesIO(response.read())
    return source_to_unicode(buf, errors, skip_encoding_cookie)
Given a list, returns a readline() function that returns the next element with each call.
def _list_readline(x): """Given a list, returns a readline() function that returns the next element with each call. """ x = iter(x) def readline(): return next(x) return readline
``True`` if on a BSD operating system, else ``False``.
def ON_BSD():
    """``True`` if on a BSD operating system, else ``False``."""
    # Each flag is a lazy boolean; force evaluation of each.
    flags = (ON_FREEBSD, ON_NETBSD, ON_OPENBSD, ON_DRAGONFLY)
    return any(bool(flag) for flag in flags)
True if we are on BeOS or Haiku.
def ON_BEOS():
    """True if we are on BeOS or Haiku."""
    return sys.platform in ("beos5", "haiku1")
True if we are on Windows Subsystem for Linux (WSL)
def ON_WSL():
    """True if we are on Windows Subsystem for Linux (WSL)"""
    # WSL kernels report "microsoft" in the release string.
    release = platform.release()
    return "microsoft" in release
The python version info tuple in a canonical bytes form.
def PYTHON_VERSION_INFO_BYTES():
    """The python version info tuple in a canonical bytes form."""
    # sys.version_info includes releaselevel/serial, e.g. "3.11.4.final.0".
    parts = (str(piece) for piece in sys.version_info)
    return ".".join(parts).encode()
``True`` if `pygments` is available, else ``False``.
def HAS_PYGMENTS():
    """``True`` if `pygments` is available, else ``False``."""
    # Probe importability without actually importing the package.
    return importlib.util.find_spec("pygments") is not None
pygments.__version__ version if available, else None.
def pygments_version():
    """pygments.__version__ version if available, else None."""
    # HAS_PYGMENTS is evaluated for truthiness (lazy boolean elsewhere).
    if not HAS_PYGMENTS:
        return None
    import pygments

    return pygments.__version__
Returns `pygments`'s version as tuple of integers.
def pygments_version_info():
    """Returns `pygments`'s version as tuple of integers."""
    if not HAS_PYGMENTS:
        return None
    # Trim any comparison/markup characters before splitting on dots.
    parts = pygments_version().strip("<>+-=.").split(".")
    return tuple(map(int, parts))
Tests if the `prompt_toolkit` is available.
def has_prompt_toolkit():
    """Tests if the `prompt_toolkit` is available."""
    # Probe importability without importing the package.
    return importlib.util.find_spec("prompt_toolkit") is not None
Returns `prompt_toolkit.__version__` if available, else ``None``.
def ptk_version():
    """Returns `prompt_toolkit.__version__` if available, else ``None``."""
    if not has_prompt_toolkit():
        return None
    import prompt_toolkit

    # Very old releases lacked __version__; report a sentinel instead.
    return getattr(prompt_toolkit, "__version__", "<0.57")
Returns `prompt_toolkit`'s version as tuple of integers.
def ptk_version_info():
    """Returns `prompt_toolkit`'s version as tuple of integers."""
    if not has_prompt_toolkit():
        return None
    # Trim any comparison/markup characters before splitting on dots.
    parts = ptk_version().strip("<>+-=.").split(".")
    return tuple(map(int, parts))
Checks if readline is available to import.
def is_readline_available():
    """Checks if readline is available to import."""
    # Probe importability without importing the module.
    return importlib.util.find_spec("readline") is not None
String of all path separators.
def seps():
    """String of all path separators."""
    all_seps = os.path.sep
    alt = os.path.altsep
    # On POSIX altsep is None; on Windows it is "/".
    if alt is not None:
        all_seps += alt
    return all_seps
This is a safe version of os.path.split(), which does not work on input without a drive.
def pathsplit(p):
    """This is a safe version of os.path.split(), which does not work on input
    without a drive.
    """
    length = len(p)
    if length == 0:
        # Early return: the lazy object `seps` must not be forced for "".
        return "", ""
    # Walk backwards to the last separator character.
    idx = length
    while idx and p[idx - 1] not in seps:
        idx -= 1
    head = p[:idx]
    tail = p[idx:]
    # Strip trailing separators from the head unless that empties it.
    head = head.rstrip(seps) or head
    return head, tail
This is a safe version of os.path.basename(), which does not work on input without a drive. This version does.
def pathbasename(p):
    """This is a safe version of os.path.basename(), which does not work on
    input without a drive.  This version does.
    """
    _, base = pathsplit(p)
    return base
Dispatches to the correct platform-dependent expanduser() function.
def expanduser():
    """Dispatches to the correct platform-dependent expanduser() function."""
    # Windows needs the restricted variant; elsewhere the stdlib is fine.
    return windows_expanduser if ON_WINDOWS else os.path.expanduser
A Windows-specific expanduser() function for xonsh. This is needed since os.path.expanduser() does not check on Windows if the user actually exists. This restricts expanding the '~' if it is not followed by a separator. That is only '~/' and '~\' are expanded.
def windows_expanduser(path):
    """A Windows-specific expanduser() function for xonsh. This is needed
    since os.path.expanduser() does not check on Windows if the user actually
    exists. This restricts expanding the '~' if it is not followed by a
    separator. That is only '~/' and '~\\' are expanded.
    """
    path = str(path)
    # Guard clauses: anything not starting with '~' passes through untouched.
    if not path.startswith("~"):
        return path
    # Bare '~' or '~' followed by a separator: delegate to the stdlib.
    if len(path) == 1 or path[1] in seps:
        return os.path.expanduser(path)
    # '~username' forms are left unexpanded on Windows.
    return path
Returns a tuple containing two strings: the hash and the date.
def githash():
    """Returns a tuple containing two strings: the hash and the date."""
    install_base = os.path.dirname(__file__)
    githash_file = f"{install_base}/dev.githash"
    if not os.path.exists(githash_file):
        return None, None
    try:
        with open(githash_file) as f:
            # File format: "<sha>|<date>" on a single line.
            sha, date_ = f.read().strip().split("|")
    except ValueError:
        # Malformed contents: behave as if no hash info is available.
        return None, None
    return sha, date_
The id of the Linux distribution running on, possibly 'unknown'. None on non-Linux platforms.
def linux_distro():
    """The id of the Linux distribution running on, possibly 'unknown'.
    None on non-Linux platforms.
    """
    if not ON_LINUX:
        return None
    if distro:
        return distro.id()
    if PYTHON_VERSION_INFO < (3, 6, 6):
        return platform.linux_distribution()[0] or "unknown"
    if "-ARCH-" in platform.platform():
        # Arch is the only one we need to detect for now.
        return "arch"
    return "unknown"
Returns the path to git for windows, if available and None otherwise.
def git_for_windows_path():
    """Returns the path to git for windows, if available and None otherwise.

    Reads the ``InstallPath`` value from the Git-for-Windows registry key
    under HKEY_LOCAL_MACHINE; returns None when the key or value is absent.
    """
    import winreg

    try:
        # Use the key as a context manager so the registry handle is
        # always closed (the original leaked it until garbage collection).
        with winreg.OpenKey(
            winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\GitForWindows"
        ) as key:
            gfwp, _ = winreg.QueryValueEx(key, "InstallPath")
    except FileNotFoundError:
        # Key or value missing: Git for Windows is not installed.
        gfwp = None
    return gfwp