Create a server to serve local files. It will create the PREDEFINED_FILES through dd.
def start_server():
    """Create a server to serve local files. It will create the
    PREDEFINED_FILES through dd."""
    with TemporaryDirectory() as directory:
        for file_name, (block_size, count) in PREDEFINED_FILES.items():
            subprocess.check_call(
                [
                    'dd',
                    'if=/dev/zero',
                    f'of={file_name}',
                    f'bs={block_size}',
                    f'count={count}',
                ],
                cwd=directory,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            )

        handler = partial(QuietSimpleHTTPServer, directory=directory)
        server = HTTPServer(('localhost', 0), handler)

        thread = threading.Thread(target=server.serve_forever)
        thread.start()

        yield '{}:{}'.format(*server.socket.getsockname())

        server.shutdown()
        thread.join(timeout=0.5)
Apply various transformations on top of the `prepared_request`'s headers to change the request preparation behavior.
def transform_headers(
    request: requests.Request,
    prepared_request: requests.PreparedRequest
) -> None:
    """Apply various transformations on top of the `prepared_request`'s
    headers to change the request preparation behavior."""

    # Remove 'Content-Length' when it is misplaced by requests.
    if (
        prepared_request.method in IGNORE_CONTENT_LENGTH_METHODS
        and prepared_request.headers.get('Content-Length') == '0'
        and request.headers.get('Content-Length') != '0'
    ):
        prepared_request.headers.pop('Content-Length')

    apply_missing_repeated_headers(
        request.headers,
        prepared_request
    )
Update the given `prepared_request`'s headers with the original ones. This allows the requests to be prepared as usual, and then later merged with headers that are specified multiple times.
def apply_missing_repeated_headers(
    original_headers: HTTPHeadersDict,
    prepared_request: requests.PreparedRequest
) -> None:
    """Update the given `prepared_request`'s headers with the original
    ones. This allows the requests to be prepared as usual, and then later
    merged with headers that are specified multiple times."""

    new_headers = HTTPHeadersDict(prepared_request.headers)
    for prepared_name, prepared_value in prepared_request.headers.items():
        if prepared_name not in original_headers:
            continue

        original_keys, original_values = zip(*filter(
            lambda item: item[0].casefold() == prepared_name.casefold(),
            original_headers.items()
        ))

        if prepared_value not in original_values:
            # If the current value is not among the initial values
            # set for this field, then it means that this field got
            # overridden on the way, and we should preserve it.
            continue

        new_headers.popone(prepared_name)
        new_headers.update(zip(original_keys, original_values))

    prepared_request.headers = new_headers
Translate our `args` into `requests.Request` keyword arguments.
def make_request_kwargs(
    env: Environment,
    args: argparse.Namespace,
    base_headers: HTTPHeadersDict = None,
    request_body_read_callback=lambda chunk: chunk
) -> dict:
    """
    Translate our `args` into `requests.Request` keyword arguments.

    """
    files = args.files
    # Serialize JSON data, if needed.
    data = args.data
    auto_json = data and not args.form
    if (args.json or auto_json) and isinstance(data, dict):
        data = json_dict_to_request_body(data)

    # Finalize headers.
    headers = make_default_headers(args)
    if base_headers:
        headers.update(base_headers)
    headers.update(args.headers)
    if args.offline and args.chunked and 'Transfer-Encoding' not in headers:
        # When online, we let requests set the header instead, to be able
        # to more easily verify chunking is taking place.
        headers['Transfer-Encoding'] = 'chunked'
    headers = finalize_headers(headers)

    if (args.form and files) or args.multipart:
        data, headers['Content-Type'] = get_multipart_data_and_content_type(
            data=args.multipart_data,
            boundary=args.boundary,
            content_type=args.headers.get('Content-Type'),
        )

    return {
        'method': args.method.lower(),
        'url': args.url,
        'headers': headers,
        'data': prepare_request_body(
            env,
            data,
            body_read_callback=request_body_read_callback,
            chunked=args.chunked,
            offline=args.offline,
            content_length_header_value=headers.get('Content-Length'),
        ),
        'auth': args.auth,
        'params': args.params.items(),
    }
Handle `--path-as-is` by replacing the path component of the prepared URL with the path component from the original URL. Other parts stay untouched because other (welcome) processing on the URL might have taken place.

<https://github.com/httpie/cli/issues/895>
<https://ec.haxx.se/http/http-basics#path-as-is>
<https://curl.haxx.se/libcurl/c/CURLOPT_PATH_AS_IS.html>

>>> ensure_path_as_is('http://foo/../', 'http://foo/?foo=bar')
'http://foo/../?foo=bar'
def ensure_path_as_is(orig_url: str, prepped_url: str) -> str:
    """
    Handle `--path-as-is` by replacing the path component
    of the prepared URL with the path component from the
    original URL. Other parts stay untouched because other
    (welcome) processing on the URL might have taken place.

    <https://github.com/httpie/cli/issues/895>

    <https://ec.haxx.se/http/http-basics#path-as-is>
    <https://curl.haxx.se/libcurl/c/CURLOPT_PATH_AS_IS.html>

    >>> ensure_path_as_is('http://foo/../', 'http://foo/?foo=bar')
    'http://foo/../?foo=bar'

    """
    parsed_orig, parsed_prepped = urlparse(orig_url), urlparse(prepped_url)
    final_dict = {
        # noinspection PyProtectedMember
        **parsed_prepped._asdict(),
        'path': parsed_orig.path,
    }
    return urlunparse(tuple(final_dict.values()))
Return the path to the httpie configuration directory. This directory isn't guaranteed to exist, nor are any of its ancestors (only the legacy ~/.httpie, if returned, is guaranteed to exist).

XDG Base Directory Specification support:
<https://wiki.archlinux.org/index.php/XDG_Base_Directory>

$XDG_CONFIG_HOME is supported; $XDG_CONFIG_DIRS is not
def get_default_config_dir() -> Path:
    """
    Return the path to the httpie configuration directory.

    This directory isn't guaranteed to exist, nor are any of its
    ancestors (only the legacy ~/.httpie, if returned, is guaranteed
    to exist).

    XDG Base Directory Specification support:

        <https://wiki.archlinux.org/index.php/XDG_Base_Directory>

    $XDG_CONFIG_HOME is supported; $XDG_CONFIG_DIRS is not

    """
    # 1. explicitly set through env
    env_config_dir = os.environ.get(ENV_HTTPIE_CONFIG_DIR)
    if env_config_dir:
        return Path(env_config_dir)

    # 2. Windows
    if is_windows:
        return DEFAULT_WINDOWS_CONFIG_DIR

    home_dir = Path.home()

    # 3. legacy ~/.httpie
    legacy_config_dir = home_dir / DEFAULT_RELATIVE_LEGACY_CONFIG_DIR
    if legacy_config_dir.exists():
        return legacy_config_dir

    # 4. XDG
    xdg_config_home_dir = os.environ.get(
        ENV_XDG_CONFIG_HOME,  # 4.1. explicit
        home_dir / DEFAULT_RELATIVE_XDG_CONFIG_HOME  # 4.2. default
    )
    return Path(xdg_config_home_dir) / DEFAULT_CONFIG_DIRNAME
The main function. Pre-process args, handle some special types of invocations, and run the main program with error handling. Return exit status code.
def main(
    args: List[Union[str, bytes]] = sys.argv,
    env: Environment = Environment()
) -> ExitStatus:
    """
    The main function.

    Pre-process args, handle some special types of invocations,
    and run the main program with error handling.

    Return exit status code.

    """
    from .cli.definition import parser

    return raw_main(
        parser=parser,
        main_program=program,
        args=args,
        env=env
    )
The main program without error handling.
def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
    """
    The main program without error handling.

    """
    # TODO: Refactor and drastically simplify, especially so that
    # the separator logic is elsewhere.
    exit_status = ExitStatus.SUCCESS
    downloader = None
    initial_request: Optional[requests.PreparedRequest] = None
    final_response: Optional[requests.Response] = None
    processing_options = ProcessingOptions.from_raw_args(args)

    def separate():
        getattr(env.stdout, 'buffer', env.stdout).write(MESSAGE_SEPARATOR_BYTES)

    def request_body_read_callback(chunk: bytes):
        should_pipe_to_stdout = bool(
            # Request body output desired
            OUT_REQ_BODY in args.output_options
            # & not `.read()` already pre-request (e.g., for compression)
            and initial_request
            # & non-EOF chunk
            and chunk
        )
        if should_pipe_to_stdout:
            return write_raw_data(
                env,
                chunk,
                processing_options=processing_options,
                headers=initial_request.headers
            )

    try:
        if args.download:
            args.follow = True  # --download implies --follow.
            downloader = Downloader(env, output_file=args.output_file,
                                    resume=args.download_resume)
            downloader.pre_request(args.headers)

        messages = collect_messages(env, args=args,
                                    request_body_read_callback=request_body_read_callback)
        force_separator = False
        prev_with_body = False

        # Process messages as they’re generated
        for message in messages:
            output_options = OutputOptions.from_message(message, args.output_options)

            do_write_body = output_options.body
            if prev_with_body and output_options.any() and (force_separator or not env.stdout_isatty):
                # Separate after a previous message with body, if needed.
                # See test_tokens.py.
                separate()
            force_separator = False
            if output_options.kind is RequestsMessageKind.REQUEST:
                if not initial_request:
                    initial_request = message
                if output_options.body:
                    is_streamed_upload = not isinstance(message.body, (str, bytes))
                    do_write_body = not is_streamed_upload
                    force_separator = is_streamed_upload and env.stdout_isatty
            else:
                final_response = message
                if args.check_status or downloader:
                    exit_status = http_status_to_exit_status(http_status=message.status_code,
                                                             follow=args.follow)
                    if exit_status != ExitStatus.SUCCESS and (not env.stdout_isatty or args.quiet == 1):
                        env.log_error(f'HTTP {message.raw.status} {message.raw.reason}',
                                      level=LogLevel.WARNING)
            write_message(requests_message=message, env=env,
                          output_options=output_options._replace(body=do_write_body),
                          processing_options=processing_options)
            prev_with_body = output_options.body

        # Cleanup
        if force_separator:
            separate()
        if downloader and exit_status == ExitStatus.SUCCESS:
            # Last response body download.
            download_stream, download_to = downloader.start(
                initial_url=initial_request.url,
                final_response=final_response,
            )
            write_stream(stream=download_stream, outfile=download_to, flush=False)
            downloader.finish()
            if downloader.interrupted:
                exit_status = ExitStatus.ERROR
                env.log_error(
                    f'Incomplete download: size={downloader.status.total_size};'
                    f' downloaded={downloader.status.downloaded}'
                )
        return exit_status

    finally:
        if downloader and not downloader.finished:
            downloader.failed()
        if args.output_file and args.output_file_specified:
            args.output_file.close()
Convert all bytes args to str by decoding them using stdin encoding.
def decode_raw_args(
    args: List[Union[str, bytes]],
    stdin_encoding: str
) -> List[str]:
    """
    Convert all bytes args to str
    by decoding them using stdin encoding.

    """
    return [
        arg.decode(stdin_encoding)
        if type(arg) is bytes else arg
        for arg in args
    ]
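A quick sketch of what this normalization looks like, assuming a UTF-8 stdin:

>>> decode_raw_args([b'--form', 'POST', b'pie.dev/post'], 'utf-8')
['--form', 'POST', 'pie.dev/post']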
Parse and validate Content-Range header.

<https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>

:param content_range: the value of a Content-Range response header
                      e.g. "bytes 21010-47021/47022"
:param resumed_from: first byte pos. from the Range request header
:return: total size of the response body when fully downloaded.
def parse_content_range(content_range: str, resumed_from: int) -> int:
    """
    Parse and validate Content-Range header.

    <https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>

    :param content_range: the value of a Content-Range response header
                          e.g. "bytes 21010-47021/47022"
    :param resumed_from: first byte pos. from the Range request header
    :return: total size of the response body when fully downloaded.

    """
    if content_range is None:
        raise ContentRangeError('Missing Content-Range')

    pattern = (
        r'^bytes (?P<first_byte_pos>\d+)-(?P<last_byte_pos>\d+)'
        r'/(\*|(?P<instance_length>\d+))$'
    )
    match = re.match(pattern, content_range)

    if not match:
        raise ContentRangeError(
            f'Invalid Content-Range format {content_range!r}')

    content_range_dict = match.groupdict()
    first_byte_pos = int(content_range_dict['first_byte_pos'])
    last_byte_pos = int(content_range_dict['last_byte_pos'])
    instance_length = (
        int(content_range_dict['instance_length'])
        if content_range_dict['instance_length']
        else None
    )

    # "A byte-content-range-spec with a byte-range-resp-spec whose
    # last-byte-pos value is less than its first-byte-pos value,
    # or whose instance-length value is less than or equal to its
    # last-byte-pos value, is invalid. The recipient of an invalid
    # byte-content-range-spec MUST ignore it and any content
    # transferred along with it."
    if (first_byte_pos > last_byte_pos
        or (instance_length is not None
            and instance_length <= last_byte_pos)):
        raise ContentRangeError(
            f'Invalid Content-Range returned: {content_range!r}')

    if (first_byte_pos != resumed_from
        or (instance_length is not None
            and last_byte_pos + 1 != instance_length)):
        # Not what we asked for.
        raise ContentRangeError(
            f'Unexpected Content-Range returned ({content_range!r})'
            f' for the requested Range ("bytes={resumed_from}-")'
        )

    return last_byte_pos + 1
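For instance, resuming at byte 100 of a 200-byte body validates cleanly and yields the total size:

>>> parse_content_range('bytes 100-199/200', resumed_from=100)
200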
Extract and validate filename from a Content-Disposition header.

:param content_disposition: Content-Disposition value
:return: the filename if present and valid, otherwise `None`
def filename_from_content_disposition(
    content_disposition: str
) -> Optional[str]:
    """
    Extract and validate filename from a Content-Disposition header.

    :param content_disposition: Content-Disposition value
    :return: the filename if present and valid, otherwise `None`

    """
    # attachment; filename=jakubroztocil-httpie-0.4.1-20-g40bd8f6.tar.gz
    msg = Message(f'Content-Disposition: {content_disposition}')
    filename = msg.get_filename()
    if filename:
        # Basic sanitation.
        filename = os.path.basename(filename).lstrip('.').strip()
        if filename:
            return filename
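A minimal happy-path sketch:

>>> filename_from_content_disposition('attachment; filename="report.pdf"')
'report.pdf'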
We default to UTF-8 if the text is too short, because the detection can return a random encoding leading to confusing results given the `charset_normalizer` version (< 2.0.5).

>>> too_short = ']"foo"'
>>> detected = from_bytes(too_short.encode()).best().encoding
>>> detected
'ascii'
>>> too_short.encode().decode(detected)
']"foo"'
def detect_encoding(content: ContentBytes) -> str:
    """
    We default to UTF-8 if the text is too short, because the detection
    can return a random encoding leading to confusing results
    given the `charset_normalizer` version (< 2.0.5).

    >>> too_short = ']"foo"'
    >>> detected = from_bytes(too_short.encode()).best().encoding
    >>> detected
    'ascii'
    >>> too_short.encode().decode(detected)
    ']"foo"'
    """
    encoding = UTF8
    if len(content) > TOO_SMALL_SEQUENCE:
        match = from_bytes(bytes(content)).best()
        if match:
            encoding = match.encoding
    return encoding
Decode `content` using the given `encoding`. If no `encoding` is provided, it is guessed from `content` on a best-effort basis. Unicode errors are replaced.
def smart_decode(content: ContentBytes, encoding: str) -> Tuple[str, str]:
    """Decode `content` using the given `encoding`.

    If no `encoding` is provided, it is guessed from `content`
    on a best-effort basis. Unicode errors are replaced.

    """
    if not encoding:
        encoding = detect_encoding(content)
    return content.decode(encoding, 'replace'), encoding
Encode `content` using the given `encoding`. Unicode errors are replaced.
def smart_encode(content: str, encoding: str) -> bytes:
    """Encode `content` using the given `encoding`.

    Unicode errors are replaced.

    """
    return content.encode(encoding, 'replace')
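A small round-trip sketch of the two helpers; the 'replace' error handler shows up on the lossy ASCII encode:

>>> smart_decode('café'.encode('utf-8'), 'utf-8')
('café', 'utf-8')
>>> smart_encode('café', 'ascii')
b'caf?'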
Detects if a key file is encrypted or not. Copy of the internal urllib3 function (urllib3.util.ssl_).
def _is_key_file_encrypted(key_file):
    """Detects if a key file is encrypted or not.

    Copy of the internal urllib3 function (urllib3.util.ssl_)."""
    with open(key_file, 'r') as f:
        for line in f:
            # Look for Proc-Type: 4,ENCRYPTED
            if 'ENCRYPTED' in line:
                return True

    return False
Translate HTTP status code to exit status code. (Relevant only when invoked with --check-status or --download.)
def http_status_to_exit_status(http_status: int, follow=False) -> ExitStatus:
    """
    Translate HTTP status code to exit status code.

    (Relevant only when invoked with --check-status or --download.)

    """
    if 300 <= http_status <= 399 and not follow:
        # Redirect
        return ExitStatus.ERROR_HTTP_3XX
    elif 400 <= http_status <= 499:
        # Client Error
        return ExitStatus.ERROR_HTTP_4XX
    elif 500 <= http_status <= 599:
        # Server Error
        return ExitStatus.ERROR_HTTP_5XX
    else:
        return ExitStatus.SUCCESS
Return a humanized string representation of a number of bytes.

>>> humanize_bytes(1)
'1 B'
>>> humanize_bytes(1024, precision=1)
'1.0 kB'
>>> humanize_bytes(1024 * 123, precision=1)
'123.0 kB'
>>> humanize_bytes(1024 * 12342, precision=1)
'12.1 MB'
>>> humanize_bytes(1024 * 12342, precision=2)
'12.05 MB'
>>> humanize_bytes(1024 * 1234, precision=2)
'1.21 MB'
>>> humanize_bytes(1024 * 1234 * 1111, precision=2)
'1.31 GB'
>>> humanize_bytes(1024 * 1234 * 1111, precision=1)
'1.3 GB'
def humanize_bytes(n, precision=2):
    # Author: Doug Latornell
    # Licence: MIT
    # URL: https://code.activestate.com/recipes/577081/
    """Return a humanized string representation of a number of bytes.

    >>> humanize_bytes(1)
    '1 B'
    >>> humanize_bytes(1024, precision=1)
    '1.0 kB'
    >>> humanize_bytes(1024 * 123, precision=1)
    '123.0 kB'
    >>> humanize_bytes(1024 * 12342, precision=1)
    '12.1 MB'
    >>> humanize_bytes(1024 * 12342, precision=2)
    '12.05 MB'
    >>> humanize_bytes(1024 * 1234, precision=2)
    '1.21 MB'
    >>> humanize_bytes(1024 * 1234 * 1111, precision=2)
    '1.31 GB'
    >>> humanize_bytes(1024 * 1234 * 1111, precision=1)
    '1.3 GB'

    """
    abbrevs = [
        (1 << 50, 'PB'),
        (1 << 40, 'TB'),
        (1 << 30, 'GB'),
        (1 << 20, 'MB'),
        (1 << 10, 'kB'),
        (1, 'B')
    ]

    if n == 1:
        return '1 B'

    for factor, suffix in abbrevs:
        if n >= factor:
            break

    # noinspection PyUnboundLocalVariable
    return f'{n / factor:.{precision}f} {suffix}'
Return the content type for ``filename`` in a format appropriate for Content-Type headers, or ``None`` if the file type is unknown to ``mimetypes``.
def get_content_type(filename):
    """
    Return the content type for ``filename`` in a format appropriate
    for Content-Type headers, or ``None`` if the file type is unknown
    to ``mimetypes``.

    """
    return mimetypes.guess_type(filename, strict=False)[0]
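For example:

>>> get_content_type('data.json')
'application/json'
>>> get_content_type('README') is None
True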
When ``requests`` stores cookies in ``response.headers['Set-Cookie']`` it concatenates all of them through ``, ``. This function splits cookies apart being careful not to split on ``, `` which may be part of a cookie value.
def split_cookies(cookies):
    """
    When ``requests`` stores cookies in ``response.headers['Set-Cookie']``
    it concatenates all of them through ``, ``.

    This function splits cookies apart being careful not to split
    on ``, `` which may be part of a cookie value.

    """
    if not cookies:
        return []
    return RE_COOKIE_SPLIT.split(cookies)
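The tricky part is that ``expires`` attributes legally contain ``, `` themselves, so a naive ``split(', ')`` would break such cookies apart. Expected behavior, assuming ``RE_COOKIE_SPLIT`` accounts for the ``expires`` date format:

>>> split_cookies('a=b; expires=Sat, 01-Jan-2033 00:00:00 GMT, c=d')
['a=b; expires=Sat, 01-Jan-2033 00:00:00 GMT', 'c=d']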
Translate `max-age` into `expires` for Requests to take it into account. HACK/FIXME: <https://github.com/psf/requests/issues/5743>
def _max_age_to_expires(cookies, now):
    """
    Translate `max-age` into `expires` for Requests to take it into account.

    HACK/FIXME: <https://github.com/psf/requests/issues/5743>

    """
    for cookie in cookies:
        if 'expires' in cookie:
            continue
        max_age = cookie.get('max-age')
        if max_age and max_age.isdigit():
            cookie['expires'] = now + float(max_age)
Borrowed from requests.
def parse_content_type_header(header):
    """Borrowed from requests."""
    tokens = header.split(';')
    content_type, params = tokens[0].strip(), tokens[1:]
    params_dict = {}
    items_to_strip = "\"' "
    for param in params:
        param = param.strip()
        if param:
            key, value = param, True
            index_of_equals = param.find("=")
            if index_of_equals != -1:
                key = param[:index_of_equals].strip(items_to_strip)
                value = param[index_of_equals + 1:].strip(items_to_strip)
            params_dict[key.lower()] = value
    return content_type, params_dict
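For example:

>>> parse_content_type_header('text/html; charset=UTF-8')
('text/html', {'charset': 'UTF-8'})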
Parse `s` and update `defaults` with the parsed values.

>>> parse_format_options(
...     defaults={'json': {'indent': 4, 'sort_keys': True}},
...     s='json.indent:2,json.sort_keys:False',
... )
{'json': {'indent': 2, 'sort_keys': False}}
def parse_format_options(s: str, defaults: Optional[dict]) -> dict:
    """
    Parse `s` and update `defaults` with the parsed values.

    >>> parse_format_options(
    ...     defaults={'json': {'indent': 4, 'sort_keys': True}},
    ...     s='json.indent:2,json.sort_keys:False',
    ... )
    {'json': {'indent': 2, 'sort_keys': False}}

    """
    value_map = {
        'true': True,
        'false': False,
    }
    options = deepcopy(defaults or {})
    for option in s.split(','):
        try:
            path, value = option.lower().split(':')
            section, key = path.split('.')
        except ValueError:
            raise argparse.ArgumentTypeError(f'invalid option {option!r}')

        if value in value_map:
            parsed_value = value_map[value]
        else:
            if value.isnumeric():
                parsed_value = int(value)
            else:
                parsed_value = value

        if defaults is None:
            options.setdefault(section, {})
        else:
            try:
                default_value = defaults[section][key]
            except KeyError:
                raise argparse.ArgumentTypeError(
                    f'invalid key {path!r}')

            default_type, parsed_type = type(default_value), type(parsed_value)
            if parsed_type is not default_type:
                raise argparse.ArgumentTypeError(
                    'invalid value'
                    f' {value!r} in {option!r}'
                    f' (expected {default_type.__name__}'
                    f' got {parsed_type.__name__})'
                )

        options[section][key] = parsed_value

    return options
Take an existing argparse parser, and create a spec from it.
def parser_to_parser_spec(parser: argparse.ArgumentParser, **kwargs) -> ParserSpec:
    """Take an existing argparse parser, and create a spec from it."""
    return ParserSpec(
        program=parser.prog,
        description=parser.description,
        epilog=parser.epilog,
        **kwargs
    )
We allow primitive values to be passed to forms via JSON key/value syntax. But complex values lead to an error because there’s no clear way to serialize them.
def convert_json_value_to_form_if_needed(
    in_json_mode: bool,
    processor: Callable[[KeyValueArg], JSONType]
) -> Callable[[], str]:
    """
    We allow primitive values to be passed to forms via JSON key/value syntax.

    But complex values lead to an error because there’s no clear way to
    serialize them.

    """
    if in_json_mode:
        return processor

    @functools.wraps(processor)
    def wrapper(*args, **kwargs) -> str:
        try:
            output = processor(*args, **kwargs)
        except ParseError:
            output = None
        if isinstance(output, (str, int, float)):
            return str(output)
        else:
            raise ParseError('Cannot use complex JSON value types with --form/--multipart.')

    return wrapper
Propagate the top-level list, if that’s what we got.
def unwrap_top_level_list_if_needed(data: dict):
    """
    Propagate the top-level list, if that’s what we got.

    """
    if len(data) == 1:
        key, value = list(data.items())[0]
        if isinstance(value, NestedJSONArray):
            assert key == EMPTY_STRING
            return value
    return data
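As an illustration — assuming `EMPTY_STRING` is `''` and `NestedJSONArray` is a list subclass, which is what the code above implies:

>>> unwrap_top_level_list_if_needed({'': NestedJSONArray([1, 2])})
[1, 2]
>>> unwrap_top_level_list_if_needed({'foo': 'bar'})
{'foo': 'bar'}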
start: root_path path*
root_path: (literal | index_path | append_path)
literal: TEXT | NUMBER

path: key_path | index_path | append_path
key_path: LEFT_BRACKET TEXT RIGHT_BRACKET
index_path: LEFT_BRACKET NUMBER RIGHT_BRACKET
append_path: LEFT_BRACKET RIGHT_BRACKET
def parse(source: str) -> Iterator[Path]:
    """
    start: root_path path*
    root_path: (literal | index_path | append_path)
    literal: TEXT | NUMBER

    path: key_path | index_path | append_path
    key_path: LEFT_BRACKET TEXT RIGHT_BRACKET
    index_path: LEFT_BRACKET NUMBER RIGHT_BRACKET
    append_path: LEFT_BRACKET RIGHT_BRACKET
    """
    tokens = list(tokenize(source))
    cursor = 0

    def can_advance():
        return cursor < len(tokens)

    # noinspection PyShadowingNames
    def expect(*kinds):
        nonlocal cursor
        assert kinds
        if can_advance():
            token = tokens[cursor]
            cursor += 1
            if token.kind in kinds:
                return token
        elif tokens:
            token = tokens[-1]._replace(
                start=tokens[-1].end + 0,
                end=tokens[-1].end + 1,
            )
        else:
            token = None

        if len(kinds) == 1:
            suffix = kinds[0].to_name()
        else:
            suffix = ', '.join(kind.to_name() for kind in kinds[:-1])
            suffix += ' or ' + kinds[-1].to_name()

        message = f'Expecting {suffix}'
        raise NestedJSONSyntaxError(source, token, message)

    # noinspection PyShadowingNames
    def parse_root():
        tokens = []
        if not can_advance():
            return Path(
                kind=PathAction.KEY,
                accessor=EMPTY_STRING,
                is_root=True
            )

        # (literal | index_path | append_path)?
        token = expect(*LITERAL_TOKENS, TokenKind.LEFT_BRACKET)
        tokens.append(token)

        if token.kind in LITERAL_TOKENS:
            action = PathAction.KEY
            value = str(token.value)
        elif token.kind is TokenKind.LEFT_BRACKET:
            token = expect(TokenKind.NUMBER, TokenKind.RIGHT_BRACKET)
            tokens.append(token)
            if token.kind is TokenKind.NUMBER:
                action = PathAction.INDEX
                value = token.value
                tokens.append(expect(TokenKind.RIGHT_BRACKET))
            elif token.kind is TokenKind.RIGHT_BRACKET:
                action = PathAction.APPEND
                value = None
            else:
                assert_cant_happen()
        else:
            assert_cant_happen()

        # noinspection PyUnboundLocalVariable
        return Path(
            kind=action,
            accessor=value,
            tokens=tokens,
            is_root=True
        )

    yield parse_root()

    # path*
    while can_advance():
        path_tokens = [expect(TokenKind.LEFT_BRACKET)]
        token = expect(TokenKind.TEXT, TokenKind.NUMBER, TokenKind.RIGHT_BRACKET)
        path_tokens.append(token)
        if token.kind is TokenKind.RIGHT_BRACKET:
            path = Path(PathAction.APPEND, tokens=path_tokens)
        elif token.kind is TokenKind.TEXT:
            path = Path(PathAction.KEY, token.value, tokens=path_tokens)
            path_tokens.append(expect(TokenKind.RIGHT_BRACKET))
        elif token.kind is TokenKind.NUMBER:
            path = Path(PathAction.INDEX, token.value, tokens=path_tokens)
            path_tokens.append(expect(TokenKind.RIGHT_BRACKET))
        else:
            assert_cant_happen()
        # noinspection PyUnboundLocalVariable
        yield path
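To make the grammar concrete, a hypothetical walk over `foo[bar][0][]` (attribute names taken from the `Path` construction above):

for path in parse('foo[bar][0][]'):
    print(path.kind, path.accessor)
# Expected, per the grammar:
#   PathAction.KEY 'foo'    (root literal)
#   PathAction.KEY 'bar'
#   PathAction.INDEX 0
#   PathAction.APPEND None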
Perform a double fork procedure [1] to detach from the parent process so that we don't block the user even if their original command's execution is done but the release fetcher is not.

[1]: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap11.html#tag_11_01_03
def _spawn_posix(args: List[str], process_context: ProcessContext) -> None:
    """
    Perform a double fork procedure [1] to detach from the parent
    process so that we don't block the user even if their original
    command's execution is done but the release fetcher is not.

    [1]: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap11.html#tag_11_01_03

    """
    from httpie.core import main

    try:
        pid = os.fork()
        if pid > 0:
            return
    except OSError:
        os._exit(1)

    os.setsid()

    try:
        pid = os.fork()
        if pid > 0:
            os._exit(0)
    except OSError:
        os._exit(1)

    # Close all standard inputs/outputs
    sys.stdin.close()
    sys.stdout.close()
    sys.stderr.close()

    if platform.system() == 'Darwin':
        # Double-fork is not reliable on macOS, so we'll use a subprocess
        # to ensure the task is isolated properly.
        process = _start_process(args, env=process_context)
        # Unlike Windows, since we already completed the fork procedure
        # we can simply join the process and wait for it.
        process.communicate()
    else:
        os.environ.update(process_context)
        with suppress(BaseException):
            main(['http'] + args)

    os._exit(0)
Spawn a new process to run the given command.
def _spawn(args: List[str], process_context: ProcessContext) -> None:
    """
    Spawn a new process to run the given command.
    """
    if is_windows:
        _spawn_windows(args, process_context)
    else:
        _spawn_posix(args, process_context)
Return a context manager that suppresses all possible errors. Note: if developer_mode=True is set in your config, then it will show all errors for easier debugging.
def _get_suppress_context(env: Environment) -> Any:
    """Return a context manager that suppresses all possible errors.

    Note: if developer_mode=True is set in your config, then it
    will show all errors for easier debugging."""
    if env.config.developer_mode:
        return nullcontext()
    else:
        return suppress(BaseException)
Control the execution of the update checker (suppress errors, trigger auto updates etc.)
def _update_checker(
    func: Callable[[Environment], None]
) -> Callable[[Environment], None]:
    """Control the execution of the update checker (suppress errors,
    trigger auto updates etc.)"""

    def wrapper(env: Environment) -> None:
        with _get_suppress_context(env):
            func(env)

        with _get_suppress_context(env):
            maybe_fetch_updates(env)

    return wrapper
If there is a new update available, return the warning text. Otherwise just return None.
def _get_update_status(env: Environment) -> Optional[str]:
    """If there is a new update available, return the warning text.
    Otherwise just return None."""
    file = env.config.version_info_file
    if not file.exists():
        return None

    with _get_suppress_context(env):
        # If the user quickly spawns multiple httpie processes
        # we don't want to end in a race.
        with open_with_lockfile(file) as stream:
            version_info = json.load(stream)

        available_channels = version_info['last_released_versions']
        if BUILD_CHANNEL not in available_channels:
            return None

        current_version = httpie.__version__
        last_released_version = available_channels[BUILD_CHANNEL]
        if not is_version_greater(last_released_version, current_version):
            return None

        text = UPDATE_MESSAGE_FORMAT.format(
            last_released_version=last_released_version,
            installation_method=BUILD_CHANNEL,
        )
        return text
Load the given cookies into the cookie jar while maintaining support for the old cookie layout.
def pre_process(session: 'Session', cookies: Any) -> List[Dict[str, Any]]:
    """Load the given cookies into the cookie jar while maintaining
    support for the old cookie layout."""
    is_old_style = isinstance(cookies, dict)
    if is_old_style:
        normalized_cookies = [
            {
                'name': key,
                **value
            }
            for key, value in cookies.items()
        ]
    else:
        normalized_cookies = cookies

    should_issue_warning = is_old_style and any(
        cookie.get('domain', '') == ''
        for cookie in normalized_cookies
    )
    if should_issue_warning:
        warning = INSECURE_COOKIE_JAR_WARNING.format(hostname=session.bound_host,
                                                     session_id=session.session_id)
        if not session.is_anonymous:
            warning += INSECURE_COOKIE_JAR_WARNING_FOR_NAMED_SESSIONS
        warning += INSECURE_COOKIE_SECURITY_LINK
        session.warn_legacy_usage(warning)

    return normalized_cookies
Convert the cookies to their original format for maximum compatibility.
def post_process(
    normalized_cookies: List[Dict[str, Any]],
    *,
    original_type: Type[Any]
) -> Any:
    """Convert the cookies to their original format for
    maximum compatibility."""
    if issubclass(original_type, dict):
        return {
            cookie.pop('name'): cookie
            for cookie in normalized_cookies
        }
    else:
        return normalized_cookies
Serialize the headers into a unified form and issue a warning if the session file is using the old layout.
def pre_process(session: 'Session', headers: Any) -> List[Dict[str, Any]]:
    """Serialize the headers into a unified form and issue a warning
    if the session file is using the old layout."""
    is_old_style = isinstance(headers, dict)
    if is_old_style:
        normalized_headers = list(headers.items())
    else:
        normalized_headers = [
            (item['name'], item['value'])
            for item in headers
        ]

    if is_old_style:
        warning = OLD_HEADER_STORE_WARNING.format(hostname=session.bound_host,
                                                  session_id=session.session_id)
        if not session.is_anonymous:
            warning += OLD_HEADER_STORE_WARNING_FOR_NAMED_SESSIONS
        warning += OLD_HEADER_STORE_LINK
        session.warn_legacy_usage(warning)

    return normalized_headers
Deserialize the given header store into the original form it was used in.
def post_process(
    normalized_headers: List[Dict[str, Any]],
    *,
    original_type: Type[Any]
) -> Any:
    """Deserialize the given header store into the original form
    it was used in."""
    if issubclass(original_type, dict):
        # For the legacy behavior, preserve the last value.
        return {
            item['name']: item['value']
            for item in normalized_headers
        }
    else:
        return normalized_headers
Check whether the http/https parser can parse the arguments.
def is_http_command(args: List[Union[str, bytes]], env: Environment) -> bool:
    """Check whether the http/https parser can parse the arguments."""
    from httpie.cli.definition import parser as http_parser
    from httpie.manager.cli import COMMANDS

    # If the user already selected a top-level sub-command, never
    # show the http/https version. E.g. httpie plugins pie.dev/post
    if len(args) >= 1 and args[0] in COMMANDS:
        return False

    with env.as_silent():
        try:
            http_parser.parse_args(env=env, args=args)
        except (Exception, SystemExit):
            return False
        else:
            return True
Simple JSON loading from `data`.
def load_prefixed_json(data: str) -> Tuple[str, json.JSONDecoder]:
    """Simple JSON loading from `data`.

    """
    # First, the full data.
    try:
        return '', load_json_preserve_order_and_dupe_keys(data)
    except ValueError:
        pass

    # Then, try to find the start of the actual body.
    data_prefix, body = parse_prefixed_json(data)
    try:
        return data_prefix, load_json_preserve_order_and_dupe_keys(body)
    except ValueError:
        raise ValueError('Invalid JSON')
Find the potential JSON body from `data`. Sometimes the JSON body is prefixed with an XSSI magic string, specific to the server. Return a tuple (data prefix, actual JSON body).
def parse_prefixed_json(data: str) -> Tuple[str, str]:
    """Find the potential JSON body from `data`.

    Sometimes the JSON body is prefixed with an XSSI magic string,
    specific to the server. Return a tuple (data prefix, actual JSON body).

    """
    matches = re.findall(PREFIX_REGEX, data)
    data_prefix = matches[0] if matches else ''
    body = data[len(data_prefix):]
    return data_prefix, body
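For example, with the common ``)]}',`` anti-XSSI prefix (assuming ``PREFIX_REGEX`` covers it, which is the case this helper exists for):

prefix, body = parse_prefixed_json(")]}',\n" '{"a": 1}')
# expected: prefix == ")]}',\n" and body == '{"a": 1}'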
Write the output stream.
def write_stream(
    stream: BaseStream,
    outfile: Union[IO, TextIO],
    flush: bool
):
    """Write the output stream."""
    try:
        # Writing bytes so we use the buffer interface.
        buf = outfile.buffer
    except AttributeError:
        buf = outfile

    for chunk in stream:
        buf.write(chunk)
        if flush:
            outfile.flush()
Like `write`, but colorized chunks are written as text directly to `outfile` to ensure it gets processed by colorama. Applies only to Windows and colorized terminal output.
def write_stream_with_colors_win(
    stream: 'BaseStream',
    outfile: TextIO,
    flush: bool
):
    """Like `write`, but colorized chunks are written as text directly to
    `outfile` to ensure it gets processed by colorama.
    Applies only to Windows and colorized terminal output.

    """
    color = b'\x1b['
    encoding = outfile.encoding

    for chunk in stream:
        if color in chunk:
            outfile.write(chunk.decode(encoding))
        else:
            outfile.buffer.write(chunk)
        if flush:
            outfile.flush()
Pick the right stream type and kwargs for it based on `env` and `args`.
def get_stream_type_and_kwargs(
    env: Environment,
    processing_options: ProcessingOptions,
    message_type: Type[HTTPMessage],
    headers: HTTPHeadersDict,
) -> Tuple[Type['BaseStream'], dict]:
    """Pick the right stream type and kwargs for it based on `env` and `args`.

    """
    is_stream = processing_options.stream
    prettify_groups = processing_options.get_prettify(env)
    if not is_stream and message_type is HTTPResponse:
        # If this is a response, then check the headers for determining
        # auto-streaming.
        raw_content_type_header = headers.get('Content-Type', None)
        if raw_content_type_header:
            content_type_header, _ = parse_content_type_header(raw_content_type_header)
            is_stream = (content_type_header == 'text/event-stream')

    if not env.stdout_isatty and not prettify_groups:
        stream_class = RawStream
        stream_kwargs = {
            'chunk_size': (
                RawStream.CHUNK_SIZE_BY_LINE
                if is_stream
                else RawStream.CHUNK_SIZE
            )
        }
    else:
        stream_class = EncodedStream
        stream_kwargs = {
            'env': env,
        }
        if message_type is HTTPResponse:
            stream_kwargs.update({
                'mime_overwrite': processing_options.response_mime,
                'encoding_overwrite': processing_options.response_charset,
            })
        if prettify_groups:
            stream_class = PrettyStream if is_stream else BufferedPrettyStream
            stream_kwargs.update({
                'conversion': Conversion(),
                'formatting': Formatting(
                    env=env,
                    groups=prettify_groups,
                    color_scheme=processing_options.style,
                    explicit_json=processing_options.json,
                    format_options=processing_options.format_options,
                )
            })

    return stream_class, stream_kwargs
Parse given XML `data` string into an appropriate :class:`~xml.dom.minidom.Document` object.
def parse_xml(data: str) -> 'Document':
    """Parse given XML `data` string into an appropriate
    :class:`~xml.dom.minidom.Document` object."""
    from defusedxml.minidom import parseString
    return parseString(data)
Render the given :class:`~xml.dom.minidom.Document` `document` into a prettified string.
def pretty_xml(document: 'Document',
               declaration: Optional[str] = None,
               encoding: Optional[str] = UTF8,
               indent: int = 2) -> str:
    """Render the given :class:`~xml.dom.minidom.Document` `document`
    into a prettified string."""
    kwargs = {
        'encoding': encoding or UTF8,
        'indent': ' ' * indent,
    }
    body = document.toprettyxml(**kwargs).decode(kwargs['encoding'])

    # Remove blank lines automatically added by `toprettyxml()`.
    lines = [line for line in body.splitlines() if line.strip()]

    # xml.dom automatically adds the declaration, even if
    # it is not present in the actual body. Remove it.
    if len(lines) >= 1 and parse_declaration(lines[0]):
        lines.pop(0)
        if declaration:
            lines.insert(0, declaration)

    return '\n'.join(lines)
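Putting the two helpers above together, a quick sketch:

document = parse_xml('<root><a>1</a></root>')
print(pretty_xml(document))
# Expected output (2-space indent, auto-added declaration stripped):
# <root>
#   <a>1</a>
# </root>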
Check whether `program`'s man pages are available on this system.
def is_available(program: str) -> bool:
    """
    Check whether `program`'s man pages are available on this system.

    """
    # Note: `os.name` (not the `os.system` function) identifies Windows.
    if NO_MAN_PAGES or os.name == 'nt':
        return False

    try:
        process = subprocess.run(
            [MAN_COMMAND, MAN_PAGE_SECTION, program],
            shell=False,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL
        )
    except Exception:
        # There might be some errors outside the process, e.g.
        # a permission error to execute something that is not an
        # executable.
        return False
    else:
        return process.returncode == 0
Open the system man page for the given command (http/https/httpie).
def display_for(env: Environment, program: str) -> None:
    """
    Open the system man page for the given command (http/https/httpie).

    """
    subprocess.run(
        [MAN_COMMAND, MAN_PAGE_SECTION, program],
        stdout=env.stdout,
        stderr=env.stderr
    )
Render any `rich` object in a fake console and return a *style-less* version of it as a string.
def render_as_string(renderable: RenderableType) -> str:
    """Render any `rich` object in a fake console and
    return a *style-less* version of it as a string."""
    with open(os.devnull, 'w') as null_stream:
        fake_console = Console(
            file=null_stream,
            record=True,
            theme=_make_rich_color_theme()
        )
        fake_console.print(renderable)
        return fake_console.export_text()
Enable a highlighter temporarily.
def enable_highlighter(
    console: Console,
    highlighter: Highlighter,
) -> Iterator[Console]:
    """Enable a highlighter temporarily."""
    original_highlighter = console.highlighter

    try:
        console.highlighter = highlighter
        yield console
    finally:
        console.highlighter = original_highlighter
Make pytest-httpbin's CA trusted by default. (Same as `httpbin_ca_bundle`, just auto-used.)
def httpbin_add_ca_bundle(monkeypatch):
    """
    Make pytest-httpbin's CA trusted by default.

    (Same as `httpbin_ca_bundle`, just auto-used.)

    """
    monkeypatch.setenv('REQUESTS_CA_BUNDLE', certs.where())
Like the `httpbin_secure` fixture, but without the make-CA-trusted-by-default.
def httpbin_secure_untrusted(monkeypatch, httpbin_secure):
    """
    Like the `httpbin_secure` fixture, but without the
    make-CA-trusted-by-default.

    """
    monkeypatch.delenv('REQUESTS_CA_BUNDLE')
    return httpbin_secure
Injects the `pyOpenSSL` module to make sure `requests` will use it. <https://github.com/psf/requests/pull/5443#issuecomment-645740394>
def pyopenssl_inject():
    """
    Injects the `pyOpenSSL` module to make sure `requests` will use it.

    <https://github.com/psf/requests/pull/5443#issuecomment-645740394>

    """
    if IS_PYOPENSSL:
        try:
            import urllib3.contrib.pyopenssl
            urllib3.contrib.pyopenssl.inject_into_urllib3()
        except ModuleNotFoundError:
            pytest.fail('Missing "pyopenssl" module.')

    yield
When credentials are passed in URL and via -a at the same time, then the ones from -a are used.
def test_credentials_in_url_auth_flag_has_priority(httpbin_both):
    """When credentials are passed in URL and via -a at the same time,
    then the ones from -a are used."""
    url = add_auth(httpbin_both.url + '/basic-auth/user/password',
                   auth='user:wrong')
    r = http('--auth=user:password', 'GET', url)
    assert HTTP_OK in r
    assert r.json == {'authenticated': True, 'user': 'user'}
https://github.com/httpie/cli/issues/242
def test_only_username_in_url(url):
    """
    https://github.com/httpie/cli/issues/242

    """
    args = httpie.cli.definition.parser.parse_args(args=[url], env=MockEnvironment())
    assert args.auth
    assert args.auth.username == 'username'
    assert args.auth.password == ''
Some tests wrap/crop the output depending on the size of the terminal they are executed in, which might not be consistent across all runs. This fixture ensures every run uses the same exact configuration.
def ignore_terminal_size(monkeypatch):
    """Some tests wrap/crop the output depending on the size of the
    terminal they are executed in, which might not be consistent
    across all runs.

    This fixture ensures every run uses the same exact configuration.

    """

    def fake_terminal_size(*args, **kwargs):
        return os.terminal_size(PREDEFINED_TERMINAL_SIZE)

    # Setting COLUMNS as an env var is required for 3.8<
    monkeypatch.setitem(os.environ, 'COLUMNS', str(PREDEFINED_TERMINAL_SIZE[0]))
    monkeypatch.setattr(shutil, 'get_terminal_size', fake_terminal_size)
    monkeypatch.setattr(os, 'get_terminal_size', fake_terminal_size)
User set cookies ARE NOT persisted within redirects when there is no session, even on the same domain.
def test_explicit_user_set_cookie(httpbin, target_httpbin, request):
    """User set cookies ARE NOT persisted within redirects
    when there is no session, even on the same domain."""
    target_httpbin = request.getfixturevalue(target_httpbin)
    r = http(
        '--follow',
        httpbin + '/redirect-to',
        f'url=={target_httpbin}/cookies',
        'Cookie:a=b'
    )
    assert r.json == {'cookies': {}}
User set cookies ARE persisted within redirects when there is A session, even on the same domain.
def test_explicit_user_set_cookie_in_session(tmp_path, httpbin, target_httpbin, request):
    """User set cookies ARE persisted within redirects
    when there is A session, even on the same domain."""
    target_httpbin = request.getfixturevalue(target_httpbin)
    r = http(
        '--follow',
        '--session',
        str(tmp_path / 'session.json'),
        httpbin + '/redirect-to',
        f'url=={target_httpbin}/cookies',
        'Cookie:a=b'
    )
    assert r.json == {'cookies': {'a': 'b'}}
User set cookies ARE persisted within redirects when there is A session, even on the same domain.
def test_saved_user_set_cookie_in_session(tmp_path, httpbin, target_httpbin, request):
    """User set cookies ARE persisted within redirects
    when there is A session, even on the same domain."""
    target_httpbin = request.getfixturevalue(target_httpbin)
    http(
        '--follow',
        '--session',
        str(tmp_path / 'session.json'),
        httpbin + '/get',
        'Cookie:a=b'
    )
    r = http(
        '--follow',
        '--session',
        str(tmp_path / 'session.json'),
        httpbin + '/redirect-to',
        f'url=={target_httpbin}/cookies',
    )
    assert r.json == {'cookies': {'a': 'b'}}
User set headers ARE persisted within redirects, even on different domains, with or without an active session.
def test_explicit_user_set_headers(httpbin, tmp_path, target_httpbin, session, request):
    """
    User set headers ARE persisted within redirects, even on different
    domains, with or without an active session.
    """
    target_httpbin = request.getfixturevalue(target_httpbin)
    session_args = []
    if session:
        session_args.extend([
            '--session',
            str(tmp_path / 'session.json')
        ])

    r = http(
        '--follow',
        *session_args,
        httpbin + '/redirect-to',
        f'url=={target_httpbin}/get',
        'X-Custom-Header:value'
    )
    assert 'X-Custom-Header' in r.json['headers']
Server set cookies ARE persisted on the same domain when they are forwarded.
def test_server_set_cookie_on_redirect_same_domain(tmp_path, httpbin, session):
    """Server set cookies ARE persisted on the same domain
    when they are forwarded."""
    session_args = []
    if session:
        session_args.extend([
            '--session',
            str(tmp_path / 'session.json')
        ])

    r = http(
        '--follow',
        *session_args,
        httpbin + '/cookies/set/a/b',
    )
    assert r.json['cookies'] == {'a': 'b'}
Saved session cookies ARE persisted when making a new request to the same domain.
def test_saved_session_cookies_on_same_domain(tmp_path, httpbin):
    """Saved session cookies ARE persisted when making a new request
    to the same domain."""
    http(
        '--session',
        str(tmp_path / 'session.json'),
        httpbin + '/cookies/set/a/b'
    )
    r = http(
        '--session',
        str(tmp_path / 'session.json'),
        httpbin + '/cookies'
    )
    assert r.json == {'cookies': {'a': 'b'}}
Saved session cookies ARE persisted when making a new request to a different domain.
def test_saved_session_cookies_on_different_domain(tmp_path, httpbin, remote_httpbin):
    """Saved session cookies ARE persisted when making a new request
    to a different domain."""
    http(
        '--session',
        str(tmp_path / 'session.json'),
        httpbin + '/cookies/set/a/b'
    )
    r = http(
        '--session',
        str(tmp_path / 'session.json'),
        remote_httpbin + '/cookies'
    )
    assert r.json == {'cookies': {}}
<https://github.com/httpie/cli/issues/644>
def test_default_headers_case_insensitive(httpbin):
    """
    <https://github.com/httpie/cli/issues/644>

    """
    r = http(
        '--debug',
        '--print=H',
        httpbin + '/post',
        'CONTENT-TYPE:application/json-patch+json',
        'a=b',
    )
    assert 'CONTENT-TYPE: application/json-patch+json' in r
    assert 'Content-Type' not in r
<https://github.com/httpie/cli/issues/840>
def test_form_POST_file_redirected_stdin(httpbin):
    """
    <https://github.com/httpie/cli/issues/840>

    """
    with open(FILE_PATH, encoding=UTF8):
        r = http(
            '--form',
            'POST',
            httpbin + '/post',
            f'file@{FILE_PATH}',
            tolerate_error_exit_status=True,
            env=MockEnvironment(
                stdin=StdinBytesIO(FILE_PATH.read_bytes()),
                stdin_isatty=False,
            ),
        )

    assert r.exit_status == ExitStatus.ERROR
    assert 'cannot be mixed' in r.stderr
Test JSON bodies preceded by non-JSON data.
def test_json_formatter_with_body_preceded_by_non_json_data(
    data_prefix, json_data, pretty
):
    """Test JSON bodies preceded by non-JSON data."""
    body = data_prefix + json.dumps(json_data)
    content_type = 'application/json;charset=utf8'
    responses.add(
        responses.GET,
        DUMMY_URL,
        body=body,
        content_type=content_type,
    )

    colored_output = pretty in {'all', 'colors'}
    env = MockEnvironment(colors=256) if colored_output else None
    r = http('--pretty', pretty, DUMMY_URL, env=env)

    indent = None if pretty in {'none', 'colors'} else 4
    expected_body = data_prefix + json.dumps(json_data, indent=indent)
    if colored_output:
        fmt = ColorFormatter(
            env, format_options={'json': {'format': True, 'indent': 4}}
        )
        expected_body = fmt.format_body(expected_body, content_type)
        # Check to ensure the non-JSON data prefix is colored only one time,
        # meaning it was correctly handled as a whole.
        assert (
            TEST_PREFIX_TOKEN_COLOR + data_prefix in expected_body
        ), expected_body
    assert expected_body in r
JSON with duplicate keys should be handled correctly.
def test_duplicate_keys_support_from_response():
    """JSON with duplicate keys should be handled correctly."""
    responses.add(
        responses.GET,
        DUMMY_URL,
        body=JSON_WITH_DUPES_RAW,
        content_type='application/json',
    )
    args = ('--pretty', 'format', DUMMY_URL)

    # Check implicit --sorted
    if JsonDictPreservingDuplicateKeys.SUPPORTS_SORTING:
        r = http(*args)
        assert JSON_WITH_DUPES_FORMATTED_SORTED in r

    # Check --unsorted
    r = http(*args, '--unsorted')
    assert JSON_WITH_DUPES_FORMATTED_UNSORTED in r
JSON file with duplicate keys should be handled correctly.
def test_duplicate_keys_support_from_input_file():
    """JSON file with duplicate keys should be handled correctly."""
    args = (
        '--verbose',
        '--offline',
        DUMMY_URL,
        f'@{JSON_WITH_DUPE_KEYS_FILE_PATH}',
    )

    # Check implicit --sorted
    if JsonDictPreservingDuplicateKeys.SUPPORTS_SORTING:
        r = http(*args)
        assert JSON_WITH_DUPES_FORMATTED_SORTED in r

    # Check --unsorted
    r = http(*args, '--unsorted')
    assert JSON_WITH_DUPES_FORMATTED_UNSORTED in r
Absence of response should be handled gracefully with --download
def test_offline_download():
    """Absence of response should be handled gracefully with --download"""
    r = http(
        '--offline',
        '--download',
        'https://this-should.never-resolve/foo',
    )
    assert 'GET /foo' in r
https://github.com/httpie/cli/issues/235
def test_Host_header_overwrite(httpbin):
    """
    https://github.com/httpie/cli/issues/235

    """
    host = 'pie.dev'
    url = httpbin + '/get'
    r = http('--print=hH', url, f'host:{host}')
    assert HTTP_OK in r
    assert r.lower().count('host:') == 1
    assert f'host: {host}' in r
https://github.com/httpie/cli/issues/252
def test_output_devnull(httpbin):
    """
    https://github.com/httpie/cli/issues/252

    """
    http('--output=/dev/null', httpbin + '/get')
<https://github.com/httpie/cli/issues/1006>
def test_verbose_redirected_stdout_separator(httpbin):
    """
    <https://github.com/httpie/cli/issues/1006>

    """
    r = http(
        '-v',
        httpbin + '/post',
        'a=b',
        env=MockEnvironment(stdout_isatty=False),
    )
    assert '}HTTP/' not in r
    assert_output_matches(r, [
        Expect.REQUEST_HEADERS,
        Expect.BODY,
        Expect.SEPARATOR,
        Expect.RESPONSE_HEADERS,
        Expect.BODY,
    ])
Test that --stream works with prettified redirected output.
def test_pretty_redirected_stream(httpbin):
    """Test that --stream works with prettified redirected output."""
    env = MockEnvironment(
        colors=256,
        stdin=StdinBytesIO(BIN_FILE_PATH.read_bytes()),
        stdin_isatty=False,
        stdout_isatty=False,
    )
    r = http('--verbose', '--pretty=all', '--stream', 'GET',
             httpbin + '/get', env=env)
    assert BINARY_SUPPRESSED_NOTICE.decode() in r
Test that --stream works with non-prettified redirected terminal output.
def test_encoded_stream(httpbin):
    """Test that --stream works with non-prettified
    redirected terminal output."""
    env = MockEnvironment(
        stdin=StdinBytesIO(BIN_FILE_PATH.read_bytes()),
        stdin_isatty=False,
    )
    r = http('--pretty=none', '--stream', '--verbose', 'GET',
             httpbin + '/get', env=env)
    assert BINARY_SUPPRESSED_NOTICE.decode() in r
Test that --stream works with non-prettified redirected terminal output.
def test_redirected_stream(httpbin):
    """Test that --stream works with non-prettified
    redirected terminal output."""
    env = MockEnvironment(
        stdout_isatty=False,
        stdin_isatty=False,
        stdin=StdinBytesIO(BIN_FILE_PATH.read_bytes()),
    )
    r = http('--pretty=none', '--stream', '--verbose', 'GET',
             httpbin + '/get', env=env)
    assert BIN_FILE_CONTENT in r
Test XML formatter limits with data containing comments, doctypes and other XML-specific subtleties.
def test_valid_xml(file):
    """Test XML formatter limits with data containing comments, doctypes
    and other XML-specific subtleties.
    """
    if 'standalone' in file.stem and sys.version_info < (3, 9):
        pytest.skip('Standalone XML requires Python 3.9+')
    xml_data = file.read_text(encoding=UTF8)
    expected_xml_file = file.with_name(file.name.replace('_raw', '_formatted'))
    expected_xml_output = expected_xml_file.read_text(encoding=UTF8)
    responses.add(
        responses.GET,
        DUMMY_URL,
        body=xml_data,
        content_type='application/xml',
    )
    r = http(DUMMY_URL)
    assert expected_xml_output in r
XHTML responses are handled by the XML formatter.
def test_xml_xhtml():
    """XHTML responses are handled by the XML formatter."""
    file = XML_FILES_PATH / 'xhtml' / 'xhtml_raw.xml'
    xml_data = file.read_text(encoding=UTF8)

    # Python < 3.8 was sorting attributes (https://bugs.python.org/issue34160)
    # so we have two different expected outputs depending on the Python version.
    expected_file_name = (
        'xhtml_formatted_python_less_than_3.8.xml'
        if sys.version_info < (3, 8)
        else 'xhtml_formatted.xml'
    )
    expected_xml_file = file.with_name(expected_file_name)
    expected_xml_output = expected_xml_file.read_text(encoding=UTF8)
    responses.add(
        responses.GET,
        DUMMY_URL,
        body=xml_data,
        content_type='application/xhtml+xml',
    )
    r = http(DUMMY_URL)
    assert expected_xml_output in r
Testing several problematic XML files; none should be formatted and none should make HTTPie crash.
def test_invalid_xml(file):
    """Testing several problematic XML files; none should be formatted
    and none should make HTTPie crash.
    """
    xml_data = file.read_text(encoding=UTF8)
    responses.add(
        responses.GET,
        DUMMY_URL,
        body=xml_data,
        content_type='application/xml',
    )

    # No formatting done, data is simply printed as-is.
    r = http(DUMMY_URL)
    assert xml_data in r
Backslashes need to be escaped in ITEM args, even in Windows paths.
def patharg(path):
    """
    Backslashes need to be escaped in ITEM args,
    even in Windows paths.

    """
    return str(path).replace('\\', '\\\\\\')
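For instance (raw strings keep the backslashes readable; each one is tripled, per the replacement above):

>>> patharg(r'C:\dir\file.txt') == r'C:\\\dir\\\file.txt'
True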
A custom HTTP server implementation for our tests, built on top of the http.server module. Handy when we need to deal with details which httpbin cannot capture.
def http_server():
    """A custom HTTP server implementation for our tests, built on top
    of the http.server module. Handy when we need to deal with details
    which httpbin cannot capture."""
    with _http_server() as server:
        yield '{0}:{1}'.format(*server.socket.getsockname())
Just like the http_server, but uses the static `localhost` name for the host.
def localhost_http_server():
    """Just like the http_server, but uses the static
    `localhost` name for the host."""
    with _http_server() as server:
        yield 'localhost:{1}'.format(*server.socket.getsockname())
Run HTTPie manager command with the given args/kwargs, and capture stderr/out and exit status.
def httpie(
    *args,
    **kwargs
) -> StrCLIResponse:
    """
    Run HTTPie manager command with the given args/kwargs,
    and capture stderr/out and exit status.
    """
    env = kwargs.setdefault('env', MockEnvironment())
    cli_args = ['httpie']
    if not kwargs.pop('no_debug', False):
        cli_args.append('--debug')
    cli_args += normalize_args(args)
    exit_status = manager.main(
        args=cli_args,
        **kwargs
    )
    env.stdout.seek(0)
    env.stderr.seek(0)
    try:
        response = BaseCLIResponse.from_raw_data(env.stdout.read())
        response.stderr = env.stderr.read()
        response.exit_status = exit_status
        response.args = cli_args
    finally:
        env.stdout.truncate(0)
        env.stderr.truncate(0)
        env.stdout.seek(0)
        env.stderr.seek(0)
    return response
Run HTTPie and capture stderr/out and exit status. Content written to devnull will be captured only if env.devnull is set manually.

Invoke `httpie.core.main()` with `args` and `kwargs`, and return a `CLIResponse` subclass instance. The return value is either a `StrCLIResponse`, or `BytesCLIResponse` if unable to decode the output. Devnull is string when possible, bytes otherwise.

The response has the following attributes:

    `stdout` is represented by the instance itself (print r)
    `stderr`: text written to stderr
    `devnull`: text written to devnull
    `exit_status`: the exit status
    `json`: decoded JSON (if possible) or `None`

Exceptions are propagated. If you pass ``tolerate_error_exit_status=True``, then error exit statuses won't result in an exception.

Example:

$ http --auth=user:password GET pie.dev/basic-auth/user/password

>>> httpbin = getfixture('httpbin')
>>> r = http('-a', 'user:pw', httpbin + '/basic-auth/user/pw')
>>> type(r) == StrCLIResponse
True
>>> r.exit_status is ExitStatus.SUCCESS
True
>>> r.stderr
''
>>> 'HTTP/1.1 200 OK' in r
True
>>> r.json == {'authenticated': True, 'user': 'user'}
True
def http(
    *args,
    program_name='http',
    tolerate_error_exit_status=False,
    **kwargs,
) -> Union[StrCLIResponse, BytesCLIResponse]:
    # noinspection PyUnresolvedReferences
    """
    Run HTTPie and capture stderr/out and exit status.
    Content written to devnull will be captured only if
    env.devnull is set manually.

    Invoke `httpie.core.main()` with `args` and `kwargs`,
    and return a `CLIResponse` subclass instance.

    The return value is either a `StrCLIResponse`, or `BytesCLIResponse`
    if unable to decode the output. Devnull is string when possible,
    bytes otherwise.

    The response has the following attributes:

        `stdout` is represented by the instance itself (print r)
        `stderr`: text written to stderr
        `devnull`: text written to devnull
        `exit_status`: the exit status
        `json`: decoded JSON (if possible) or `None`

    Exceptions are propagated.

    If you pass ``tolerate_error_exit_status=True``, then error exit
    statuses won't result in an exception.

    Example:

    $ http --auth=user:password GET pie.dev/basic-auth/user/password

    >>> httpbin = getfixture('httpbin')
    >>> r = http('-a', 'user:pw', httpbin + '/basic-auth/user/pw')
    >>> type(r) == StrCLIResponse
    True
    >>> r.exit_status is ExitStatus.SUCCESS
    True
    >>> r.stderr
    ''
    >>> 'HTTP/1.1 200 OK' in r
    True
    >>> r.json == {'authenticated': True, 'user': 'user'}
    True

    """
    env = kwargs.get('env')
    if not env:
        env = kwargs['env'] = MockEnvironment()
    stdout = env.stdout
    stderr = env.stderr
    devnull = env.devnull

    args = list(args)
    args_with_config_defaults = args + env.config.default_options
    add_to_args = []
    if '--debug' not in args_with_config_defaults:
        if (not tolerate_error_exit_status
                and '--traceback' not in args_with_config_defaults):
            add_to_args.append('--traceback')
        if not any('--timeout' in arg for arg in args_with_config_defaults):
            add_to_args.append('--timeout=3')

    complete_args = [program_name, *add_to_args, *args]
    # print(' '.join(complete_args))

    def dump_stderr():
        stderr.seek(0)
        sys.stderr.write(stderr.read())

    try:
        try:
            exit_status = core.main(args=complete_args, **kwargs)
            if '--download' in args:
                # Let the progress reporter thread finish.
                time.sleep(.5)
        except SystemExit:
            if tolerate_error_exit_status:
                exit_status = ExitStatus.ERROR
            else:
                dump_stderr()
                raise
        except Exception:
            stderr.seek(0)
            sys.stderr.write(stderr.read())
            raise
        else:
            if (not tolerate_error_exit_status
                    and exit_status != ExitStatus.SUCCESS):
                dump_stderr()
                raise ExitStatusError(
                    'httpie.core.main() unexpectedly returned'
                    f' a non-zero exit status: {exit_status}'
                )

        stdout.seek(0)
        stderr.seek(0)
        devnull.seek(0)
        output = stdout.read()
        devnull_output = devnull.read()

        if hasattr(env, '_encoder'):
            output = env._encoder.decode(output)

        r = BaseCLIResponse.from_raw_data(output)

        try:
            devnull_output = devnull_output.decode()
        except Exception:
            pass

        r.devnull = devnull_output
        r.stderr = stderr.read()
        r.exit_status = exit_status
        r.args = args
        r.complete_args = ' '.join(complete_args)

        if r.exit_status != ExitStatus.SUCCESS:
            sys.stderr.write(r.stderr)

        # print(f'\n\n$ {r.command}\n')
        return r

    finally:
        env.cleanup()
We require some text, and continue to read until we find an ending or until the end of the string.
def expect_body(s: str) -> str:
    """
    We require some text, and continue to read until we find an ending
    or until the end of the string.

    """
    if 'content-disposition:' in s.lower():
        # Multipart body heuristic.
        final_boundary_re = re.compile('\r\n--[^-]+?--\r\n')
        match = final_boundary_re.search(s)
        if match:
            return s[match.end():]

    endings = [s.index(sep) for sep in BODY_ENDINGS if sep in s]
    if not endings:
        s = ''  # Only body
    else:
        end = min(endings)
        if end == 0:
            raise OutputMatchingError(f'Empty body: {s!r}')
        s = s[end:]
    return s
Check the command `output` for an exact full sequence of `tokens`.

>>> out = 'GET / HTTP/1.1\r\nAAA:BBB\r\n\r\nCCC\n\n'
>>> assert_output_matches(out, [Expect.REQUEST_HEADERS, Expect.BODY, Expect.SEPARATOR])
def assert_output_matches(output: str, tokens: Iterable[Expect]):
    r"""
    Check the command `output` for an exact full sequence of `tokens`.

    >>> out = 'GET / HTTP/1.1\r\nAAA:BBB\r\n\r\nCCC\n\n'
    >>> assert_output_matches(out, [Expect.REQUEST_HEADERS, Expect.BODY, Expect.SEPARATOR])

    """
    # TODO: auto-remove ansi colors to allow for testing of colorized output as well.
    expect_tokens(tokens=tokens, s=output)
>>> assert_output_does_not_match('\r\n', [Expect.BODY])
def assert_output_does_not_match(output: str, tokens: Iterable[Expect]):
    r"""
    >>> assert_output_does_not_match('\r\n', [Expect.BODY])

    """
    with pytest.raises(OutputMatchingError):
        assert_output_matches(output=output, tokens=tokens)
Callback that is used whenever --config is passed. We use this to always load the correct config. This means that the config is loaded even if the group itself never executes, so our aliases always stay available.
def read_config(ctx, param, value):
    """Callback that is used whenever --config is passed.  We use this to
    always load the correct config.  This means that the config is loaded
    even if the group itself never executes, so our aliases always stay
    available.
    """
    cfg = ctx.ensure_object(Config)
    if value is None:
        value = os.path.join(os.path.dirname(__file__), "aliases.ini")
    cfg.read_config(value)
    return value
An example application that supports aliases.
def cli():
    """An example application that supports aliases."""
Pushes changes.
def push():
    """Pushes changes."""
    click.echo("Push")
Pulls changes.
def pull():
    """Pulls changes."""
    click.echo("Pull")
Clones a repository.
def clone():
    """Clones a repository."""
    click.echo("Clone")
Commits pending changes.
def commit():
    """Commits pending changes."""
    click.echo("Commit")
Shows the status.
def status(config):
    """Shows the status."""
    click.echo(f"Status for {config.path}")
Adds an alias to the specified configuration file.
def alias(config, alias_, cmd, config_file):
    """Adds an alias to the specified configuration file."""
    config.add_alias(alias_, cmd)
    config.write_config(config_file)
    click.echo(f"Added '{alias_}' as alias for '{cmd}'")
This script prints some colors. It will also automatically remove all ANSI styles if data is piped into a file. Give it a try!
def cli():
    """This script prints some colors. It will also automatically remove
    all ANSI styles if data is piped into a file.

    Give it a try!
    """
    for color in all_colors:
        click.echo(click.style(f"I am colored {color}", fg=color))
    for color in all_colors:
        click.echo(click.style(f"I am colored {color} and bold", fg=color, bold=True))
    for color in all_colors:
        click.echo(click.style(f"I am reverse colored {color}", fg=color, reverse=True))

    click.echo(click.style("I am blinking", blink=True))
    click.echo(click.style("I am underlined", underline=True))
A complex command line interface.
def cli(ctx, verbose, home):
    """A complex command line interface."""
    ctx.verbose = verbose
    if home is not None:
        ctx.home = home
Initializes a repository.
def cli(ctx, path):
    """Initializes a repository."""
    if path is None:
        path = ctx.home
    ctx.log(f"Initialized the repository in {click.format_filename(path)}")
Shows file changes in the current working directory.
def cli(ctx):
    """Shows file changes in the current working directory."""
    ctx.log("Changed files: none")
    ctx.vlog("bla bla bla, debug info")
This script processes a bunch of images through pillow in a unix pipe. One command feeds into the next. Example:

    imagepipe open -i example01.jpg resize -w 128 display
    imagepipe open -i example02.jpg blur save
def cli():
    """This script processes a bunch of images through pillow in a unix
    pipe.  One command feeds into the next.

    Example:

    \b
        imagepipe open -i example01.jpg resize -w 128 display
        imagepipe open -i example02.jpg blur save
    """
This result callback is invoked with an iterable of all the chained subcommands. Since each subcommand in this example returns a function, we can chain them together to feed one into the other, similar to how a pipe on unix works.
def process_commands(processors):
    """This result callback is invoked with an iterable of all the chained
    subcommands.  Since each subcommand in this example returns a function,
    we can chain them together to feed one into the other, similar to how
    a pipe on unix works.
    """
    # Start with an empty iterable.
    stream = ()

    # Pipe it through all stream processors.
    for processor in processors:
        stream = processor(stream)

    # Evaluate the stream and throw away the items.
    for _ in stream:
        pass
Helper decorator to rewrite a function so that it returns another function from it.
def processor(f):
    """Helper decorator to rewrite a function so that it returns another
    function from it.
    """

    def new_func(*args, **kwargs):
        def processor(stream):
            return f(stream, *args, **kwargs)

        return processor

    return update_wrapper(new_func, f)
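A tiny illustration of the pattern with a hypothetical `double` processor (not part of the example app):

@processor
def double(stream, factor=2):
    # A stream processor: multiplies every incoming item.
    for item in stream:
        yield item * factor

pipeline = double(3)           # returns a function still awaiting a stream
print(list(pipeline([1, 2])))  # [3, 6]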
Similar to the :func:`processor` but passes through old values unchanged and does not pass through the values as parameter.
def generator(f):
    """Similar to the :func:`processor` but passes through old values
    unchanged and does not pass through the values as parameter.
    """

    @processor
    def new_func(stream, *args, **kwargs):
        yield from stream
        yield from f(*args, **kwargs)

    return update_wrapper(new_func, f)
Loads one or multiple images for processing. The input parameter can be specified multiple times to load more than one image.
def open_cmd(images):
    """Loads one or multiple images for processing.  The input parameter
    can be specified multiple times to load more than one image.
    """
    for image in images:
        try:
            click.echo(f"Opening '{image}'")
            if image == "-":
                img = Image.open(click.get_binary_stdin())
                img.filename = "-"
            else:
                img = Image.open(image)
            yield img
        except Exception as e:
            click.echo(f"Could not open image '{image}': {e}", err=True)