import ast
import itertools
import types
from collections import OrderedDict, Counter, defaultdict
from types import FrameType, TracebackType
from typing import (
    Iterator, List, Tuple, Iterable, Callable, Union,
    TypeVar, Mapping,
)

from asttokens import ASTText

T = TypeVar('T')
R = TypeVar('R')


def truncate(seq, max_length: int, middle):
    if len(seq) > max_length:
        right = (max_length - len(middle)) // 2
        left = max_length - len(middle) - right
        seq = seq[:left] + middle + seq[-right:]
    return seq
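
# Works on any sliceable sequence; e.g.
#     truncate("abcdefghijklmn", 10, "...")  ->  "abcd...lmn"
# keeps the start and end of the sequence and puts `middle` in between.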


def unique_in_order(it: Iterable[T]) -> List[T]:
    return list(OrderedDict.fromkeys(it))
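
# e.g. unique_in_order([3, 1, 3, 2, 1]) == [3, 1, 2]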


def line_range(atok: ASTText, node: ast.AST) -> Tuple[int, int]:
    """
    Returns a pair of numbers representing a half-open range
    (i.e. suitable as arguments to the `range()` builtin)
    of line numbers of the given AST node.
    """
    if isinstance(node, getattr(ast, "match_case", ())):
        start, _end = line_range(atok, node.pattern)
        _start, end = line_range(atok, node.body[-1])
        return start, end
    else:
        (start, _), (end, _) = atok.get_text_positions(node, padded=False)
        return start, end + 1
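
# For example, a node whose source spans physical lines 3-5 yields (3, 6),
# so list(range(*line_range(atok, node))) covers exactly lines 3, 4 and 5.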


def highlight_unique(lst: List[T]) -> Iterator[Tuple[T, bool]]:
    counts = Counter(lst)

    for is_common, group in itertools.groupby(lst, key=lambda x: counts[x] > 3):
        if is_common:
            group = list(group)
            highlighted = [False] * len(group)

            def highlight_index(f):
                try:
                    i = f()
                except ValueError:
                    return None
                highlighted[i] = True
                return i

            for item in set(group):
                first = highlight_index(lambda: group.index(item))
                if first is not None:
                    highlight_index(lambda: group.index(item, first + 1))
                    highlight_index(lambda: -1 - group[::-1].index(item))
        else:
            highlighted = itertools.repeat(True)

        yield from zip(group, highlighted)
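
# For values occurring more than 3 times, only the first two and the last
# occurrence in each run are marked True; infrequent values are always True.
# Illustrative example:
#     list(highlight_unique(["a"] * 5 + ["b"])) ==
#         [("a", True), ("a", True), ("a", False), ("a", False), ("a", True), ("b", True)]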


def identity(x: T) -> T:
    return x


def collapse_repeated(lst, *, collapser, mapper=identity, key=identity):
    keyed = list(map(key, lst))
    for is_highlighted, group in itertools.groupby(
            zip(lst, highlight_unique(keyed)),
            key=lambda t: t[1][1],
    ):
        original_group, highlighted_group = zip(*group)
        if is_highlighted:
            yield from map(mapper, original_group)
        else:
            keyed_group, _ = zip(*highlighted_group)
            yield collapser(list(original_group), list(keyed_group))
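
# Illustrative example: highlighted elements are mapped individually while the
# unhighlighted middle of a long run is handed to `collapser` as one chunk, e.g.
#     collapser = lambda orig, keyed: f"<{len(orig)} repeats>"
#     list(collapse_repeated(["a"] * 5 + ["b"], collapser=collapser, mapper=str.upper))
# returns ['A', 'A', '<2 repeats>', 'A', 'B'].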


def is_frame(frame_or_tb: Union[FrameType, TracebackType]) -> bool:
    assert_(isinstance(frame_or_tb, (types.FrameType, types.TracebackType)))
    return isinstance(frame_or_tb, (types.FrameType,))


def iter_stack(frame_or_tb: Union[FrameType, TracebackType]) -> Iterator[Union[FrameType, TracebackType]]:
    current: Union[FrameType, TracebackType, None] = frame_or_tb
    while current:
        yield current
        if is_frame(current):
            current = current.f_back
        else:
            current = current.tb_next


def frame_and_lineno(frame_or_tb: Union[FrameType, TracebackType]) -> Tuple[FrameType, int]:
    if is_frame(frame_or_tb):
        return frame_or_tb, frame_or_tb.f_lineno
    else:
        return frame_or_tb.tb_frame, frame_or_tb.tb_lineno
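
# Typical usage (illustrative, assumes `import sys` at the call site):
#     for entry in iter_stack(sys.exc_info()[2]):
#         frame, lineno = frame_and_lineno(entry)
# Frames are walked via f_back (towards the caller), tracebacks via tb_next
# (towards where the exception was raised); both kinds yield a (frame, lineno) pair.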


def group_by_key_func(iterable: Iterable[T], key_func: Callable[[T], R]) -> Mapping[R, List[T]]:
    # noinspection PyUnresolvedReferences
    """
    Create a dictionary from an iterable such that the keys are the result of
    evaluating a key function on elements of the iterable and the values are
    lists of elements all of which correspond to the key.

    >>> def si(d): return sorted(d.items())
    >>> si(group_by_key_func("a bb ccc d ee fff".split(), len))
    [(1, ['a', 'd']), (2, ['bb', 'ee']), (3, ['ccc', 'fff'])]
    >>> si(group_by_key_func([-1, 0, 1, 3, 6, 8, 9, 2], lambda x: x % 2))
    [(0, [0, 6, 8, 2]), (1, [-1, 1, 3, 9])]
    """
    result = defaultdict(list)
    for item in iterable:
        result[key_func(item)].append(item)
    return result


class cached_property(object):
    """
    A property that is only computed once per instance and then replaces itself
    with an ordinary attribute. Deleting the attribute resets the property.

    Based on https://github.com/pydanny/cached-property/blob/master/cached_property.py
    """

    def __init__(self, func):
        self.__doc__ = func.__doc__
        self.func = func

    def cached_property_wrapper(self, obj, _cls):
        if obj is None:
            return self

        value = obj.__dict__[self.func.__name__] = self.func(obj)
        return value

    __get__ = cached_property_wrapper
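
# Typical usage (illustrative):
#
#     class Dataset:
#         @cached_property
#         def stats(self):
#             return compute_stats(self)  # placeholder for real work; runs only on first access
#
# The computed value is stored in the instance's __dict__ under the method name,
# so later attribute lookups bypass the descriptor entirely until it is deleted.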


def _pygmented_with_ranges(formatter, code, ranges):
    import pygments
    from pygments.lexers import get_lexer_by_name

    class MyLexer(type(get_lexer_by_name("python3"))):
        def get_tokens(self, text):
            length = 0
            for ttype, value in super().get_tokens(text):
                if any(start <= length < end for start, end in ranges):
                    ttype = ttype.ExecutingNode
                length += len(value)
                yield ttype, value

    lexer = MyLexer(stripnl=False)
    try:
        highlighted = pygments.highlight(code, lexer, formatter)
    except Exception:
        # When pygments fails, prefer code without highlighting over crashing
        highlighted = code
    return highlighted.splitlines()
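
# Illustrative call (the formatter choice here is just an example):
#     from pygments.formatters import Terminal256Formatter
#     lines = _pygmented_with_ranges(Terminal256Formatter(), "x = 1\ny = 2\n", [(0, 5)])
# Tokens whose character offset falls inside one of the (start, end) ranges are
# re-tagged with the ExecutingNode sub-token type so a formatter style can mark them.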


def assert_(condition, error=""):
    if not condition:
        if isinstance(error, str):
            error = AssertionError(error)
        raise error
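
# e.g. assert_(x > 0, "x must be positive") raises AssertionError("x must be positive"),
# while assert_(x > 0, TypeError("bad x")) raises the given exception instance directly.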


# Copied from the standard traceback module pre-3.11
def some_str(value):
    try:
        return str(value)
    except:
        return '<unprintable %s object>' % type(value).__name__