Dataset columns:
code: string, 26 to 870k characters
docstring: string, 1 to 65.6k characters
func_name: string, 1 to 194 characters
language: string, 1 distinct value
repo: string, 8 to 68 characters
path: string, 5 to 194 characters
url: string, 46 to 254 characters
license: string, 4 distinct values
def match(self, node, results=None):
    """Does this pattern exactly match a node?"""
    return self.match_seq([node], results)
Does this pattern exactly match a node?
match
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
MIT
def match_seq(self, nodes, results=None):
    """Does this pattern exactly match a sequence of nodes?"""
    for c, r in self.generate_matches(nodes):
        if c == len(nodes):
            if results is not None:
                results.update(r)
                if self.name:
                    results[self.name] = list(nodes)
            return True
    return False
Does this pattern exactly match a sequence of nodes?
match_seq
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
MIT
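Usage sketch (not a dataset row; assumes the standard lib2to3 package these rows are drawn from, as shipped with CPython 3.7): WildcardPattern.match() simply wraps match_seq() around a one-element sequence, and a successful match records named submatches in the results dict.

from lib2to3.pgen2 import token
from lib2to3.pytree import Leaf, WildcardPattern

leaf = Leaf(token.NAME, "x")                          # a single NAME leaf
pattern = WildcardPattern(min=1, max=1, name="any")   # matches exactly one arbitrary node

results = {}
print(pattern.match(leaf, results))   # True -- delegates to match_seq([leaf], results)
print(results["any"])                 # the one-node sequence that was matched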
def generate_matches(self, nodes):
    """
    Generator yielding matches for a sequence of nodes.

    Args:
        nodes: sequence of nodes

    Yields:
        (count, results) tuples where:
        count: the match comprises nodes[:count];
        results: dict containing named submatches.
    """
    if self.content is None:
        # Shortcut for special case (see __init__.__doc__)
        for count in range(self.min, 1 + min(len(nodes), self.max)):
            r = {}
            if self.name:
                r[self.name] = nodes[:count]
            yield count, r
    elif self.name == "bare_name":
        yield self._bare_name_matches(nodes)
    else:
        # The reason for this is that hitting the recursion limit usually
        # results in some ugly messages about how RuntimeErrors are being
        # ignored. We only have to do this on CPython, though, because other
        # implementations don't have this nasty bug in the first place.
        if hasattr(sys, "getrefcount"):
            save_stderr = sys.stderr
            sys.stderr = StringIO()
        try:
            for count, r in self._recursive_matches(nodes, 0):
                if self.name:
                    r[self.name] = nodes[:count]
                yield count, r
        except RuntimeError:
            # We fall back to the iterative pattern matching scheme if the recursive
            # scheme hits the recursion limit.
            for count, r in self._iterative_matches(nodes):
                if self.name:
                    r[self.name] = nodes[:count]
                yield count, r
        finally:
            if hasattr(sys, "getrefcount"):
                sys.stderr = save_stderr
Generator yielding matches for a sequence of nodes. Args: nodes: sequence of nodes Yields: (count, results) tuples where: count: the match comprises nodes[:count]; results: dict containing named submatches.
generate_matches
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
MIT
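Another small sketch under the same assumptions: with content=None, WildcardPattern.generate_matches() just yields every allowed prefix length of the node sequence.

from lib2to3.pgen2 import token
from lib2to3.pytree import Leaf, WildcardPattern

nodes = [Leaf(token.NAME, n) for n in ("a", "b", "c")]
wild = WildcardPattern(min=0, max=2, name="prefix")   # content=None: bare repetition
for count, r in wild.generate_matches(nodes):
    print(count, r["prefix"])   # 0 nodes, then 1 node, then 2 nodes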
def _iterative_matches(self, nodes):
    """Helper to iteratively yield the matches."""
    nodelen = len(nodes)
    if 0 >= self.min:
        yield 0, {}

    results = []
    # generate matches that use just one alt from self.content
    for alt in self.content:
        for c, r in generate_matches(alt, nodes):
            yield c, r
            results.append((c, r))

    # for each match, iterate down the nodes
    while results:
        new_results = []
        for c0, r0 in results:
            # stop if the entire set of nodes has been matched
            if c0 < nodelen and c0 <= self.max:
                for alt in self.content:
                    for c1, r1 in generate_matches(alt, nodes[c0:]):
                        if c1 > 0:
                            r = {}
                            r.update(r0)
                            r.update(r1)
                            yield c0 + c1, r
                            new_results.append((c0 + c1, r))
        results = new_results
Helper to iteratively yield the matches.
_iterative_matches
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
MIT
def _bare_name_matches(self, nodes):
    """Special optimized matcher for bare_name."""
    count = 0
    r = {}
    done = False
    max = len(nodes)
    while not done and count < max:
        done = True
        for leaf in self.content:
            if leaf[0].match(nodes[count], r):
                count += 1
                done = False
                break
    r[self.name] = nodes[:count]
    return count, r
Special optimized matcher for bare_name.
_bare_name_matches
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
MIT
def _recursive_matches(self, nodes, count):
    """Helper to recursively yield the matches."""
    assert self.content is not None
    if count >= self.min:
        yield 0, {}
    if count < self.max:
        for alt in self.content:
            for c0, r0 in generate_matches(alt, nodes):
                for c1, r1 in self._recursive_matches(nodes[c0:], count+1):
                    r = {}
                    r.update(r0)
                    r.update(r1)
                    yield c0 + c1, r
Helper to recursively yield the matches.
_recursive_matches
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
MIT
def __init__(self, content=None):
    """
    Initializer.

    The argument is either a pattern or None.  If it is None, this
    only matches an empty sequence (effectively '$' in regex lingo).
    If it is not None, this matches whenever the argument pattern
    doesn't have any matches.
    """
    if content is not None:
        assert isinstance(content, BasePattern), repr(content)
    self.content = content
Initializer. The argument is either a pattern or None. If it is None, this only matches an empty sequence (effectively '$' in regex lingo). If it is not None, this matches whenever the argument pattern doesn't have any matches.
__init__
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
MIT
def generate_matches(patterns, nodes):
    """
    Generator yielding matches for a sequence of patterns and nodes.

    Args:
        patterns: a sequence of patterns
        nodes: a sequence of nodes

    Yields:
        (count, results) tuples where:
        count: the entire sequence of patterns matches nodes[:count];
        results: dict containing named submatches.
    """
    if not patterns:
        yield 0, {}
    else:
        p, rest = patterns[0], patterns[1:]
        for c0, r0 in p.generate_matches(nodes):
            if not rest:
                yield c0, r0
            else:
                for c1, r1 in generate_matches(rest, nodes[c0:]):
                    r = {}
                    r.update(r0)
                    r.update(r1)
                    yield c0 + c1, r
Generator yielding matches for a sequence of patterns and nodes. Args: patterns: a sequence of patterns nodes: a sequence of nodes Yields: (count, results) tuples where: count: the entire sequence of patterns matches nodes[:count]; results: dict containing named submatches.
generate_matches
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pytree.py
MIT
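Sketch of the module-level generate_matches() (assumed setup; the leaves are invented): it chains each pattern's generator over successive tails of the node sequence and merges the named results.

from lib2to3.pgen2 import token
from lib2to3.pytree import Leaf, LeafPattern, generate_matches

nodes = [Leaf(token.NAME, "a"), Leaf(token.NAME, "b")]
patterns = [LeafPattern(token.NAME, name="first"),
            LeafPattern(token.NAME, name="second")]

for count, results in generate_matches(patterns, nodes):
    print(count, results)   # 2, with 'first' and 'second' bound to the two leaves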
def add_fixer(self, fixer):
    """Reduces a fixer's pattern tree to a linear path and adds it
    to the matcher(a common Aho-Corasick automaton). The fixer is
    appended on the matching states and called when they are reached"""
    self.fixers.append(fixer)
    tree = reduce_tree(fixer.pattern_tree)
    linear = tree.get_linear_subpattern()
    match_nodes = self.add(linear, start=self.root)
    for match_node in match_nodes:
        match_node.fixers.append(fixer)
Reduces a fixer's pattern tree to a linear path and adds it to the matcher(a common Aho-Corasick automaton). The fixer is appended on the matching states and called when they are reached
add_fixer
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/btm_matcher.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/btm_matcher.py
MIT
def run(self, leaves):
    """The main interface with the bottom matcher. The tree is
    traversed from the bottom using the constructed
    automaton. Nodes are only checked once as the tree is
    retraversed. When the automaton fails, we give it one more
    shot(in case the above tree matches as a whole with the
    rejected leaf), then we break for the next leaf. There is the
    special case of multiple arguments(see code comments) where we
    recheck the nodes

    Args:
       The leaves of the AST tree to be matched

    Returns:
       A dictionary of node matches with fixers as the keys
    """
    current_ac_node = self.root
    results = defaultdict(list)
    for leaf in leaves:
        current_ast_node = leaf
        while current_ast_node:
            current_ast_node.was_checked = True
            for child in current_ast_node.children:
                # multiple statements, recheck
                if isinstance(child, pytree.Leaf) and child.value == ";":
                    current_ast_node.was_checked = False
                    break
            if current_ast_node.type == 1:
                #name
                node_token = current_ast_node.value
            else:
                node_token = current_ast_node.type

            if node_token in current_ac_node.transition_table:
                #token matches
                current_ac_node = current_ac_node.transition_table[node_token]
                for fixer in current_ac_node.fixers:
                    results[fixer].append(current_ast_node)
            else:
                #matching failed, reset automaton
                current_ac_node = self.root
                if (current_ast_node.parent is not None
                        and current_ast_node.parent.was_checked):
                    #the rest of the tree upwards has been checked, next leaf
                    break

                #recheck the rejected node once from the root
                if node_token in current_ac_node.transition_table:
                    #token matches
                    current_ac_node = current_ac_node.transition_table[node_token]
                    for fixer in current_ac_node.fixers:
                        results[fixer].append(current_ast_node)

            current_ast_node = current_ast_node.parent
    return results
The main interface with the bottom matcher. The tree is traversed from the bottom using the constructed automaton. Nodes are only checked once as the tree is retraversed. When the automaton fails, we give it one more shot(in case the above tree matches as a whole with the rejected leaf), then we break for the next leaf. There is the special case of multiple arguments(see code comments) where we recheck the nodes Args: The leaves of the AST tree to be matched Returns: A dictionary of node matches with fixers as the keys
run
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/btm_matcher.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/btm_matcher.py
MIT
def __init__(self, grammar):
    """Initializer.

    Creates an attribute for each grammar symbol (nonterminal),
    whose value is the symbol's type (an int >= 256).
    """
    for name, symbol in grammar.symbol2number.items():
        setattr(self, name, symbol)
Initializer. Creates an attribute for each grammar symbol (nonterminal), whose value is the symbol's type (an int >= 256).
__init__
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pygram.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pygram.py
MIT
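Sketch (assumes lib2to3.pygram): the Symbols initializer above is what makes pygram.python_symbols expose one integer attribute per grammar nonterminal.

from lib2to3 import pygram

print(pygram.python_symbols.funcdef)   # an int >= 256
print(pygram.python_grammar.symbol2number["funcdef"] == pygram.python_symbols.funcdef)   # True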
def get_refactorer(fixer_pkg="lib2to3", fixers=None, options=None):
    """
    A convenience function for creating a RefactoringTool for tests.

    fixers is a list of fixers for the RefactoringTool to use. By default
    "lib2to3.fixes.*" is used. options is an optional dictionary of options to
    be passed to the RefactoringTool.
    """
    if fixers is not None:
        fixers = [fixer_pkg + ".fixes.fix_" + fix for fix in fixers]
    else:
        fixers = refactor.get_fixers_from_package(fixer_pkg + ".fixes")
    options = options or {}
    return refactor.RefactoringTool(fixers, options, explicit=True)
A convenience function for creating a RefactoringTool for tests. fixers is a list of fixers for the RefactoringTool to use. By default "lib2to3.fixes.*" is used. options is an optional dictionary of options to be passed to the RefactoringTool.
get_refactorer
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/support.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/support.py
MIT
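Rough equivalent of the helper above using lib2to3's public refactor API (the source string is invented; the output shown in the comment is approximate).

from lib2to3 import refactor

fixers = refactor.get_fixers_from_package("lib2to3.fixes")
rt = refactor.RefactoringTool(fixers, options={}, explicit=True)
tree = rt.refactor_string("print 'hello'\n", "<example>")
print(str(tree))   # roughly: print('hello')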
def test_refactor_docstring(self): rt = self.rt() doc = """ >>> example() 42 """ out = rt.refactor_docstring(doc, "<test>") self.assertEqual(out, doc) doc = """ >>> def parrot(): ... return 43 """ out = rt.refactor_docstring(doc, "<test>") self.assertNotEqual(out, doc)
out = rt.refactor_docstring(doc, "<test>") self.assertEqual(out, doc) doc =
test_refactor_docstring
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_refactor.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_refactor.py
MIT
def setup_test_source_trees(self): """Setup a test source tree and output destination tree.""" self.temp_dir = tempfile.mkdtemp() # tearDown() cleans this up. self.py2_src_dir = os.path.join(self.temp_dir, "python2_project") self.py3_dest_dir = os.path.join(self.temp_dir, "python3_project") os.mkdir(self.py2_src_dir) os.mkdir(self.py3_dest_dir) # Turn it into a package with a few files. self.setup_files = [] open(os.path.join(self.py2_src_dir, "__init__.py"), "w").close() self.setup_files.append("__init__.py") shutil.copy(PY2_TEST_MODULE, self.py2_src_dir) self.setup_files.append(os.path.basename(PY2_TEST_MODULE)) self.trivial_py2_file = os.path.join(self.py2_src_dir, "trivial.py") self.init_py2_file = os.path.join(self.py2_src_dir, "__init__.py") with open(self.trivial_py2_file, "w") as trivial: trivial.write("print 'I need a simple conversion.'") self.setup_files.append("trivial.py")
Setup a test source tree and output destination tree.
setup_test_source_trees
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_main.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_main.py
MIT
def test_filename_changing_on_output_single_dir(self): """2to3 a single directory with a new output dir and suffix.""" self.setup_test_source_trees() out = io.StringIO() err = io.StringIO() suffix = "TEST" ret = self.run_2to3_capture( ["-n", "--add-suffix", suffix, "--write-unchanged-files", "--no-diffs", "--output-dir", self.py3_dest_dir, self.py2_src_dir], io.StringIO(""), out, err) self.assertEqual(ret, 0) stderr = err.getvalue() self.assertIn(" implies -w.", stderr) self.assertIn( "Output in %r will mirror the input directory %r layout" % ( self.py3_dest_dir, self.py2_src_dir), stderr) self.assertEqual(set(name+suffix for name in self.setup_files), set(os.listdir(self.py3_dest_dir))) for name in self.setup_files: self.assertIn("Writing converted %s to %s" % ( os.path.join(self.py2_src_dir, name), os.path.join(self.py3_dest_dir, name+suffix)), stderr) sep = re.escape(os.sep) self.assertRegex( stderr, r"No changes to .*/__init__\.py".replace("/", sep)) self.assertNotRegex( stderr, r"No changes to .*/trivial\.py".replace("/", sep))
2to3 a single directory with a new output dir and suffix.
test_filename_changing_on_output_single_dir
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_main.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_main.py
MIT
def test_filename_changing_on_output_two_files(self): """2to3 two files in one directory with a new output dir.""" self.setup_test_source_trees() err = io.StringIO() py2_files = [self.trivial_py2_file, self.init_py2_file] expected_files = set(os.path.basename(name) for name in py2_files) ret = self.run_2to3_capture( ["-n", "-w", "--write-unchanged-files", "--no-diffs", "--output-dir", self.py3_dest_dir] + py2_files, io.StringIO(""), io.StringIO(), err) self.assertEqual(ret, 0) stderr = err.getvalue() self.assertIn( "Output in %r will mirror the input directory %r layout" % ( self.py3_dest_dir, self.py2_src_dir), stderr) self.assertEqual(expected_files, set(os.listdir(self.py3_dest_dir)))
2to3 two files in one directory with a new output dir.
test_filename_changing_on_output_two_files
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_main.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_main.py
MIT
def test_filename_changing_on_output_single_file(self): """2to3 a single file with a new output dir.""" self.setup_test_source_trees() err = io.StringIO() ret = self.run_2to3_capture( ["-n", "-w", "--no-diffs", "--output-dir", self.py3_dest_dir, self.trivial_py2_file], io.StringIO(""), io.StringIO(), err) self.assertEqual(ret, 0) stderr = err.getvalue() self.assertIn( "Output in %r will mirror the input directory %r layout" % ( self.py3_dest_dir, self.py2_src_dir), stderr) self.assertEqual(set([os.path.basename(self.trivial_py2_file)]), set(os.listdir(self.py3_dest_dir)))
2to3 a single file with a new output dir.
test_filename_changing_on_output_single_file
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_main.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_main.py
MIT
def test_one_line_suites(self): b = """ try: raise TypeError except TypeError, e: pass """ a = """ try: raise TypeError except TypeError as e: pass """ self.check(b, a) b = """ try: raise TypeError except TypeError, e: pass """ a = """ try: raise TypeError except TypeError as e: pass """ self.check(b, a) b = """ try: raise TypeError except TypeError, e: pass """ a = """ try: raise TypeError except TypeError as e: pass """ self.check(b, a) b = """ try: raise TypeError except TypeError, e: pass else: function() finally: done() """ a = """ try: raise TypeError except TypeError as e: pass else: function() finally: done() """ self.check(b, a)
a =
test_one_line_suites
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_import_module_usage(self): for old, new in self.modules.items(): b = """ import %s foo(%s.bar) """ % (old, old) a = """ import %s foo(%s.bar) """ % (new, new) self.check(b, a) b = """ from %s import x %s = 23 """ % (old, old) a = """ from %s import x %s = 23 """ % (new, old) self.check(b, a) s = """ def f(): %s.method() """ % (old,) self.unchanged(s) # test nested usage b = """ import %s %s.bar(%s.foo) """ % (old, old, old) a = """ import %s %s.bar(%s.foo) """ % (new, new, new) self.check(b, a) b = """ import %s x.%s """ % (old, old) a = """ import %s x.%s """ % (new, old) self.check(b, a)
% (old, old) a =
test_import_module_usage
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_multiple_imports_as(self): b = """ import copy_reg as bar, HTMLParser as foo, urlparse s = urlparse.spam(bar.foo()) """ a = """ import copyreg as bar, html.parser as foo, urllib.parse s = urllib.parse.spam(bar.foo()) """ self.check(b, a)
a =
test_multiple_imports_as
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_indented(self): b = """ def foo(): from urllib import urlencode, urlopen """ a = """ def foo(): from urllib.parse import urlencode from urllib.request import urlopen """ self.check(b, a) b = """ def foo(): other() from urllib import urlencode, urlopen """ a = """ def foo(): other() from urllib.parse import urlencode from urllib.request import urlopen """ self.check(b, a)
a =
test_indented
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_import_module_usage(self): for old, changes in self.modules.items(): for new, members in changes: for member in members: new_import = ", ".join([n for (n, mems) in self.modules[old]]) b = """ import %s foo(%s.%s) """ % (old, old, member) a = """ import %s foo(%s.%s) """ % (new_import, new, member) self.check(b, a) b = """ import %s %s.%s(%s.%s) """ % (old, old, member, old, member) a = """ import %s %s.%s(%s.%s) """ % (new_import, new, member, new, member) self.check(b, a)
% (old, old, member) a =
test_import_module_usage
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_prefix_preservation_1(self): b = """ for a in b: foo(a) a.next() """ a = """ for a in b: foo(a) next(a) """ self.check(b, a)
a =
test_prefix_preservation_1
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_prefix_preservation_2(self): b = """ for a in b: foo(a) # abc # def a.next() """ a = """ for a in b: foo(a) # abc # def next(a) """ self.check(b, a)
a =
test_prefix_preservation_2
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_prefix_preservation_3(self): b = """ next = 5 for a in b: foo(a) a.next() """ a = """ next = 5 for a in b: foo(a) a.__next__() """ self.check(b, a, ignore_warnings=True)
a =
test_prefix_preservation_3
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_prefix_preservation_4(self): b = """ next = 5 for a in b: foo(a) # abc # def a.next() """ a = """ next = 5 for a in b: foo(a) # abc # def a.__next__() """ self.check(b, a, ignore_warnings=True)
a =
test_prefix_preservation_4
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_prefix_preservation_5(self): b = """ next = 5 for a in b: foo(foo(a), # abc a.next()) """ a = """ next = 5 for a in b: foo(foo(a), # abc a.__next__()) """ self.check(b, a, ignore_warnings=True)
a =
test_prefix_preservation_5
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_prefix_preservation_6(self): b = """ for a in b: foo(foo(a), # abc a.next()) """ a = """ for a in b: foo(foo(a), # abc next(a)) """ self.check(b, a)
a =
test_prefix_preservation_6
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_method_1(self): b = """ class A: def next(self): pass """ a = """ class A: def __next__(self): pass """ self.check(b, a)
a =
test_method_1
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_method_2(self): b = """ class A(object): def next(self): pass """ a = """ class A(object): def __next__(self): pass """ self.check(b, a)
a =
test_method_2
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_method_3(self): b = """ class A: def next(x): pass """ a = """ class A: def __next__(x): pass """ self.check(b, a)
a =
test_method_3
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_method_4(self): b = """ class A: def __init__(self, foo): self.foo = foo def next(self): pass def __iter__(self): return self """ a = """ class A: def __init__(self, foo): self.foo = foo def __next__(self): pass def __iter__(self): return self """ self.check(b, a)
a =
test_method_4
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_shadowing_funcdef_2(self): b = """ def next(a): pass class A: def next(self): pass it.next() """ a = """ def next(a): pass class A: def __next__(self): pass it.__next__() """ self.warns(b, a, "Calls to builtin next() possibly shadowed")
a =
test_shadowing_funcdef_2
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_1(self): b = """ class A: def __nonzero__(self): pass """ a = """ class A: def __bool__(self): pass """ self.check(b, a)
a =
test_1
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_2(self): b = """ class A(object): def __nonzero__(self): pass """ a = """ class A(object): def __bool__(self): pass """ self.check(b, a)
a =
test_2
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_import_module_usage(self): for mod, (old, new) in list(self.modules.items()): b = """ import %s foo(%s, %s.%s) """ % (mod, mod, mod, old) a = """ import %s foo(%s, %s.%s) """ % (mod, mod, mod, new) self.check(b, a)
% (mod, mod, mod, old) a =
test_import_module_usage
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def XXX_test_from_import_usage(self): # not implemented yet for mod, (old, new) in list(self.modules.items()): b = """ from %s import %s foo(%s, %s) """ % (mod, old, mod, old) a = """ from %s import %s foo(%s, %s) """ % (mod, new, mod, new) self.check(b, a)
% (mod, old, mod, old) a =
XXX_test_from_import_usage
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_map_basic(self): b = """x = map(f, 'abc')""" a = """x = list(map(f, 'abc'))""" self.check(b, a) b = """x = len(map(f, 'abc', 'def'))""" a = """x = len(list(map(f, 'abc', 'def')))""" self.check(b, a) b = """x = map(None, 'abc')""" a = """x = list('abc')""" self.check(b, a) b = """x = map(lambda x: x+1, range(4))""" a = """x = [x+1 for x in range(4)]""" self.check(b, a) # Note the parens around x b = """x = map(lambda (x): x+1, range(4))""" a = """x = [x+1 for x in range(4)]""" self.check(b, a) b = """ foo() # foo map(f, x) """ a = """ foo() # foo list(map(f, x)) """ self.warns(b, a, "You should use a for loop here")
a =
test_map_basic
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_while(self): b = """while 1: foo()""" a = """while True: foo()""" self.check(b, a) b = """while 1: foo()""" a = """while True: foo()""" self.check(b, a) b = """ while 1: foo() """ a = """ while True: foo() """ self.check(b, a)
a =
test_while
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_sort_list_call(self): b = """ v = list(t) v.sort() foo(v) """ a = """ v = sorted(t) foo(v) """ self.check(b, a) b = """ v = list(foo(b) + d) v.sort() foo(v) """ a = """ v = sorted(foo(b) + d) foo(v) """ self.check(b, a) b = """ while x: v = list(t) v.sort() foo(v) """ a = """ while x: v = sorted(t) foo(v) """ self.check(b, a) b = """ v = list(t) # foo v.sort() foo(v) """ a = """ v = sorted(t) # foo foo(v) """ self.check(b, a) b = r""" v = list( t) v.sort() foo(v) """ a = r""" v = sorted( t) foo(v) """ self.check(b, a) b = r""" try: m = list(s) m.sort() except: pass """ a = r""" try: m = sorted(s) except: pass """ self.check(b, a) b = r""" try: m = list(s) # foo m.sort() except: pass """ a = r""" try: m = sorted(s) # foo except: pass """ self.check(b, a) b = r""" m = list(s) # more comments m.sort()""" a = r""" m = sorted(s) # more comments""" self.check(b, a)
a =
test_sort_list_call
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_sort_simple_expr(self): b = """ v = t v.sort() foo(v) """ a = """ v = sorted(t) foo(v) """ self.check(b, a) b = """ v = foo(b) v.sort() foo(v) """ a = """ v = sorted(foo(b)) foo(v) """ self.check(b, a) b = """ v = b.keys() v.sort() foo(v) """ a = """ v = sorted(b.keys()) foo(v) """ self.check(b, a) b = """ v = foo(b) + d v.sort() foo(v) """ a = """ v = sorted(foo(b) + d) foo(v) """ self.check(b, a) b = """ while x: v = t v.sort() foo(v) """ a = """ while x: v = sorted(t) foo(v) """ self.check(b, a) b = """ v = t # foo v.sort() foo(v) """ a = """ v = sorted(t) # foo foo(v) """ self.check(b, a) b = r""" v = t v.sort() foo(v) """ a = r""" v = sorted(t) foo(v) """ self.check(b, a)
a =
test_sort_simple_expr
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_sort_unchanged(self): s = """ v = list(t) w.sort() foo(w) """ self.unchanged(s) s = """ v = list(t) v.sort(u) foo(v) """ self.unchanged(s)
self.unchanged(s) s =
test_sort_unchanged
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_prefix(self): b = """ # prefix import foo.bar """ a = """ # prefix from . import foo.bar """ self.check_both(b, a)
a =
test_prefix
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_whitespace(self): b = """set( [1, 2])""" a = """{1, 2}""" self.check(b, a) b = """set([1 , 2])""" a = """{1 , 2}""" self.check(b, a) b = """set([ 1 ])""" a = """{ 1 }""" self.check(b, a) b = """set( [1] )""" a = """{1}""" self.check(b, a) b = """set([ 1, 2 ])""" a = """{ 1, 2 }""" self.check(b, a) b = """set([x for x in y ])""" a = """{x for x in y }""" self.check(b, a) b = """set( [1, 2] ) """ a = """{1, 2}\n""" self.check(b, a)
a = """{1, 2}\n
test_whitespace
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_comments(self): b = """set((1, 2)) # Hi""" a = """{1, 2} # Hi""" self.check(b, a) # This isn't optimal behavior, but the fixer is optional. b = """ # Foo set( # Bar (1, 2) ) """ a = """ # Foo {1, 2} """ self.check(b, a)
a =
test_comments
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_unchanged(self): self.unchanged("class X(): pass") self.unchanged("class X(object): pass") self.unchanged("class X(object1, object2): pass") self.unchanged("class X(object1, object2, object3): pass") self.unchanged("class X(metaclass=Meta): pass") self.unchanged("class X(b, arg=23, metclass=Meta): pass") self.unchanged("class X(b, arg=23, metaclass=Meta, other=42): pass") s = """ class X: def __metaclass__(self): pass """ self.unchanged(s) s = """ class X: a[23] = 74 """ self.unchanged(s)
self.unchanged(s) s =
test_unchanged
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_comments(self): b = """ class X: # hi __metaclass__ = AppleMeta """ a = """ class X(metaclass=AppleMeta): # hi pass """ self.check(b, a) b = """ class X: __metaclass__ = Meta # Bedtime! """ a = """ class X(metaclass=Meta): pass # Bedtime! """ self.check(b, a)
a =
test_comments
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_meta(self): # no-parent class, odd body b = """ class X(): __metaclass__ = Q pass """ a = """ class X(metaclass=Q): pass """ self.check(b, a) # one parent class, no body b = """class X(object): __metaclass__ = Q""" a = """class X(object, metaclass=Q): pass""" self.check(b, a) # one parent, simple body b = """ class X(object): __metaclass__ = Meta bar = 7 """ a = """ class X(object, metaclass=Meta): bar = 7 """ self.check(b, a) b = """ class X: __metaclass__ = Meta; x = 4; g = 23 """ a = """ class X(metaclass=Meta): x = 4; g = 23 """ self.check(b, a) # one parent, simple body, __metaclass__ last b = """ class X(object): bar = 7 __metaclass__ = Meta """ a = """ class X(object, metaclass=Meta): bar = 7 """ self.check(b, a) # redefining __metaclass__ b = """ class X(): __metaclass__ = A __metaclass__ = B bar = 7 """ a = """ class X(metaclass=B): bar = 7 """ self.check(b, a) # multiple inheritance, simple body b = """ class X(clsA, clsB): __metaclass__ = Meta bar = 7 """ a = """ class X(clsA, clsB, metaclass=Meta): bar = 7 """ self.check(b, a) # keywords in the class statement b = """class m(a, arg=23): __metaclass__ = Meta""" a = """class m(a, arg=23, metaclass=Meta): pass""" self.check(b, a) b = """ class X(expression(2 + 4)): __metaclass__ = Meta """ a = """ class X(expression(2 + 4), metaclass=Meta): pass """ self.check(b, a) b = """ class X(expression(2 + 4), x**4): __metaclass__ = Meta """ a = """ class X(expression(2 + 4), x**4, metaclass=Meta): pass """ self.check(b, a) b = """ class X: __metaclass__ = Meta save.py = 23 """ a = """ class X(metaclass=Meta): save.py = 23 """ self.check(b, a)
a =
test_meta
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_indentation(self): b = """ if 1: os.getcwdu() """ a = """ if 1: os.getcwd() """ self.check(b, a)
a =
test_indentation
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_simple(self): b = """ import sys sys.exitfunc = my_atexit """ a = """ import sys import atexit atexit.register(my_atexit) """ self.check(b, a)
a =
test_simple
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_names_import(self): b = """ import sys, crumbs sys.exitfunc = my_func """ a = """ import sys, crumbs, atexit atexit.register(my_func) """ self.check(b, a)
a =
test_names_import
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_complex_expression(self): b = """ import sys sys.exitfunc = do(d)/a()+complex(f=23, g=23)*expression """ a = """ import sys import atexit atexit.register(do(d)/a()+complex(f=23, g=23)*expression) """ self.check(b, a)
a =
test_complex_expression
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_comments(self): b = """ import sys # Foo sys.exitfunc = f # Blah """ a = """ import sys import atexit # Foo atexit.register(f) # Blah """ self.check(b, a) b = """ import apples, sys, crumbs, larry # Pleasant comments sys.exitfunc = func """ a = """ import apples, sys, crumbs, larry, atexit # Pleasant comments atexit.register(func) """ self.check(b, a)
a =
test_comments
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def test_in_a_function(self): b = """ import sys def f(): sys.exitfunc = func """ a = """ import sys import atexit def f(): atexit.register(func) """ self.check(b, a)
a =
test_in_a_function
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_fixers.py
MIT
def _Call(self, name, args=None, prefix=None):
    """Help the next test"""
    children = []
    if isinstance(args, list):
        for arg in args:
            children.append(arg)
            children.append(Comma())
        children.pop()
    return Call(Name(name), children, prefix)
Help the next test
_Call
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_util.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/test_util.py
MIT
def testStringLiterals(self): x = ''; y = ""; self.assert_(len(x) == 0 and x == y) x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39) x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34) x = "doesn't \"shrink\" does it" y = 'doesn\'t "shrink" does it' self.assert_(len(x) == 24 and x == y) x = "does \"shrink\" doesn't it" y = 'does "shrink" doesn\'t it' self.assert_(len(x) == 24 and x == y) x = """ The "quick" brown fox jumps over the 'lazy' dog. """ y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' self.assertEquals(x, y) y = ''' The "quick" brown fox jumps over the 'lazy' dog. ''' self.assertEquals(x, y) y = "\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the 'lazy' dog.\n\ " self.assertEquals(x, y) y = '\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the \'lazy\' dog.\n\ ' self.assertEquals(x, y) x = rf"hello \{True}"; y = f"hello \\{True}" self.assertEquals(x, y)
y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' self.assertEquals(x, y) y =
testStringLiterals
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/data/py3_test_grammar.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/data/py3_test_grammar.py
MIT
def testStringLiterals(self): x = ''; y = ""; self.assert_(len(x) == 0 and x == y) x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39) x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34) x = "doesn't \"shrink\" does it" y = 'doesn\'t "shrink" does it' self.assert_(len(x) == 24 and x == y) x = "does \"shrink\" doesn't it" y = 'does "shrink" doesn\'t it' self.assert_(len(x) == 24 and x == y) x = """ The "quick" brown fox jumps over the 'lazy' dog. """ y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' self.assertEquals(x, y) y = ''' The "quick" brown fox jumps over the 'lazy' dog. ''' self.assertEquals(x, y) y = "\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the 'lazy' dog.\n\ " self.assertEquals(x, y) y = '\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the \'lazy\' dog.\n\ ' self.assertEquals(x, y)
y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' self.assertEquals(x, y) y =
testStringLiterals
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/data/py2_test_grammar.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/tests/data/py2_test_grammar.py
MIT
def parse_tokens(self, tokens, debug=False):
    """Parse a series of tokens and return the syntax tree."""
    # XXX Move the prefix computation into a wrapper around tokenize.
    p = parse.Parser(self.grammar, self.convert)
    p.setup()
    lineno = 1
    column = 0
    type = value = start = end = line_text = None
    prefix = ""
    for quintuple in tokens:
        type, value, start, end, line_text = quintuple
        if start != (lineno, column):
            assert (lineno, column) <= start, ((lineno, column), start)
            s_lineno, s_column = start
            if lineno < s_lineno:
                prefix += "\n" * (s_lineno - lineno)
                lineno = s_lineno
                column = 0
            if column < s_column:
                prefix += line_text[column:s_column]
                column = s_column
        if type in (tokenize.COMMENT, tokenize.NL):
            prefix += value
            lineno, column = end
            if value.endswith("\n"):
                lineno += 1
                column = 0
            continue
        if type == token.OP:
            type = grammar.opmap[value]
        if debug:
            self.logger.debug("%s %r (prefix=%r)",
                              token.tok_name[type], value, prefix)
        if p.addtoken(type, value, (prefix, start)):
            if debug:
                self.logger.debug("Stop.")
            break
        prefix = ""
        lineno, column = end
        if value.endswith("\n"):
            lineno += 1
            column = 0
    else:
        # We never broke out -- EOF is too soon (how can this happen???)
        raise parse.ParseError("incomplete input",
                               type, value, (prefix, start))
    return p.rootnode
Parse a series of tokens and return the syntax tree.
parse_tokens
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
MIT
def parse_stream_raw(self, stream, debug=False):
    """Parse a stream and return the syntax tree."""
    tokens = tokenize.generate_tokens(stream.readline)
    return self.parse_tokens(tokens, debug)
Parse a stream and return the syntax tree.
parse_stream_raw
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
MIT
def parse_stream(self, stream, debug=False):
    """Parse a stream and return the syntax tree."""
    return self.parse_stream_raw(stream, debug)
Parse a stream and return the syntax tree.
parse_stream
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
MIT
def parse_file(self, filename, encoding=None, debug=False):
    """Parse a file and return the syntax tree."""
    with io.open(filename, "r", encoding=encoding) as stream:
        return self.parse_stream(stream, debug)
Parse a file and return the syntax tree.
parse_file
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
MIT
def parse_string(self, text, debug=False):
    """Parse a string and return the syntax tree."""
    tokens = tokenize.generate_tokens(io.StringIO(text).readline)
    return self.parse_tokens(tokens, debug)
Parse a string and return the syntax tree.
parse_string
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
MIT
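Sketch tying the Driver.parse_* helpers together (assumes lib2to3; the source snippet is invented): parse_string() tokenizes the text and feeds the tokens to parse_tokens().

from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = d.parse_string("def f(x):\n    return x + 1\n")
print(tree)   # str(tree) round-trips to the original source text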
def load_grammar(gt="Grammar.txt", gp=None,
                 save=True, force=False, logger=None):
    """Load the grammar (maybe from a pickle)."""
    if logger is None:
        logger = logging.getLogger()
    gp = _generate_pickle_name(gt) if gp is None else gp
    if force or not _newer(gp, gt):
        logger.info("Generating grammar tables from %s", gt)
        g = pgen.generate_grammar(gt)
        if save:
            logger.info("Writing grammar tables to %s", gp)
            try:
                g.dump(gp)
            except OSError as e:
                logger.info("Writing failed: %s", e)
    else:
        g = grammar.Grammar()
        g.load(gp)
    return g
Load the grammar (maybe from a pickle).
load_grammar
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
MIT
def _newer(a, b):
    """Inquire whether file a was written since file b."""
    if not os.path.exists(a):
        return False
    if not os.path.exists(b):
        return True
    return os.path.getmtime(a) >= os.path.getmtime(b)
Inquire whether file a was written since file b.
_newer
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
MIT
def load_packaged_grammar(package, grammar_source):
    """Normally, loads a pickled grammar by doing
        pkgutil.get_data(package, pickled_grammar)
    where *pickled_grammar* is computed from *grammar_source* by adding the
    Python version and using a ``.pickle`` extension. However, if
    *grammar_source* is an extant file, load_grammar(grammar_source) is
    called instead. This facilitates using a packaged grammar file when needed
    but preserves load_grammar's automatic regeneration behavior when possible.
    """
    if os.path.isfile(grammar_source):
        return load_grammar(grammar_source)
    pickled_name = _generate_pickle_name(os.path.basename(grammar_source))
    data = pkgutil.get_data(package, pickled_name)
    g = grammar.Grammar()
    g.loads(data)
    return g
Normally, loads a pickled grammar by doing pkgutil.get_data(package, pickled_grammar) where *pickled_grammar* is computed from *grammar_source* by adding the Python version and using a ``.pickle`` extension. However, if *grammar_source* is an extant file, load_grammar(grammar_source) is called instead. This facilitates using a packaged grammar file when needed but preserves load_grammar's automatic regeneration behavior when possible.
load_packaged_grammar
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
MIT
def main(*args):
    """Main program, when run as a script: produce grammar pickle files.

    Calls load_grammar for each argument, a path to a grammar text file.
    """
    if not args:
        args = sys.argv[1:]
    logging.basicConfig(level=logging.INFO, stream=sys.stdout,
                        format='%(message)s')
    for gt in args:
        load_grammar(gt, save=True, force=True)
    return True
Main program, when run as a script: produce grammar pickle files. Calls load_grammar for each argument, a path to a grammar text file.
main
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/driver.py
MIT
def tokenize(readline, tokeneater=printtoken):
    """
    The tokenize() function accepts two parameters: one representing the
    input stream, and one providing an output mechanism for tokenize().

    The first parameter, readline, must be a callable object which provides
    the same interface as the readline() method of built-in file objects.
    Each call to the function should return one line of input as a string.

    The second parameter, tokeneater, must also be a callable object. It is
    called once for each token, with five arguments, corresponding to the
    tuples generated by generate_tokens().
    """
    try:
        tokenize_loop(readline, tokeneater)
    except StopTokenizing:
        pass
The tokenize() function accepts two parameters: one representing the input stream, and one providing an output mechanism for tokenize(). The first parameter, readline, must be a callable object which provides the same interface as the readline() method of built-in file objects. Each call to the function should return one line of input as a string. The second parameter, tokeneater, must also be a callable object. It is called once for each token, with five arguments, corresponding to the tuples generated by generate_tokens().
tokenize
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/tokenize.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/tokenize.py
MIT
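Sketch of the legacy tokenize(readline, tokeneater) entry point with its default printer (the one-line source string is invented).

import io
from lib2to3.pgen2 import tokenize as pgen_tokenize

pgen_tokenize.tokenize(io.StringIO("x = 1\n").readline)   # prints one line per token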
def _get_normal_name(orig_enc):
    """Imitates get_normal_name in tokenizer.c."""
    # Only care about the first 12 characters.
    enc = orig_enc[:12].lower().replace("_", "-")
    if enc == "utf-8" or enc.startswith("utf-8-"):
        return "utf-8"
    if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
       enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
        return "iso-8859-1"
    return orig_enc
Imitates get_normal_name in tokenizer.c.
_get_normal_name
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/tokenize.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/tokenize.py
MIT
def detect_encoding(readline):
    """
    The detect_encoding() function is used to detect the encoding that should
    be used to decode a Python source file. It requires one argument, readline,
    in the same way as the tokenize() generator.

    It will call readline a maximum of twice, and return the encoding used
    (as a string) and a list of any lines (left as bytes) it has read in.

    It detects the encoding from the presence of a utf-8 bom or an encoding
    cookie as specified in pep-0263. If both a bom and a cookie are present, but
    disagree, a SyntaxError will be raised. If the encoding cookie is an invalid
    charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
    'utf-8-sig' is returned.

    If no encoding is specified, then the default of 'utf-8' will be returned.
    """
    bom_found = False
    encoding = None
    default = 'utf-8'

    def read_or_stop():
        try:
            return readline()
        except StopIteration:
            return bytes()

    def find_cookie(line):
        try:
            line_string = line.decode('ascii')
        except UnicodeDecodeError:
            return None
        match = cookie_re.match(line_string)
        if not match:
            return None
        encoding = _get_normal_name(match.group(1))
        try:
            codec = lookup(encoding)
        except LookupError:
            # This behaviour mimics the Python interpreter
            raise SyntaxError("unknown encoding: " + encoding)

        if bom_found:
            if codec.name != 'utf-8':
                # This behaviour mimics the Python interpreter
                raise SyntaxError('encoding problem: utf-8')
            encoding += '-sig'
        return encoding

    first = read_or_stop()
    if first.startswith(BOM_UTF8):
        bom_found = True
        first = first[3:]
        default = 'utf-8-sig'
    if not first:
        return default, []

    encoding = find_cookie(first)
    if encoding:
        return encoding, [first]
    if not blank_re.match(first):
        return default, [first]

    second = read_or_stop()
    if not second:
        return default, [first]

    encoding = find_cookie(second)
    if encoding:
        return encoding, [first, second]

    return default, [first, second]
The detect_encoding() function is used to detect the encoding that should be used to decode a Python source file. It requires one argument, readline, in the same way as the tokenize() generator. It will call readline a maximum of twice, and return the encoding used (as a string) and a list of any lines (left as bytes) it has read in. It detects the encoding from the presence of a utf-8 bom or an encoding cookie as specified in pep-0263. If both a bom and a cookie are present, but disagree, a SyntaxError will be raised. If the encoding cookie is an invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, 'utf-8-sig' is returned. If no encoding is specified, then the default of 'utf-8' will be returned.
detect_encoding
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/tokenize.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/tokenize.py
MIT
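Sketch of detect_encoding() on an in-memory byte stream (the example bytes are invented).

import io
from lib2to3.pgen2.tokenize import detect_encoding

buf = io.BytesIO(b"# -*- coding: latin-1 -*-\nx = 1\n")
encoding, lines = detect_encoding(buf.readline)
print(encoding)   # 'iso-8859-1' (the normalized form of 'latin-1')
print(lines)      # the raw byte lines consumed while sniffing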
def untokenize(iterable):
    """Transform tokens back into Python source code.

    Each element returned by the iterable must be a token sequence
    with at least two elements, a token number and token value.  If
    only two tokens are passed, the resulting output is poor.

    Round-trip invariant for full input:
        Untokenized source will match input source exactly

    Round-trip invariant for limited input:
        # Output text will tokenize the back to the input
        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
        newcode = untokenize(t1)
        readline = iter(newcode.splitlines(1)).next
        t2 = [tok[:2] for tokin generate_tokens(readline)]
        assert t1 == t2
    """
    ut = Untokenizer()
    return ut.untokenize(iterable)
Transform tokens back into Python source code. Each element returned by the iterable must be a token sequence with at least two elements, a token number and token value. If only two tokens are passed, the resulting output is poor. Round-trip invariant for full input: Untokenized source will match input source exactly Round-trip invariant for limited input: # Output text will tokenize back to the input t1 = [tok[:2] for tok in generate_tokens(f.readline)] newcode = untokenize(t1) readline = iter(newcode.splitlines(1)).next t2 = [tok[:2] for tok in generate_tokens(readline)] assert t1 == t2
untokenize
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/tokenize.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/tokenize.py
MIT
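The limited-input invariant from the docstring above, rewritten as a runnable sketch (the docstring uses the Python 2 spellings .next and splitlines(1); the Python 3 equivalents are used here, and the source string is arbitrary):

import io
from lib2to3.pgen2.tokenize import generate_tokens, untokenize

source = "x = 1 + 2\nprint(x)\n"
t1 = [tok[:2] for tok in generate_tokens(io.StringIO(source).readline)]
newcode = untokenize(t1)                        # "poor" but equivalent output
readline = iter(newcode.splitlines(True)).__next__
t2 = [tok[:2] for tok in generate_tokens(readline)]
assert t1 == t2                                 # (type, string) pairs round-trip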
def generate_tokens(readline): """ The generate_tokens() generator requires one argument, readline, which must be a callable object which provides the same interface as the readline() method of built-in file objects. Each call to the function should return one line of input as a string. Alternately, readline can be a callable function terminating with StopIteration: readline = open(myfile).next # Example of alternate readline The generator produces 5-tuples with these members: the token type; the token string; a 2-tuple (srow, scol) of ints specifying the row and column where the token begins in the source; a 2-tuple (erow, ecol) of ints specifying the row and column where the token ends in the source; and the line on which the token was found. The line passed is the logical line; continuation lines are included. """ lnum = parenlev = continued = 0 contstr, needcont = '', 0 contline = None indents = [0] # 'stashed' and 'async_*' are used for async/await parsing stashed = None async_def = False async_def_indent = 0 async_def_nl = False while 1: # loop over lines in stream try: line = readline() except StopIteration: line = '' lnum = lnum + 1 pos, max = 0, len(line) if contstr: # continued string if not line: raise TokenError("EOF in multi-line string", strstart) endmatch = endprog.match(line) if endmatch: pos = end = endmatch.end(0) yield (STRING, contstr + line[:end], strstart, (lnum, end), contline + line) contstr, needcont = '', 0 contline = None elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n': yield (ERRORTOKEN, contstr + line, strstart, (lnum, len(line)), contline) contstr = '' contline = None continue else: contstr = contstr + line contline = contline + line continue elif parenlev == 0 and not continued: # new statement if not line: break column = 0 while pos < max: # measure leading whitespace if line[pos] == ' ': column = column + 1 elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize elif line[pos] == '\f': column = 0 else: break pos = pos + 1 if pos == max: break if stashed: yield stashed stashed = None if line[pos] in '#\r\n': # skip comments or blank lines if line[pos] == '#': comment_token = line[pos:].rstrip('\r\n') nl_pos = pos + len(comment_token) yield (COMMENT, comment_token, (lnum, pos), (lnum, pos + len(comment_token)), line) yield (NL, line[nl_pos:], (lnum, nl_pos), (lnum, len(line)), line) else: yield ((NL, COMMENT)[line[pos] == '#'], line[pos:], (lnum, pos), (lnum, len(line)), line) continue if column > indents[-1]: # count indents or dedents indents.append(column) yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line) while column < indents[-1]: if column not in indents: raise IndentationError( "unindent does not match any outer indentation level", ("<tokenize>", lnum, pos, line)) indents = indents[:-1] if async_def and async_def_indent >= indents[-1]: async_def = False async_def_nl = False async_def_indent = 0 yield (DEDENT, '', (lnum, pos), (lnum, pos), line) if async_def and async_def_nl and async_def_indent >= indents[-1]: async_def = False async_def_nl = False async_def_indent = 0 else: # continued statement if not line: raise TokenError("EOF in multi-line statement", (lnum, 0)) continued = 0 while pos < max: pseudomatch = pseudoprog.match(line, pos) if pseudomatch: # scan for tokens start, end = pseudomatch.span(1) spos, epos, pos = (lnum, start), (lnum, end), end token, initial = line[start:end], line[start] if initial in string.digits or \ (initial == '.' 
and token != '.'): # ordinary number yield (NUMBER, token, spos, epos, line) elif initial in '\r\n': newline = NEWLINE if parenlev > 0: newline = NL elif async_def: async_def_nl = True if stashed: yield stashed stashed = None yield (newline, token, spos, epos, line) elif initial == '#': assert not token.endswith("\n") if stashed: yield stashed stashed = None yield (COMMENT, token, spos, epos, line) elif token in triple_quoted: endprog = endprogs[token] endmatch = endprog.match(line, pos) if endmatch: # all on one line pos = endmatch.end(0) token = line[start:pos] if stashed: yield stashed stashed = None yield (STRING, token, spos, (lnum, pos), line) else: strstart = (lnum, start) # multiple lines contstr = line[start:] contline = line break elif initial in single_quoted or \ token[:2] in single_quoted or \ token[:3] in single_quoted: if token[-1] == '\n': # continued string strstart = (lnum, start) endprog = (endprogs[initial] or endprogs[token[1]] or endprogs[token[2]]) contstr, needcont = line[start:], 1 contline = line break else: # ordinary string if stashed: yield stashed stashed = None yield (STRING, token, spos, epos, line) elif initial.isidentifier(): # ordinary name if token in ('async', 'await'): if async_def: yield (ASYNC if token == 'async' else AWAIT, token, spos, epos, line) continue tok = (NAME, token, spos, epos, line) if token == 'async' and not stashed: stashed = tok continue if token == 'def': if (stashed and stashed[0] == NAME and stashed[1] == 'async'): async_def = True async_def_indent = indents[-1] yield (ASYNC, stashed[1], stashed[2], stashed[3], stashed[4]) stashed = None if stashed: yield stashed stashed = None yield tok elif initial == '\\': # continued stmt # This yield is new; needed for better idempotency: if stashed: yield stashed stashed = None yield (NL, token, spos, (lnum, pos), line) continued = 1 else: if initial in '([{': parenlev = parenlev + 1 elif initial in ')]}': parenlev = parenlev - 1 if stashed: yield stashed stashed = None yield (OP, token, spos, epos, line) else: yield (ERRORTOKEN, line[pos], (lnum, pos), (lnum, pos+1), line) pos = pos + 1 if stashed: yield stashed stashed = None for indent in indents[1:]: # pop remaining indent levels yield (DEDENT, '', (lnum, 0), (lnum, 0), '') yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
The generate_tokens() generator requires one argument, readline, which must be a callable object which provides the same interface as the readline() method of built-in file objects. Each call to the function should return one line of input as a string. Alternately, readline can be a callable function terminating with StopIteration: readline = open(myfile).next # Example of alternate readline The generator produces 5-tuples with these members: the token type; the token string; a 2-tuple (srow, scol) of ints specifying the row and column where the token begins in the source; a 2-tuple (erow, ecol) of ints specifying the row and column where the token ends in the source; and the line on which the token was found. The line passed is the logical line; continuation lines are included.
generate_tokens
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/tokenize.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/tokenize.py
MIT
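A short sketch of driving generate_tokens() directly and naming each field of the 5-tuples it yields (the input snippet is arbitrary):

import io
from lib2to3.pgen2 import token
from lib2to3.pgen2.tokenize import generate_tokens

src = "def add(a, b):\n    return a + b\n"
for tok_type, tok_str, start, end, line in generate_tokens(io.StringIO(src).readline):
    print(token.tok_name[tok_type], repr(tok_str), start, end)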
def dump(self, filename):
    """Dump the grammar tables to a pickle file.

    dump() recursively changes all dict to OrderedDict, so the pickled
    file is not exactly the same as what was passed in to dump(). load()
    uses the pickled file to create the tables, but only changes
    OrderedDict to dict at the top level; it does not recursively change
    OrderedDict to dict. So, the loaded tables are different from the
    original tables that were passed to dump() in that some of the
    OrderedDict (from the pickled file) are not changed back to dict.
    For parsing, this has no effect on performance because OrderedDict
    uses dict's __getitem__ with nothing in between.
    """
    with open(filename, "wb") as f:
        d = _make_deterministic(self.__dict__)
        pickle.dump(d, f, 2)
Dump the grammar tables to a pickle file. dump() recursively changes all dict to OrderedDict, so the pickled file is not exactly the same as what was passed in to dump(). load() uses the pickled file to create the tables, but only changes OrderedDict to dict at the top level; it does not recursively change OrderedDict to dict. So, the loaded tables are different from the original tables that were passed to dump() in that some of the OrderedDict (from the pickled file) are not changed back to dict. For parsing, this has no effect on performance because OrderedDict uses dict's __getitem__ with nothing in between.
dump
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/grammar.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/grammar.py
MIT
def load(self, filename): """Load the grammar tables from a pickle file.""" with open(filename, "rb") as f: d = pickle.load(f) self.__dict__.update(d)
Load the grammar tables from a pickle file.
load
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/grammar.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/grammar.py
MIT
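A dump()/load() round trip sketched on a freshly constructed (empty) Grammar; in practice the tables are produced by pgen and cached through exactly this pair of methods. The temporary path is illustrative:

import os, tempfile
from lib2to3.pgen2.grammar import Grammar

g = Grammar()
path = os.path.join(tempfile.mkdtemp(), "grammar.pickle")
g.dump(path)

h = Grammar()
h.load(path)
assert h.symbol2number == g.symbol2number   # tables survive the round trip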
def loads(self, pkl): """Load the grammar tables from a pickle bytes object.""" self.__dict__.update(pickle.loads(pkl))
Load the grammar tables from a pickle bytes object.
loads
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/grammar.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/grammar.py
MIT
def copy(self): """ Copy the grammar. """ new = self.__class__() for dict_attr in ("symbol2number", "number2symbol", "dfas", "keywords", "tokens", "symbol2label"): setattr(new, dict_attr, getattr(self, dict_attr).copy()) new.labels = self.labels[:] new.states = self.states[:] new.start = self.start return new
Copy the grammar.
copy
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/grammar.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/grammar.py
MIT
def report(self): """Dump the grammar tables to standard output, for debugging.""" from pprint import pprint print("s2n") pprint(self.symbol2number) print("n2s") pprint(self.number2symbol) print("states") pprint(self.states) print("dfas") pprint(self.dfas) print("labels") pprint(self.labels) print("start", self.start)
Dump the grammar tables to standard output, for debugging.
report
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/grammar.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/grammar.py
MIT
def __init__(self, grammar, convert=None): """Constructor. The grammar argument is a grammar.Grammar instance; see the grammar module for more information. The parser is not ready yet for parsing; you must call the setup() method to get it started. The optional convert argument is a function mapping concrete syntax tree nodes to abstract syntax tree nodes. If not given, no conversion is done and the syntax tree produced is the concrete syntax tree. If given, it must be a function of two arguments, the first being the grammar (a grammar.Grammar instance), and the second being the concrete syntax tree node to be converted. The syntax tree is converted from the bottom up. A concrete syntax tree node is a (type, value, context, nodes) tuple, where type is the node type (a token or symbol number), value is None for symbols and a string for tokens, context is None or an opaque value used for error reporting (typically a (lineno, offset) pair), and nodes is a list of children for symbols, and None for tokens. An abstract syntax tree node may be anything; this is entirely up to the converter function. """ self.grammar = grammar self.convert = convert or (lambda grammar, node: node)
Constructor. The grammar argument is a grammar.Grammar instance; see the grammar module for more information. The parser is not ready yet for parsing; you must call the setup() method to get it started. The optional convert argument is a function mapping concrete syntax tree nodes to abstract syntax tree nodes. If not given, no conversion is done and the syntax tree produced is the concrete syntax tree. If given, it must be a function of two arguments, the first being the grammar (a grammar.Grammar instance), and the second being the concrete syntax tree node to be converted. The syntax tree is converted from the bottom up. A concrete syntax tree node is a (type, value, context, nodes) tuple, where type is the node type (a token or symbol number), value is None for symbols and a string for tokens, context is None or an opaque value used for error reporting (typically a (lineno, offset) pair), and nodes is a list of children for symbols, and None for tokens. An abstract syntax tree node may be anything; this is entirely up to the converter function.
__init__
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
MIT
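A do-nothing converter with the signature described above, purely for illustration; lib2to3 itself normally passes pytree.convert here to build Node/Leaf objects instead:

def passthrough_convert(grammar, raw_node):
    # raw_node is a (type, value, context, children) tuple; returning it
    # unchanged keeps the concrete syntax tree exactly as the parser built it.
    type_, value, context, children = raw_node
    return raw_node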
def setup(self, start=None): """Prepare for parsing. This *must* be called before starting to parse. The optional argument is an alternative start symbol; it defaults to the grammar's start symbol. You can use a Parser instance to parse any number of programs; each time you call setup() the parser is reset to an initial state determined by the (implicit or explicit) start symbol. """ if start is None: start = self.grammar.start # Each stack entry is a tuple: (dfa, state, node). # A node is a tuple: (type, value, context, children), # where children is a list of nodes or None, and context may be None. newnode = (start, None, None, []) stackentry = (self.grammar.dfas[start], 0, newnode) self.stack = [stackentry] self.rootnode = None self.used_names = set() # Aliased to self.rootnode.used_names in pop()
Prepare for parsing. This *must* be called before starting to parse. The optional argument is an alternative start symbol; it defaults to the grammar's start symbol. You can use a Parser instance to parse any number of programs; each time you call setup() the parser is reset to an initial state determined by the (implicit or explicit) start symbol.
setup
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
MIT
def addtoken(self, type, value, context): """Add a token; return True iff this is the end of the program.""" # Map from token to label ilabel = self.classify(type, value, context) # Loop until the token is shifted; may raise exceptions while True: dfa, state, node = self.stack[-1] states, first = dfa arcs = states[state] # Look for a state with this label for i, newstate in arcs: t, v = self.grammar.labels[i] if ilabel == i: # Look it up in the list of labels assert t < 256 # Shift a token; we're done with it self.shift(type, value, newstate, context) # Pop while we are in an accept-only state state = newstate while states[state] == [(0, state)]: self.pop() if not self.stack: # Done parsing! return True dfa, state, node = self.stack[-1] states, first = dfa # Done with this token return False elif t >= 256: # See if it's a symbol and if we're in its first set itsdfa = self.grammar.dfas[t] itsstates, itsfirst = itsdfa if ilabel in itsfirst: # Push a symbol self.push(t, self.grammar.dfas[t], newstate, context) break # To continue the outer while loop else: if (0, state) in arcs: # An accepting state, pop it and try something else self.pop() if not self.stack: # Done parsing, but another token is input raise ParseError("too much input", type, value, context) else: # No success finding a transition raise ParseError("bad input", type, value, context)
Add a token; return True iff this is the end of the program.
addtoken
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
MIT
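In practice the addtoken() loop is driven by the pgen2 driver rather than called by hand; a sketch of the usual wiring (the parsed string is arbitrary):

from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = d.parse_string("x = 1\n")   # tokenizes and feeds addtoken() until it returns True
print(str(tree))                   # the tree prints back as source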
def classify(self, type, value, context): """Turn a token into a label. (Internal)""" if type == token.NAME: # Keep a listing of all used names self.used_names.add(value) # Check for reserved words ilabel = self.grammar.keywords.get(value) if ilabel is not None: return ilabel ilabel = self.grammar.tokens.get(type) if ilabel is None: raise ParseError("bad token", type, value, context) return ilabel
Turn a token into a label. (Internal)
classify
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
MIT
def shift(self, type, value, newstate, context): """Shift a token. (Internal)""" dfa, state, node = self.stack[-1] newnode = (type, value, context, None) newnode = self.convert(self.grammar, newnode) if newnode is not None: node[-1].append(newnode) self.stack[-1] = (dfa, newstate, node)
Shift a token. (Internal)
shift
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
MIT
def push(self, type, newdfa, newstate, context): """Push a nonterminal. (Internal)""" dfa, state, node = self.stack[-1] newnode = (type, None, context, []) self.stack[-1] = (dfa, newstate, node) self.stack.append((newdfa, 0, newnode))
Push a nonterminal. (Internal)
push
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
MIT
def pop(self): """Pop a nonterminal. (Internal)""" popdfa, popstate, popnode = self.stack.pop() newnode = self.convert(self.grammar, popnode) if newnode is not None: if self.stack: dfa, state, node = self.stack[-1] node[-1].append(newnode) else: self.rootnode = newnode self.rootnode.used_names = self.used_names
Pop a nonterminal. (Internal)
pop
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/parse.py
MIT
def run(self, graminit_h, graminit_c): """Load the grammar tables from the text files written by pgen.""" self.parse_graminit_h(graminit_h) self.parse_graminit_c(graminit_c) self.finish_off()
Load the grammar tables from the text files written by pgen.
run
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/conv.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/conv.py
MIT
def parse_graminit_h(self, filename): """Parse the .h file written by pgen. (Internal) This file is a sequence of #define statements defining the nonterminals of the grammar as numbers. We build two tables mapping the numbers to names and back. """ try: f = open(filename) except OSError as err: print("Can't open %s: %s" % (filename, err)) return False self.symbol2number = {} self.number2symbol = {} lineno = 0 for line in f: lineno += 1 mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line) if not mo and line.strip(): print("%s(%s): can't parse %s" % (filename, lineno, line.strip())) else: symbol, number = mo.groups() number = int(number) assert symbol not in self.symbol2number assert number not in self.number2symbol self.symbol2number[symbol] = number self.number2symbol[number] = symbol return True
Parse the .h file written by pgen. (Internal) This file is a sequence of #define statements defining the nonterminals of the grammar as numbers. We build two tables mapping the numbers to names and back.
parse_graminit_h
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/conv.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/conv.py
MIT
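The #define parsing step in isolation, as a sketch (this particular line does appear in CPython's graminit.h, but any line of that shape works):

import re

line = "#define single_input 256\n"
mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)   # $ still matches before the trailing newline
symbol, number = mo.group(1), int(mo.group(2))
print(symbol, number)   # single_input 256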
def parse_graminit_c(self, filename): """Parse the .c file written by pgen. (Internal) The file looks as follows. The first two lines are always this: #include "pgenheaders.h" #include "grammar.h" After that come four blocks: 1) one or more state definitions 2) a table defining dfas 3) a table defining labels 4) a struct defining the grammar A state definition has the following form: - one or more arc arrays, each of the form: static arc arcs_<n>_<m>[<k>] = { {<i>, <j>}, ... }; - followed by a state array, of the form: static state states_<s>[<t>] = { {<k>, arcs_<n>_<m>}, ... }; """ try: f = open(filename) except OSError as err: print("Can't open %s: %s" % (filename, err)) return False # The code below essentially uses f's iterator-ness! lineno = 0 # Expect the two #include lines lineno, line = lineno+1, next(f) assert line == '#include "pgenheaders.h"\n', (lineno, line) lineno, line = lineno+1, next(f) assert line == '#include "grammar.h"\n', (lineno, line) # Parse the state definitions lineno, line = lineno+1, next(f) allarcs = {} states = [] while line.startswith("static arc "): while line.startswith("static arc "): mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$", line) assert mo, (lineno, line) n, m, k = list(map(int, mo.groups())) arcs = [] for _ in range(k): lineno, line = lineno+1, next(f) mo = re.match(r"\s+{(\d+), (\d+)},$", line) assert mo, (lineno, line) i, j = list(map(int, mo.groups())) arcs.append((i, j)) lineno, line = lineno+1, next(f) assert line == "};\n", (lineno, line) allarcs[(n, m)] = arcs lineno, line = lineno+1, next(f) mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line) assert mo, (lineno, line) s, t = list(map(int, mo.groups())) assert s == len(states), (lineno, line) state = [] for _ in range(t): lineno, line = lineno+1, next(f) mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line) assert mo, (lineno, line) k, n, m = list(map(int, mo.groups())) arcs = allarcs[n, m] assert k == len(arcs), (lineno, line) state.append(arcs) states.append(state) lineno, line = lineno+1, next(f) assert line == "};\n", (lineno, line) lineno, line = lineno+1, next(f) self.states = states # Parse the dfas dfas = {} mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line) assert mo, (lineno, line) ndfas = int(mo.group(1)) for i in range(ndfas): lineno, line = lineno+1, next(f) mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$', line) assert mo, (lineno, line) symbol = mo.group(2) number, x, y, z = list(map(int, mo.group(1, 3, 4, 5))) assert self.symbol2number[symbol] == number, (lineno, line) assert self.number2symbol[number] == symbol, (lineno, line) assert x == 0, (lineno, line) state = states[z] assert y == len(state), (lineno, line) lineno, line = lineno+1, next(f) mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line) assert mo, (lineno, line) first = {} rawbitset = eval(mo.group(1)) for i, c in enumerate(rawbitset): byte = ord(c) for j in range(8): if byte & (1<<j): first[i*8 + j] = 1 dfas[number] = (state, first) lineno, line = lineno+1, next(f) assert line == "};\n", (lineno, line) self.dfas = dfas # Parse the labels labels = [] lineno, line = lineno+1, next(f) mo = re.match(r"static label labels\[(\d+)\] = {$", line) assert mo, (lineno, line) nlabels = int(mo.group(1)) for i in range(nlabels): lineno, line = lineno+1, next(f) mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line) assert mo, (lineno, line) x, y = mo.groups() x = int(x) if y == "0": y = None else: y = eval(y) labels.append((x, y)) lineno, line = lineno+1, next(f) assert line == "};\n", 
(lineno, line) self.labels = labels # Parse the grammar struct lineno, line = lineno+1, next(f) assert line == "grammar _PyParser_Grammar = {\n", (lineno, line) lineno, line = lineno+1, next(f) mo = re.match(r"\s+(\d+),$", line) assert mo, (lineno, line) ndfas = int(mo.group(1)) assert ndfas == len(self.dfas) lineno, line = lineno+1, next(f) assert line == "\tdfas,\n", (lineno, line) lineno, line = lineno+1, next(f) mo = re.match(r"\s+{(\d+), labels},$", line) assert mo, (lineno, line) nlabels = int(mo.group(1)) assert nlabels == len(self.labels), (lineno, line) lineno, line = lineno+1, next(f) mo = re.match(r"\s+(\d+)$", line) assert mo, (lineno, line) start = int(mo.group(1)) assert start in self.number2symbol, (lineno, line) self.start = start lineno, line = lineno+1, next(f) assert line == "};\n", (lineno, line) try: lineno, line = lineno+1, next(f) except StopIteration: pass else: assert 0, (lineno, line)
Parse the .c file written by pgen. (Internal) The file looks as follows. The first two lines are always this: #include "pgenheaders.h" #include "grammar.h" After that come four blocks: 1) one or more state definitions 2) a table defining dfas 3) a table defining labels 4) a struct defining the grammar A state definition has the following form: - one or more arc arrays, each of the form: static arc arcs_<n>_<m>[<k>] = { {<i>, <j>}, ... }; - followed by a state array, of the form: static state states_<s>[<t>] = { {<k>, arcs_<n>_<m>}, ... };
parse_graminit_c
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/conv.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/conv.py
MIT
def finish_off(self): """Create additional useful structures. (Internal).""" self.keywords = {} # map from keyword strings to arc labels self.tokens = {} # map from numeric token values to arc labels for ilabel, (type, value) in enumerate(self.labels): if type == token.NAME and value is not None: self.keywords[value] = ilabel elif value is None: self.tokens[type] = ilabel
Create additional useful structures. (Internal).
finish_off
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/conv.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/pgen2/conv.py
MIT
def transform_import(self, node, results): """Transform for the basic import case. Replaces the old import name with a comma separated list of its replacements. """ import_mod = results.get("module") pref = import_mod.prefix names = [] # create a Node list of the replacement modules for name in MAPPING[import_mod.value][:-1]: names.extend([Name(name[0], prefix=pref), Comma()]) names.append(Name(MAPPING[import_mod.value][-1][0], prefix=pref)) import_mod.replace(names)
Transform for the basic import case. Replaces the old import name with a comma separated list of its replacements.
transform_import
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_urllib.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_urllib.py
MIT
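A sketch of exercising this fixer end to end through lib2to3's refactoring API rather than calling transform_import() directly (the input snippet is arbitrary):

from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_urllib"])
tree = rt.refactor_string("import urllib2\n", "<example>")
print(str(tree))   # expected: import urllib.request, urllib.error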
def transform_member(self, node, results): """Transform for imports of specific module elements. Replaces the module to be imported from with the appropriate new module. """ mod_member = results.get("mod_member") pref = mod_member.prefix member = results.get("member") # Simple case with only a single member being imported if member: # this may be a list of length one, or just a node if isinstance(member, list): member = member[0] new_name = None for change in MAPPING[mod_member.value]: if member.value in change[1]: new_name = change[0] break if new_name: mod_member.replace(Name(new_name, prefix=pref)) else: self.cannot_convert(node, "This is an invalid module element") # Multiple members being imported else: # a dictionary for replacements, order matters modules = [] mod_dict = {} members = results["members"] for member in members: # we only care about the actual members if member.type == syms.import_as_name: as_name = member.children[2].value member_name = member.children[0].value else: member_name = member.value as_name = None if member_name != ",": for change in MAPPING[mod_member.value]: if member_name in change[1]: if change[0] not in mod_dict: modules.append(change[0]) mod_dict.setdefault(change[0], []).append(member) new_nodes = [] indentation = find_indentation(node) first = True def handle_name(name, prefix): if name.type == syms.import_as_name: kids = [Name(name.children[0].value, prefix=prefix), name.children[1].clone(), name.children[2].clone()] return [Node(syms.import_as_name, kids)] return [Name(name.value, prefix=prefix)] for module in modules: elts = mod_dict[module] names = [] for elt in elts[:-1]: names.extend(handle_name(elt, pref)) names.append(Comma()) names.extend(handle_name(elts[-1], pref)) new = FromImport(module, names) if not first or node.parent.prefix.endswith(indentation): new.prefix = indentation new_nodes.append(new) first = False if new_nodes: nodes = [] for new_node in new_nodes[:-1]: nodes.extend([new_node, Newline()]) nodes.append(new_nodes[-1]) node.replace(nodes) else: self.cannot_convert(node, "All module elements are invalid")
Transform for imports of specific module elements. Replaces the module to be imported from with the appropriate new module.
transform_member
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_urllib.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_urllib.py
MIT
def transform_dot(self, node, results): """Transform for calls to module members in code.""" module_dot = results.get("bare_with_attr") member = results.get("member") new_name = None if isinstance(member, list): member = member[0] for change in MAPPING[module_dot.value]: if member.value in change[1]: new_name = change[0] break if new_name: module_dot.replace(Name(new_name, prefix=module_dot.prefix)) else: self.cannot_convert(node, "This is an invalid module element")
Transform for calls to module members in code.
transform_dot
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_urllib.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_urllib.py
MIT
def build_pattern(): #bare = set() for module, replace in list(MAPPING.items()): for old_attr, new_attr in list(replace.items()): LOOKUP[(module, old_attr)] = new_attr #bare.add(module) #bare.add(old_attr) #yield """ # import_name< 'import' (module=%r # | dotted_as_names< any* module=%r any* >) > # """ % (module, module) yield """ import_from< 'from' module_name=%r 'import' ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) > """ % (module, old_attr, old_attr) yield """ power< module_name=%r trailer< '.' attr_name=%r > any* > """ % (module, old_attr)
% (module, old_attr, old_attr) yield
build_pattern
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_renames.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_renames.py
MIT
def has_metaclass(parent): """ we have to check the cls_node without changing it. There are two possibilities: 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') """ for node in parent.children: if node.type == syms.suite: return has_metaclass(node) elif node.type == syms.simple_stmt and node.children: expr_node = node.children[0] if expr_node.type == syms.expr_stmt and expr_node.children: left_side = expr_node.children[0] if isinstance(left_side, Leaf) and \ left_side.value == '__metaclass__': return True return False
we have to check the cls_node without changing it. There are two possibilities: 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta')
has_metaclass
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_metaclass.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_metaclass.py
MIT
def fixup_parse_tree(cls_node): """ one-line classes don't get a suite in the parse tree so we add one to normalize the tree """ for node in cls_node.children: if node.type == syms.suite: # already in the preferred format, do nothing return # !%@#! oneliners have no suite node, we have to fake one up for i, node in enumerate(cls_node.children): if node.type == token.COLON: break else: raise ValueError("No class suite and no ':'!") # move everything into a suite node suite = Node(syms.suite, []) while cls_node.children[i+1:]: move_node = cls_node.children[i+1] suite.append_child(move_node.clone()) move_node.remove() cls_node.append_child(suite) node = suite
one-line classes don't get a suite in the parse tree so we add one to normalize the tree
fixup_parse_tree
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_metaclass.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_metaclass.py
MIT
def fixup_simple_stmt(parent, i, stmt_node): """ if there is a semi-colon all the parts count as part of the same simple_stmt. We just want the __metaclass__ part so we move everything after the semi-colon into its own simple_stmt node """ for semi_ind, node in enumerate(stmt_node.children): if node.type == token.SEMI: # *sigh* break else: return node.remove() # kill the semicolon new_expr = Node(syms.expr_stmt, []) new_stmt = Node(syms.simple_stmt, [new_expr]) while stmt_node.children[semi_ind:]: move_node = stmt_node.children[semi_ind] new_expr.append_child(move_node.clone()) move_node.remove() parent.insert_child(i, new_stmt) new_leaf1 = new_stmt.children[0].children[0] old_leaf1 = stmt_node.children[0].children[0] new_leaf1.prefix = old_leaf1.prefix
if there is a semi-colon all the parts count as part of the same simple_stmt. We just want the __metaclass__ part so we move everything after the semi-colon into its own simple_stmt node
fixup_simple_stmt
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_metaclass.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_metaclass.py
MIT
def fixup_indent(suite): """ If an INDENT is followed by a thing with a prefix then nuke the prefix Otherwise we get in trouble when removing __metaclass__ at suite start """ kids = suite.children[::-1] # find the first indent while kids: node = kids.pop() if node.type == token.INDENT: break # find the first Leaf while kids: node = kids.pop() if isinstance(node, Leaf) and node.type != token.DEDENT: if node.prefix: node.prefix = '' return else: kids.extend(node.children[::-1])
If an INDENT is followed by a thing with a prefix then nuke the prefix Otherwise we get in trouble when removing __metaclass__ at suite start
fixup_indent
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_metaclass.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_metaclass.py
MIT
def traverse_imports(names): """ Walks over all the names imported in a dotted_as_names node. """ pending = [names] while pending: node = pending.pop() if node.type == token.NAME: yield node.value elif node.type == syms.dotted_name: yield "".join([ch.value for ch in node.children]) elif node.type == syms.dotted_as_name: pending.append(node.children[0]) elif node.type == syms.dotted_as_names: pending.extend(node.children[::-2]) else: raise AssertionError("unknown node type")
Walks over all the names imported in a dotted_as_names node.
traverse_imports
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_import.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/lib2to3/fixes/fix_import.py
MIT
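A sketch of feeding traverse_imports() a real dotted_as_names node; the tree navigation via pre_order() is illustrative and assumes lib2to3's stock grammar:

from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver
from lib2to3.pygram import python_symbols as syms
from lib2to3.fixes.fix_import import traverse_imports

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = d.parse_string("import os.path, sys as system\n")
for node in tree.pre_order():
    if node.type == syms.dotted_as_names:
        print(list(traverse_imports(node)))   # expected: ['os.path', 'sys']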
def _other_endian(typ): """Return the type with the 'other' byte order. Simple types like c_int and so on already have __ctype_be__ and __ctype_le__ attributes which contain the types, for more complicated types arrays and structures are supported. """ # check _OTHER_ENDIAN attribute (present if typ is primitive type) if hasattr(typ, _OTHER_ENDIAN): return getattr(typ, _OTHER_ENDIAN) # if typ is array if isinstance(typ, _array_type): return _other_endian(typ._type_) * typ._length_ # if typ is structure if issubclass(typ, Structure): return typ raise TypeError("This type does not support other endian: %s" % typ)
Return the type with the 'other' byte order. Simple types like c_int and so on already have __ctype_be__ and __ctype_le__ attributes which contain the types, for more complicated types arrays and structures are supported.
_other_endian
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/ctypes/_endian.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/ctypes/_endian.py
MIT
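_other_endian() is the helper the byte-swapped Structure variants rely on to flip their field types; a small sketch using only the public ctypes classes:

import ctypes

class Header(ctypes.BigEndianStructure):
    _pack_ = 1
    _fields_ = [("magic", ctypes.c_uint32),
                ("length", ctypes.c_uint16)]

h = Header(magic=0xCAFEBABE, length=8)
print(bytes(h).hex())   # 'cafebabe0008' regardless of the host byte order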
def get_ld_headers(file): """ Parse the header of the loader section of executable and archives This function calls /usr/bin/dump -H as a subprocess and returns a list of (ld_header, ld_header_info) tuples. """ # get_ld_headers parsing: # 1. Find a line that starts with /, ./, or ../ - set as ld_header # 2. If "INDEX" in occurs in a following line - return ld_header # 3. get info (lines starting with [0-9]) ldr_headers = [] p = Popen(["/usr/bin/dump", f"-X{AIX_ABI}", "-H", file], universal_newlines=True, stdout=PIPE, stderr=DEVNULL) # be sure to read to the end-of-file - getting all entries while True: ld_header = get_ld_header(p) if ld_header: ldr_headers.append((ld_header, get_ld_header_info(p))) else: break p.stdout.close() p.wait() return ldr_headers
Parse the header of the loader section of executable and archives This function calls /usr/bin/dump -H as a subprocess and returns a list of (ld_header, ld_header_info) tuples.
get_ld_headers
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/ctypes/_aix.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/ctypes/_aix.py
MIT
def get_shared(ld_headers): """ extract the shareable objects from ld_headers character "[" is used to strip off the path information. Note: the "[" and "]" characters that are part of dump -H output are not removed here. """ shared = [] for (line, _) in ld_headers: # potential member lines contain "[" # otherwise, no processing needed if "[" in line: # Strip off trailing colon (:) shared.append(line[line.index("["):-1]) return shared
extract the shareable objects from ld_headers. Character "[" is used to strip off the path information. Note: the "[" and "]" characters that are part of dump -H output are not removed here.
get_shared
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/ctypes/_aix.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/ctypes/_aix.py
MIT
def get_one_match(expr, lines): """ Must be only one match, otherwise result is None. When there is a match, strip leading "[" and trailing "]" """ # member names in the ld_headers output are between square brackets expr = rf'\[({expr})\]' matches = list(filter(None, (re.search(expr, line) for line in lines))) if len(matches) == 1: return matches[0].group(1) else: return None
Must be only one match, otherwise result is None. When there is a match, strip leading "[" and trailing "]"
get_one_match
python
sajjadium/ctf-archives
ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/ctypes/_aix.py
https://github.com/sajjadium/ctf-archives/blob/master/ctfs/TyphoonCon/2022/pwn/beautifier_player/python3.7/lib/python3.7/ctypes/_aix.py
MIT
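A sketch of the bracket-matching behaviour with hand-written stand-ins for dump -H output (the line format here is illustrative, not authoritative):

import re

lines = [
    "0    0x30000000    /usr/lib/libc.a(shr_64.o)     [libc.a(shr_64.o)]",
    "1    0x31000000    /usr/lib/libcrypt.a(shr_64.o) [libcrypt.a(shr_64.o)]",
]
expr = r'\[(libc\.a\(shr_64\.o\))\]'
matches = list(filter(None, (re.search(expr, line) for line in lines)))
print(matches[0].group(1) if len(matches) == 1 else None)   # libc.a(shr_64.o)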