# parser.py
import ast


def get_category(node):
    """Determine the category of an AST node."""
    if isinstance(node, (ast.Import, ast.ImportFrom)):
        return 'import'
    elif isinstance(node, (ast.Assign, ast.AnnAssign, ast.AugAssign)):
        return 'assignment'
    elif isinstance(node, ast.FunctionDef):
        return 'function'
    elif isinstance(node, ast.AsyncFunctionDef):
        return 'async_function'
    elif isinstance(node, ast.ClassDef):
        return 'class'
    elif isinstance(node, ast.If):
        return 'if'
    elif isinstance(node, ast.While):
        return 'while'
    elif isinstance(node, ast.For):
        return 'for'
    elif isinstance(node, ast.Try):
        return 'try'
    elif isinstance(node, ast.Return):
        return 'return'
    elif isinstance(node, ast.Expr):
        return 'expression'
    else:
        return 'other'


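# Quick sanity check of the mapping above (illustrative only; the sample
# snippet is made up and not part of the parser):
#
#     >>> [get_category(stmt) for stmt in ast.parse("import os\nx = 1\ndef f():\n    return x\n").body]
#     ['import', 'assignment', 'function']

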
def create_vector(category, level, location, total_lines, parent_path):
    """Create a vector optimized for role similarity."""
    category_map = {
        'import': 1, 'assignment': 2, 'function': 3, 'async_function': 4, 'class': 5,
        'if': 6, 'while': 7, 'for': 8, 'try': 9, 'expression': 10, 'spacer': 11,
        'other': 12, 'elif': 13, 'else': 14, 'except': 15, 'finally': 16, 'return': 17
    }
    category_id = category_map.get(category, 0)
    start_line, end_line = location
    span = (end_line - start_line + 1) / total_lines
    center_pos = ((start_line + end_line) / 2) / total_lines
    parent_depth = len(parent_path)
    parent_weight = sum(category_map.get(parent.split('[')[0].lower(), 0) * (1 / (i + 1))
                        for i, parent in enumerate(parent_path)) / max(1, len(category_map))
    return [category_id, level, center_pos, span, parent_depth, parent_weight]


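# Illustrative sketch only: this module does not define how the vectors are
# compared. Downstream code could use something like cosine similarity to rank
# parts that play a similar role; `role_similarity` is a hypothetical helper,
# not part of the parser itself.
def role_similarity(vec_a, vec_b):
    """Cosine similarity between two part vectors (1.0 = same direction)."""
    dot = sum(a * b for a, b in zip(vec_a, vec_b))
    norm = (sum(a * a for a in vec_a) ** 0.5) * (sum(b * b for b in vec_b) ** 0.5)
    return dot / norm if norm else 0.0

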
def parse_node(node, lines, prev_end, level=0, total_lines=None, parent_path=None, counters=None, processed_lines=None):
    """Recursively split an AST node into parts (spacers, headers, nested bodies), each with a location, level, and role vector."""
    if total_lines is None:
        total_lines = len(lines)
    if parent_path is None:
        parent_path = []
    if counters is None:
        counters = {cat: 0 for cat in [
            'import', 'assignment', 'function', 'async_function', 'class', 'if', 'while', 'for', 'try',
            'return', 'expression', 'other', 'spacer', 'elif', 'else', 'except', 'finally']}
    if processed_lines is None:
        processed_lines = set()
    parts = []
    start_line = getattr(node, 'lineno', prev_end + 1)
    end_line = getattr(node, 'end_lineno', start_line)

    # Skip if any lines are already processed
    if any(line in processed_lines for line in range(start_line, end_line + 1)):
        return parts

    category = get_category(node)
    counters[category] += 1
    node_id = f"{category.capitalize()}[{counters[category]}]"

    # Spacer before node
    if start_line > prev_end + 1:
        spacer_lines = lines[prev_end:start_line - 1]
        spacer_lines_set = set(range(prev_end + 1, start_line))
        if not spacer_lines_set.issubset(processed_lines):
            counters['spacer'] += 1
            spacer_node_id = f"Spacer[{counters['spacer']}]"
            parts.append({
                'category': 'spacer',
                'source': ''.join(spacer_lines),
                'location': (prev_end + 1, start_line - 1),
                'level': level,
                'vector': create_vector('spacer', level, (prev_end + 1, start_line - 1), total_lines, parent_path),
                'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                'node_id': spacer_node_id
            })
            processed_lines.update(spacer_lines_set)

    # Current node's header (e.g., 'def', 'if')
    current_path = parent_path + [node_id]
    if start_line not in processed_lines:
        parts.append({
            'category': category,
            'source': lines[start_line - 1],
            'location': (start_line, start_line),
            'level': level,
            'vector': create_vector(category, level, (start_line, start_line), total_lines, current_path),
            'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
            'node_id': node_id
        })
        processed_lines.add(start_line)

    # Process nested bodies
    nested_prev_end = start_line
    for attr in ('body', 'orelse', 'handlers', 'finalbody'):
        if hasattr(node, attr) and getattr(node, attr):
            for child in getattr(node, attr):
                if attr == 'orelse' and isinstance(node, ast.If) and child.lineno != start_line:
                    # An 'elif' branch is a nested ast.If; anything else in orelse belongs to an 'else' block
                    sub_category = 'elif' if 'elif' in lines[child.lineno - 1] else 'else'
                    sub_start = child.lineno
                    sub_end = getattr(child, 'end_lineno', sub_start)
                    if not any(line in processed_lines for line in range(sub_start, sub_end + 1)):
                        counters[sub_category] += 1
                        sub_node_id = f"{sub_category.capitalize()}[{counters[sub_category]}]"
                        parts.append({
                            'category': sub_category,
                            'source': lines[sub_start - 1],
                            'location': (sub_start, sub_start),
                            'level': level,
                            'vector': create_vector(sub_category, level, (sub_start, sub_start), total_lines, current_path),
                            'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                            'node_id': sub_node_id
                        })
                        processed_lines.add(sub_start)
                    child_parts = parse_node(child, lines, sub_start, level + 1, total_lines, current_path, counters, processed_lines)
                    parts.extend(child_parts)
                    nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else sub_start)
                elif attr == 'handlers' and isinstance(child, ast.ExceptHandler):
                    sub_start = child.lineno
                    sub_end = getattr(child, 'end_lineno', sub_start)
                    if not any(line in processed_lines for line in range(sub_start, sub_end + 1)):
                        counters['except'] += 1
                        sub_node_id = f"Except[{counters['except']}]"
                        parts.append({
                            'category': 'except',
                            'source': lines[sub_start - 1],
                            'location': (sub_start, sub_start),
                            'level': level,
                            'vector': create_vector('except', level, (sub_start, sub_start), total_lines, current_path),
                            'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                            'node_id': sub_node_id
                        })
                        processed_lines.add(sub_start)
                    child_parts = parse_node(child, lines, sub_start, level + 1, total_lines, current_path, counters, processed_lines)
                    parts.extend(child_parts)
                    nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else sub_start)
                elif attr == 'finalbody':
                    sub_start = child.lineno
                    sub_end = getattr(child, 'end_lineno', sub_start)
                    if not any(line in processed_lines for line in range(sub_start, sub_end + 1)):
                        counters['finally'] += 1
                        sub_node_id = f"Finally[{counters['finally']}]"
                        parts.append({
                            'category': 'finally',
                            'source': lines[sub_start - 1],
                            'location': (sub_start, sub_start),
                            'level': level,
                            'vector': create_vector('finally', level, (sub_start, sub_start), total_lines, current_path),
                            'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                            'node_id': sub_node_id
                        })
                        processed_lines.add(sub_start)
                    child_parts = parse_node(child, lines, sub_start, level + 1, total_lines, current_path, counters, processed_lines)
                    parts.extend(child_parts)
                    nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else sub_start)
                else:
                    child_parts = parse_node(child, lines, nested_prev_end, level + 1, total_lines, current_path, counters, processed_lines)
                    parts.extend(child_parts)
                    nested_prev_end = child_parts[-1]['location'][1] if child_parts else nested_prev_end

    # Update end_line and source of the parent node if its body extends it
    if nested_prev_end > start_line and start_line not in processed_lines:
        final_end = nested_prev_end
        parts[-1]['location'] = (start_line, final_end)
        parts[-1]['source'] = ''.join(lines[start_line - 1:final_end])
        parts[-1]['vector'] = create_vector(category, level, (start_line, final_end), total_lines, current_path)
        processed_lines.update(range(start_line, final_end + 1))

    return parts


def parse_python_code(code):
    """Parse Python source into a flat list of part dicts; return a single error part on a SyntaxError."""
    lines = code.splitlines(keepends=True)
    total_lines = len(lines)
    try:
        tree = ast.parse(code)
    except SyntaxError:
        return [{
            'category': 'error', 'source': 'Invalid Python code', 'location': (1, 1), 'level': 0,
            'vector': [0, 0, 1.0, 0.0, 0, 0], 'parent_path': 'Top-Level', 'node_id': 'Error[1]'
        }]
    parts = []
    prev_end = 0
    processed_lines = set()
    for stmt in tree.body:
        stmt_parts = parse_node(stmt, lines, prev_end, total_lines=total_lines, processed_lines=processed_lines)
        parts.extend(stmt_parts)
        prev_end = stmt_parts[-1]['location'][1] if stmt_parts else prev_end

    # Trailing spacer for any unprocessed lines after the last statement
    if prev_end < total_lines:
        remaining_lines = lines[prev_end:]
        remaining_lines_set = set(range(prev_end + 1, total_lines + 1))
        if not remaining_lines_set.issubset(processed_lines):
            counters = {'spacer': 0}
            counters['spacer'] += 1
            spacer_node_id = f"Spacer[{counters['spacer']}]"
            parts.append({
                'category': 'spacer',
                'source': ''.join(remaining_lines),
                'location': (prev_end + 1, total_lines),
                'level': 0,
                'vector': create_vector('spacer', 0, (prev_end + 1, total_lines), total_lines, []),
                'parent_path': 'Top-Level',
                'node_id': spacer_node_id
            })
            processed_lines.update(remaining_lines_set)
    return parts


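# Minimal usage sketch (illustrative only; the sample source and the printed
# layout are made up here, and the app that normally consumes these parts is
# not shown).
if __name__ == "__main__":
    sample = (
        "import os\n"
        "\n"
        "def greet(name):\n"
        "    if name:\n"
        "        return f'Hello, {name}!'\n"
        "    return 'Hello!'\n"
    )
    for part in parse_python_code(sample):
        print(f"{part['node_id']:>12}  lines {part['location']}  level {part['level']}  {part['category']}")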