broadfield-dev committed on
Commit
7151f8d
·
verified ·
1 Parent(s): 5ebfd65

Update parser.py

Browse files
Files changed (1) hide show
  1. parser.py +106 -84
parser.py CHANGED
@@ -44,49 +44,59 @@ def create_vector(category, level, location, total_lines, parent_path):
44
  for i, parent in enumerate(parent_path)) / max(1, len(category_map))
45
  return [category_id, level, center_pos, span, parent_depth, parent_weight]
46
 
47
- def parse_node(node, lines, prev_end, level=0, total_lines=None, parent_path=None, counters=None):
48
  if total_lines is None:
49
  total_lines = len(lines)
50
  if parent_path is None:
51
  parent_path = []
52
  if counters is None:
53
  counters = {cat: 0 for cat in ['import', 'assignment', 'function', 'async_function', 'class', 'if', 'while', 'for', 'try', 'return', 'expression', 'other', 'spacer', 'elif', 'else', 'except', 'finally']}
 
 
54
 
55
  parts = []
56
  start_line = getattr(node, 'lineno', prev_end + 1)
57
  end_line = getattr(node, 'end_lineno', start_line)
58
- category = get_category(node)
59
 
 
 
 
 
 
60
  counters[category] += 1
61
  node_id = f"{category.capitalize()}[{counters[category]}]"
62
 
63
  # Spacer before node
64
  if start_line > prev_end + 1:
65
  spacer_lines = lines[prev_end:start_line - 1]
66
- counters['spacer'] += 1
67
- spacer_node_id = f"Spacer[{counters['spacer']}]"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
68
  parts.append({
69
- 'category': 'spacer',
70
- 'source': ''.join(spacer_lines),
71
- 'location': (prev_end + 1, start_line - 1),
72
  'level': level,
73
- 'vector': create_vector('spacer', level, (prev_end + 1, start_line - 1), total_lines, parent_path),
74
  'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
75
- 'node_id': spacer_node_id
76
  })
77
-
78
- # Current node's header (e.g., 'def', 'if')
79
- current_path = parent_path + [node_id]
80
- node_lines = lines[start_line - 1:end_line]
81
- parts.append({
82
- 'category': category,
83
- 'source': node_lines[0], # Only the header line
84
- 'location': (start_line, start_line),
85
- 'level': level,
86
- 'vector': create_vector(category, level, (start_line, start_line), total_lines, current_path),
87
- 'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
88
- 'node_id': node_id
89
- })
90
 
91
  # Process nested bodies
92
  nested_prev_end = start_line
@@ -95,66 +105,74 @@ def parse_node(node, lines, prev_end, level=0, total_lines=None, parent_path=Non
95
  for child in getattr(node, attr):
96
  if attr == 'orelse' and isinstance(node, ast.If) and child.lineno != start_line:
97
  sub_category = 'elif' if 'elif' in lines[child.lineno - 1] else 'else'
98
- counters[sub_category] += 1
99
- sub_node_id = f"{sub_category.capitalize()}[{counters[sub_category]}]"
100
  sub_start = child.lineno
101
  sub_end = getattr(child, 'end_lineno', sub_start)
102
- parts.append({
103
- 'category': sub_category,
104
- 'source': lines[sub_start - 1],
105
- 'location': (sub_start, sub_start),
106
- 'level': level,
107
- 'vector': create_vector(sub_category, level, (sub_start, sub_start), total_lines, current_path),
108
- 'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
109
- 'node_id': sub_node_id
110
- })
111
- child_parts = parse_node(child, lines, sub_start, level + 1, total_lines, current_path, counters)
112
- parts.extend(child_parts)
113
- nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else sub_start)
 
 
 
 
114
  elif attr == 'handlers' and isinstance(child, ast.ExceptHandler):
115
- counters['except'] += 1
116
- sub_node_id = f"Except[{counters['except']}]"
117
  sub_start = child.lineno
118
  sub_end = getattr(child, 'end_lineno', sub_start)
119
- parts.append({
120
- 'category': 'except',
121
- 'source': lines[sub_start - 1],
122
- 'location': (sub_start, sub_start),
123
- 'level': level,
124
- 'vector': create_vector('except', level, (sub_start, sub_start), total_lines, current_path),
125
- 'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
126
- 'node_id': sub_node_id
127
- })
128
- child_parts = parse_node(child, lines, sub_start, level + 1, total_lines, current_path, counters)
129
- parts.extend(child_parts)
130
- nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else sub_start)
 
 
 
 
131
  elif attr == 'finalbody':
132
- counters['finally'] += 1
133
- sub_node_id = f"Finally[{counters['finally']}]"
134
  sub_start = child.lineno
135
  sub_end = getattr(child, 'end_lineno', sub_start)
136
- parts.append({
137
- 'category': 'finally',
138
- 'source': lines[sub_start - 1],
139
- 'location': (sub_start, sub_start),
140
- 'level': level,
141
- 'vector': create_vector('finally', level, (sub_start, sub_start), total_lines, current_path),
142
- 'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
143
- 'node_id': sub_node_id
144
- })
145
- child_parts = parse_node(child, lines, sub_start, level + 1, total_lines, current_path, counters)
146
- parts.extend(child_parts)
147
- nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else sub_start)
 
 
 
 
148
  else:
149
- child_parts = parse_node(child, lines, nested_prev_end, level + 1, total_lines, current_path, counters)
150
  parts.extend(child_parts)
151
  nested_prev_end = child_parts[-1]['location'][1] if child_parts else nested_prev_end
152
 
153
- # Update end_line of the parent node to reflect its full scope
154
- if nested_prev_end > start_line:
155
- parts[-1]['location'] = (start_line, nested_prev_end)
156
- parts[-1]['source'] = ''.join(lines[start_line - 1:nested_prev_end])
157
- parts[-1]['vector'] = create_vector(category, level, (start_line, nested_prev_end), total_lines, current_path)
 
 
158
 
159
  return parts
160
 
@@ -168,25 +186,29 @@ def parse_python_code(code):
168
 
169
  parts = []
170
  prev_end = 0
 
171
 
172
  for stmt in tree.body:
173
- stmt_parts = parse_node(stmt, lines, prev_end, total_lines=total_lines)
174
  parts.extend(stmt_parts)
175
- prev_end = stmt_parts[-1]['location'][1]
176
 
177
  if prev_end < total_lines:
178
  remaining_lines = lines[prev_end:]
179
- counters = {'spacer': 0}
180
- counters['spacer'] += 1
181
- spacer_node_id = f"Spacer[{counters['spacer']}]"
182
- parts.append({
183
- 'category': 'spacer',
184
- 'source': ''.join(remaining_lines),
185
- 'location': (prev_end + 1, total_lines + 1),
186
- 'level': 0,
187
- 'vector': create_vector('spacer', 0, (prev_end + 1, total_lines + 1), total_lines, []),
188
- 'parent_path': 'Top-Level',
189
- 'node_id': spacer_node_id
190
- })
 
 
 
191
 
192
  return parts
 
44
  for i, parent in enumerate(parent_path)) / max(1, len(category_map))
45
  return [category_id, level, center_pos, span, parent_depth, parent_weight]
46
 
47
def parse_node(node, lines, prev_end, level=0, total_lines=None, parent_path=None,
               counters=None, processed_lines=None):
    """Recursively flatten an AST node into a list of annotated source "parts".

    Each part is a dict with: 'category', 'source' (raw text), 'location'
    (1-based inclusive line span), 'level' (nesting depth), 'vector'
    (from create_vector), 'parent_path' and 'node_id'.  Gaps between
    nodes are emitted as 'spacer' parts.  `processed_lines` is a set of
    line numbers already emitted, shared across the whole recursion; it
    prevents the same physical line from being reported twice when parent
    and child spans overlap.

    Returns the (possibly empty) list of parts for this node and its body.
    """
    if total_lines is None:
        total_lines = len(lines)
    if parent_path is None:
        parent_path = []
    if counters is None:
        counters = {cat: 0 for cat in [
            'import', 'assignment', 'function', 'async_function', 'class',
            'if', 'while', 'for', 'try', 'return', 'expression', 'other',
            'spacer', 'elif', 'else', 'except', 'finally']}
    if processed_lines is None:
        processed_lines = set()

    parts = []
    start_line = getattr(node, 'lineno', prev_end + 1)
    end_line = getattr(node, 'end_lineno', start_line)

    # Skip entirely if any of this node's lines were already emitted.
    if any(line in processed_lines for line in range(start_line, end_line + 1)):
        return parts

    category = get_category(node)
    counters[category] += 1
    node_id = f"{category.capitalize()}[{counters[category]}]"
    parent_label = ' -> '.join(parent_path) if parent_path else 'Top-Level'

    # Spacer (blank/comment lines) between the previous node and this one.
    if start_line > prev_end + 1:
        spacer_span = set(range(prev_end + 1, start_line))
        if not spacer_span.issubset(processed_lines):
            counters['spacer'] += 1
            parts.append({
                'category': 'spacer',
                'source': ''.join(lines[prev_end:start_line - 1]),
                'location': (prev_end + 1, start_line - 1),
                'level': level,
                'vector': create_vector('spacer', level, (prev_end + 1, start_line - 1), total_lines, parent_path),
                'parent_path': parent_label,
                'node_id': f"Spacer[{counters['spacer']}]"
            })
            processed_lines.update(spacer_span)

    # Current node's header line (e.g. the 'def'/'if' line itself).
    current_path = parent_path + [node_id]
    header_index = None  # index of this node's header part, used for span extension below
    if start_line not in processed_lines:
        header_index = len(parts)
        parts.append({
            'category': category,
            'source': lines[start_line - 1],
            'location': (start_line, start_line),
            'level': level,
            'vector': create_vector(category, level, (start_line, start_line), total_lines, current_path),
            'parent_path': parent_label,
            'node_id': node_id
        })
        processed_lines.add(start_line)

    def _emit_clause_header(sub_category, sub_start, sub_end):
        # Emit a single-line header part for an elif/else/except/finally
        # clause, unless its span was already covered.
        if any(line in processed_lines for line in range(sub_start, sub_end + 1)):
            return
        counters[sub_category] += 1
        parts.append({
            'category': sub_category,
            'source': lines[sub_start - 1],
            'location': (sub_start, sub_start),
            'level': level,
            'vector': create_vector(sub_category, level, (sub_start, sub_start), total_lines, current_path),
            'parent_path': parent_label,
            'node_id': f"{sub_category.capitalize()}[{counters[sub_category]}]"
        })
        processed_lines.add(sub_start)

    # Process nested bodies.
    nested_prev_end = start_line
    # NOTE(review): the next two lines fall inside a diff-hunk gap in the
    # reviewed source and are reconstructed from the surrounding context of
    # both hunks — confirm against the full file.
    for attr in ('body', 'orelse', 'handlers', 'finalbody'):
        if not hasattr(node, attr):
            continue
        for child in getattr(node, attr):
            sub_start = getattr(child, 'lineno', nested_prev_end + 1)
            sub_end = getattr(child, 'end_lineno', sub_start)
            if attr == 'orelse' and isinstance(node, ast.If) and child.lineno != start_line:
                # An orelse child on its own line is either an elif chain
                # (a nested ast.If whose source line says 'elif') or an else.
                sub_category = 'elif' if 'elif' in lines[child.lineno - 1] else 'else'
                _emit_clause_header(sub_category, sub_start, sub_end)
                child_parts = parse_node(child, lines, sub_start, level + 1, total_lines, current_path, counters, processed_lines)
                parts.extend(child_parts)
                nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else sub_start)
            elif attr == 'handlers' and isinstance(child, ast.ExceptHandler):
                _emit_clause_header('except', sub_start, sub_end)
                child_parts = parse_node(child, lines, sub_start, level + 1, total_lines, current_path, counters, processed_lines)
                parts.extend(child_parts)
                nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else sub_start)
            elif attr == 'finalbody':
                _emit_clause_header('finally', sub_start, sub_end)
                child_parts = parse_node(child, lines, sub_start, level + 1, total_lines, current_path, counters, processed_lines)
                parts.extend(child_parts)
                nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else sub_start)
            else:
                child_parts = parse_node(child, lines, nested_prev_end, level + 1, total_lines, current_path, counters, processed_lines)
                parts.extend(child_parts)
                nested_prev_end = child_parts[-1]['location'][1] if child_parts else nested_prev_end

    # Extend the parent's header part to cover its full body span.
    # BUG FIX: the previous code guarded this with
    # `start_line not in processed_lines`, which is always False once the
    # header part above has been emitted (that append adds start_line to
    # processed_lines), so the extension was dead code; it also mutated
    # parts[-1], which after body processing is the last *child* part,
    # not the parent's header.  We track the header's index explicitly.
    if header_index is not None and nested_prev_end > start_line:
        parts[header_index]['location'] = (start_line, nested_prev_end)
        parts[header_index]['source'] = ''.join(lines[start_line - 1:nested_prev_end])
        parts[header_index]['vector'] = create_vector(category, level, (start_line, nested_prev_end), total_lines, current_path)
        processed_lines.update(range(start_line, nested_prev_end + 1))

    return parts
178
 
 
186
 
187
  parts = []
188
  prev_end = 0
189
+ processed_lines = set()
190
 
191
  for stmt in tree.body:
192
+ stmt_parts = parse_node(stmt, lines, prev_end, total_lines=total_lines, processed_lines=processed_lines)
193
  parts.extend(stmt_parts)
194
+ prev_end = stmt_parts[-1]['location'][1] if stmt_parts else prev_end
195
 
196
  if prev_end < total_lines:
197
  remaining_lines = lines[prev_end:]
198
+ remaining_lines_set = set(range(prev_end + 1, total_lines + 1))
199
+ if not remaining_lines_set.issubset(processed_lines):
200
+ counters = {'spacer': 0}
201
+ counters['spacer'] += 1
202
+ spacer_node_id = f"Spacer[{counters['spacer']}]"
203
+ parts.append({
204
+ 'category': 'spacer',
205
+ 'source': ''.join(remaining_lines),
206
+ 'location': (prev_end + 1, total_lines + 1),
207
+ 'level': 0,
208
+ 'vector': create_vector('spacer', 0, (prev_end + 1, total_lines + 1), total_lines, []),
209
+ 'parent_path': 'Top-Level',
210
+ 'node_id': spacer_node_id
211
+ })
212
+ processed_lines.update(remaining_lines_set)
213
 
214
  return parts