KaraKaraWitch committed on
Commit
10ffde1
·
verified ·
1 Parent(s): b883eab

Upload scripts/stack_parser_large.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. scripts/stack_parser_large.py +462 -0
scripts/stack_parser_large.py ADDED
@@ -0,0 +1,462 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import multiprocessing
import random
import traceback
from lxml import etree
import pathlib
import orjson
import tqdm
import typer
import urllib.parse
from sqlitedict import SqliteDict

# Typer CLI application; every subcommand below is registered via
# @app.command() and dispatched by the app() call at the bottom of the file.
app = typer.Typer()
13
+
14
+
15
def fast_iter(context, func, *args, **kwargs):
    """
    Stream an lxml ``iterparse`` context through *func* with bounded memory.

    http://lxml.de/parsing.html#modifying-the-tree
    Based on Liza Daly's fast_iter
    http://www.ibm.com/developerworks/xml/library/x-hiperfparse/
    See also http://effbot.org/zone/element-iterparse.htm
    """
    for _event, node in context:
        func(node, *args, **kwargs)
        # The callback is done with this element and nothing will touch
        # its descendants again, so clearing here is safe.
        node.clear()
        # Prune already-processed preceding siblings so the partially
        # built tree never retains more than the current ancestor path.
        for ancestor in node.xpath("ancestor-or-self::*"):
            while ancestor.getprevious() is not None:
                del ancestor.getparent()[0]
    del context
32
+
33
# PostHistoryTypeId -> text field that the revision's Text updates.
# 1-3 are the initial revision, 4-9 are edits/rollbacks (presumably per
# the Stack Exchange data-dump schema -- TODO confirm against the dump docs).
_HIST_TEXT_FIELD = {
    1: "title", 2: "body", 3: "tags",
    4: "title", 5: "body", 6: "tags",
    7: "title", 8: "body", 9: "tags",
}
# PostHistoryTypeId -> (boolean flag on the post record, value to set).
_HIST_FLAG = {
    10: ("closed", True),
    11: ("closed", False),
    12: ("deleted", True),
    13: ("deleted", False),
    14: ("locked", True),
    15: ("locked", False),
}


def _clean_text(elem) -> str:
    """Decode a PostHistory row's Text attribute and normalise CRLF to LF."""
    return urllib.parse.unquote(elem.get("Text", "")).replace("\r\n", "\n")


@app.command()
def decadence_fp(stack_folder: pathlib.Path, output_file: pathlib.Path):
    """First pass: replay PostHistory.xml into a temporary SqliteDict.

    Reconstructs the latest title/body/tags for every post by applying the
    revision stream in document order, and records closed/deleted/locked
    state.  Posts that end up with an empty body, or flagged deleted, are
    dropped.  The result lands in ``tmp/<site>.hist.tmp.sqlite`` for the
    second pass (``decadence``).

    ``output_file`` is accepted for CLI symmetry with ``decadence`` but is
    not written by this pass.
    """
    post_history = stack_folder / "PostHistory.xml"
    if not post_history.exists():
        # Was a bare ``raise Exception()``; name the actual problem.
        raise FileNotFoundError(f"{post_history} does not exist")
    print("Working on", stack_folder.name)
    sql_dict_posts = SqliteDict(
        f"tmp/{stack_folder.name}.hist.tmp.sqlite", flag="n", tablename="dict_hist"
    )
    try:
        with tqdm.tqdm() as pbar:
            ctx = etree.iterparse(post_history, tag="row")

            def element_processor2(elem):
                p_type = int(elem.get("PostHistoryTypeId"))
                pid: str = elem.get("PostId")
                if p_type in _HIST_TEXT_FIELD:
                    if pid in sql_dict_posts:
                        post_data = sql_dict_posts[pid]
                    else:
                        post_data = {}
                        # An edit/rollback (types 4-9) with no prior
                        # initial revision means we only have a partial
                        # history for this post.
                        if p_type >= 4:
                            post_data["partial"] = True
                    post_data[_HIST_TEXT_FIELD[p_type]] = _clean_text(elem)
                    sql_dict_posts[pid] = post_data
                    pbar.update(1)
                elif p_type == 35:
                    # Redirect notice: only logged, never persisted.
                    if pid in sql_dict_posts:
                        post_data = sql_dict_posts[pid]
                    else:
                        print(
                            f"[W] {pid}, {stack_folder.name} requested to be redirected but redirect doesn't seem to exist?"
                        )
                        post_data = {}
                    if not post_data.get("partial", False):
                        print(f"[I] {pid}, {stack_folder.name} Not partial?")
                        return
                    print(post_data)
                elif p_type in _HIST_FLAG:
                    if pid not in sql_dict_posts:
                        return
                    post_data = sql_dict_posts[pid]
                    if not post_data:
                        raise Exception
                    key, value = _HIST_FLAG[p_type]
                    post_data[key] = value
                    sql_dict_posts[pid] = post_data
                    pbar.update(1)
                # Commit periodically so the sqlite journal stays small.
                if pbar.n % 5000 == 0 and pbar.n != 0:
                    sql_dict_posts.commit()

            fast_iter(ctx, element_processor2)
    except Exception as e:
        print("[!] ERR: ", traceback.format_exception(e))
    # Drop empty-bodied posts and posts flagged deleted.
    flagged = set()
    for k, v in sql_dict_posts.items():
        # BUG FIX: ``v.get("body")`` returns None for title-only partial
        # records, so ``.strip()`` raised AttributeError; default to "".
        if not v.get("body", "").strip():
            flagged.add(k)
        if v.get("deleted", False):
            flagged.add(k)
    for fg in flagged:
        del sql_dict_posts[fg]
    sql_dict_posts.sync()
    print("Reconstruct done!")
    sql_dict_posts.close()
178
+
179
@app.command()
def decadence(stack_folder: pathlib.Path, output_file: pathlib.Path):
    """Second pass: join Posts.xml against the reconstructed history.

    Pass 1 over Posts.xml materialises question posts (PostTypeId == 1),
    pass 2 attaches answer posts (PostTypeId == 2) to their parent
    question, then every record is dumped as JSONL to ``output_file``.

    Requires the ``tmp/<site>.hist.tmp.sqlite`` dict produced by
    ``decadence_fp`` for this site; that file is deleted on success.
    """
    posts = stack_folder / "Posts.xml"
    if not posts.exists():
        # Was a bare ``raise Exception()``; name the actual problem.
        raise FileNotFoundError(f"{posts} does not exist")

    sql_dict_posts = SqliteDict(
        f"tmp/{stack_folder.name}.hist.tmp.sqlite", flag="r", tablename="dict_hist"
    )
    main_qn_posts = SqliteDict(
        f"tmp/{stack_folder.name}.s2.hist.tmp.sqlite", flag="n", tablename="qa_items"
    )
    print("processing", posts)

    def _opt_int(element, attr: str, default=None):
        """Return XML attribute ``attr`` as int, or ``default`` when absent."""
        raw = element.get(attr)
        return int(raw) if raw else default

    try:
        # Pass 1: materialise every question post.
        with tqdm.tqdm() as bar:
            ctx2 = etree.iterparse(posts, tag="row")
            sync = 0

            def element_processor(element):
                nonlocal sync
                pid = element.get("Id")
                p_type = int(element.get("PostTypeId"))

                if p_type == 1:
                    if pid not in sql_dict_posts:
                        main_qn_posts[pid] = {"Stub": True}
                        print(
                            f"[!] Question: {int(element.get('Id'))} {stack_folder.name} does not exist but referenced?"
                        )
                        return
                    # BUG FIX: SqliteDict.__getitem__ returns a fresh
                    # deserialised copy, so the original pattern
                    # ``main_qn_posts[pid]["x"] = v`` mutated a throwaway
                    # copy and every field was silently lost.  Build the
                    # record locally and store it exactly once.
                    record = sql_dict_posts[pid]
                    record["Stub"] = False
                    record["Id"] = pid
                    record["CreationDate"] = element.get("CreationDate")
                    record["Score"] = int(element.get("Score"))
                    record["Accepted"] = _opt_int(element, "AcceptedAnswerId")
                    record["Counts"] = {
                        "Views": _opt_int(element, "ViewCount", 0),
                        "Answers": _opt_int(element, "AnswerCount", 0),
                        "Comments": _opt_int(element, "CommentCount", 0),
                    }
                    main_qn_posts[pid] = record
                if sync >= 1000:
                    sync = 0
                    main_qn_posts.sync()
                sync += 1
                bar.update(1)

            fast_iter(ctx2, element_processor)
            main_qn_posts.sync()
    except Exception as e:
        print("[!] ERR: ", traceback.format_exception(e))

    print("2nd Pass Posts Done")
    # Pass 2: attach answers to their parent question.
    try:
        with tqdm.tqdm() as bar:
            ctx3 = etree.iterparse(posts, tag="row")
            sync = 0

            def element_processor3(element):
                nonlocal sync
                pid = element.get("Id")
                p_type = int(element.get("PostTypeId"))
                if p_type == 2:
                    parent_id = element.get("ParentId")
                    if parent_id not in main_qn_posts:
                        print(
                            f"[!] Answer: {int(element.get('Id'))} {stack_folder.name} has no parent attached to it!"
                        )
                        return
                    root = main_qn_posts[parent_id]
                    answers = root.setdefault("answers", [])
                    rec_answer = (
                        sql_dict_posts[pid] if pid in sql_dict_posts else None
                    )
                    if rec_answer is None:
                        print(
                            f"[!] Answer: {int(element.get('Id'))} {stack_folder.name} does not have a PostHistory.xml Assoc. with it!"
                        )
                        root["answers"] = answers
                        return
                    rec_answer["Id"] = int(element.get("Id"))
                    rec_answer["Score"] = _opt_int(element, "Score", 0)
                    rec_answer["Counts"] = {
                        "Views": _opt_int(element, "ViewCount", 0),
                        "Comments": _opt_int(element, "CommentCount", 0),
                    }
                    answers.append(rec_answer)
                    root["answers"] = answers
                    # Write the mutated record back; in-place edits are
                    # not persisted by SqliteDict.
                    main_qn_posts[parent_id] = root
                if sync >= 1000:
                    sync = 0
                    print("Sync.")
                    main_qn_posts.sync()
                sync += 1
                bar.update(1)

            fast_iter(ctx3, element_processor3)
            main_qn_posts.sync()
    except Exception as e:
        print("[!] ERR: ", traceback.format_exception(e))
    print("2nd Pass done.", output_file.name)
    try:
        with open(output_file, "wb") as f:
            for k, v in main_qn_posts.items():
                f.write(orjson.dumps(v) + b"\n")
        # Close both handles and remove the now-consumed pass-1 dict.
        sql_dict_posts.close()
        main_qn_posts.close()
        pathlib.Path(f"tmp/{stack_folder.name}.hist.tmp.sqlite").unlink()
    except Exception as e:
        print("[!] ERR: ", traceback.format_exception(e))
309
+
310
+
311
@app.command()
def convo_processor(base_file: pathlib.Path, qa_file: pathlib.Path):
    """Convert a parsed-site JSONL file into conversation-format JSONL.

    Each non-negatively scored answer becomes one user/assistant exchange
    written to ``qa_file``.  Records lacking a title or body are skipped.
    """
    with open(qa_file, "wb") as fw, open(base_file, "rb") as f:
        for line in f:
            data = orjson.loads(line)
            if "body" not in data or "title" not in data:
                continue
            title = data["title"].strip()
            # Some bodies repeat the title verbatim; strip the duplicate.
            if data["body"].lower().startswith(data["title"].lower()):
                question = f"{title}\n\n{data['body'][len(data['title']):].strip()}"
            else:
                question = f"{title}\n\n{data['body'].strip()}"
            for answer in data.get("answers") or []:
                if "Score" not in answer:
                    print("no score?", data)
                    continue
                if answer["Score"] < 0:
                    continue
                conv = {
                    "conversation": [
                        {"sender": "user", "message": question},
                        # BUG FIX: the original serialized the entire
                        # answer dict as the message; emit its text body
                        # (mirrors qa_processor's use of answer["body"]).
                        {"sender": "assistant", "message": answer.get("body", "")},
                    ],
                    "meta": {
                        "q_score": data.get("Score", 0),
                        "a_score": answer["Score"],
                    },
                }
                fw.write(orjson.dumps(conv) + b"\n")
341
+
342
+
343
@app.command()
def qa_processor(base_file: pathlib.Path, qa_file: pathlib.Path, formats: pathlib.Path):
    """Render parsed Q&A records through random prompt templates.

    ``formats`` is a JSON list of ``str.format`` templates taking
    ``{question}`` and ``{answer}``.  Each non-negatively scored answer
    yields one ``{"text": ..., "meta": ...}`` JSONL record in ``qa_file``.
    """
    question_formats = orjson.loads(formats.read_bytes())
    with open(qa_file, "wb") as fw, open(base_file, "rb") as f:
        for line in f:
            data = orjson.loads(line)
            if "body" not in data or "title" not in data:
                continue
            title = data["title"].strip()
            # Some bodies repeat the title verbatim; strip the duplicate.
            if data["body"].lower().startswith(data["title"].lower()):
                question = f"{title}\n\n{data['body'][len(data['title']):].strip()}"
            else:
                question = f"{title}\n\n{data['body'].strip()}"
            for answer in data.get("answers") or []:
                # ROBUSTNESS FIX: convo_processor guards against answers
                # without a Score; this sibling raised KeyError instead.
                if "Score" not in answer:
                    print("no score?", data)
                    continue
                if answer["Score"] < 0:
                    continue
                rendered = random.choice(question_formats).format(
                    question=question, answer=answer["body"]
                )
                out = {
                    "text": rendered,
                    "meta": {
                        "q_score": data.get("Score", 0),
                        "a_score": answer["Score"],
                    },
                }
                fw.write(orjson.dumps(out) + b"\n")
370
+
371
+ def err_handler(e):
372
+ traceback.print_exception(e)
373
+
374
@app.command()
def convo_stackstack(folder: pathlib.Path):
    """Fan ``convo_processor`` out over every ``*.jsonl`` file in ``folder``.

    Outputs go to ``convo-large/<stem>_convo.jsonl``; worker exceptions
    are reported through ``err_handler``.
    """
    with multiprocessing.Pool(processes=64) as pool:
        results = []
        for item in folder.iterdir():
            if item.is_file() and item.suffix.endswith("jsonl"):
                results.append(
                    pool.apply_async(
                        convo_processor,
                        (
                            item,
                            pathlib.Path("convo-large")
                            / pathlib.Path(f"{item.stem}_convo.jsonl"),
                        ),
                        error_callback=err_handler,
                    )
                )
        for res in results:
            res.wait()
        pool.close()
        # BUG FIX: the original had ``pool.join`` without parentheses --
        # the method was referenced, never called.
        pool.join()
395
+
396
@app.command()
def qa_stackstack(folder: pathlib.Path):
    """Fan ``qa_processor`` out over every ``*.jsonl`` file in ``folder``.

    Outputs go to ``qa-large/<stem>_qa.jsonl`` using the templates in
    ``staccato_format.json``; worker errors are routed to ``err_handler``.
    """
    with multiprocessing.Pool(processes=64) as pool:
        pending = [
            pool.apply_async(
                qa_processor,
                (
                    entry,
                    pathlib.Path("qa-large") / pathlib.Path(f"{entry.stem}_qa.jsonl"),
                    pathlib.Path("staccato_format.json"),
                ),
                error_callback=err_handler,
            )
            for entry in folder.iterdir()
            if entry.is_file() and entry.suffix.endswith("jsonl")
        ]
        for task in pending:
            task.wait()
        pool.close()
        pool.join()
418
+
419
@app.command()
def fp_stackstack(folder: pathlib.Path):
    """Run ``decadence_fp`` over every site directory in ``folder``.

    Directories whose name contains ``meta.`` are skipped.
    """
    with multiprocessing.Pool(processes=6) as pool:
        results = []
        for item in folder.iterdir():
            if item.is_dir() and "meta." not in item.name:
                results.append(
                    pool.apply_async(
                        decadence_fp,
                        (
                            item,
                            pathlib.Path("large-parsed")
                            / pathlib.Path(f"{item.name}_raw.jsonl"),
                        ),
                        # CONSISTENCY FIX: convo/qa_stackstack surface
                        # worker exceptions; here they were silently
                        # swallowed inside the AsyncResult.
                        error_callback=err_handler,
                    )
                )
        for res in results:
            res.wait()
        pool.close()
        pool.join()
440
+
441
@app.command()
def stackstack(folder: pathlib.Path):
    """Run ``decadence`` over every site directory in ``folder``.

    Directories whose name contains ``meta.`` are skipped.
    """
    with multiprocessing.Pool(processes=8) as pool:
        results = []
        for item in folder.iterdir():
            if item.is_dir() and "meta." not in item.name:
                results.append(
                    pool.apply_async(
                        decadence,
                        (
                            item,
                            pathlib.Path("large-parsed")
                            / pathlib.Path(f"{item.name}_raw.jsonl"),
                        ),
                        # CONSISTENCY FIX: report worker exceptions like
                        # the sibling commands do.
                        error_callback=err_handler,
                    )
                )
        for res in results:
            res.wait()
        # CONSISTENCY FIX: the original omitted close()/join(), so the
        # ``with`` block terminate()d the pool instead of draining it.
        pool.close()
        pool.join()
460
+
461
+
462
if __name__ == "__main__":
    # BUG FIX: guard the entry point.  The commands above spawn
    # multiprocessing workers, which re-import this module on
    # spawn-start platforms; an unguarded app() would re-launch the CLI
    # inside every worker process.
    app()