# Purge Roblox-focused wiki domains from the dump based on keyword matching.
# - Shinon
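#
# Two-pass pipeline (as implemented by the two commands below):
#   1. `index` - tally how often "roblox" appears in each page's rendered
#      HTML, per domain; domains with >= 2 hits go into a *.domains.txt
#      blocklist.
#   2. `main`  - re-read the chunks and drop every line whose domain is on
#      the blocklist.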
import multiprocessing
import pathlib
import re

import orjson
import typer
from bs4 import BeautifulSoup
from sqlitedict import SqliteDict

# Match "roblox" as a whole word; the original pattern "(roblox | roblox)"
# required a space on one side of the token, so it missed mentions at string
# edges or next to punctuation.
cpl = re.compile(r"\broblox\b", flags=re.IGNORECASE)

app = typer.Typer()

def filter_worker(
chunked_jsonl: pathlib.Path, out_file: pathlib.Path, index_file: pathlib.Path
):
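    """Copy one chunked JSONL file, skipping lines whose domain is blocklisted."""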
    # One blocklisted domain per line in the index file.
    domains = {i.strip() for i in index_file.read_text().split("\n") if i.strip()}
with open(chunked_jsonl, "rb") as f, open(out_file, "wb") as out_fp:
for line in f:
data = orjson.loads(line)
if data["domain"] in domains:
continue
out_fp.write(line)


def roblox_worker(
    file: pathlib.Path, index_file: pathlib.Path, txt_file: pathlib.Path
):
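    """Count "roblox" mentions per domain and write a domain blocklist."""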
with SqliteDict(index_file) as fout:
with open(file, "rb") as f:
            processed = 0  # pages seen; used to batch SqliteDict commits
for line in f:
data = orjson.loads(line)
# dbg = f"domain: {data['domain']} title: {data['page']}"
if "parse" not in data["content"]:
print(
"Missing parse content",
"domain",
data["domain"],
"title",
data["page"],
)
continue
                html = data["content"]["parse"]["text"]["*"]
                # Flatten the rendered HTML to plain text, collapsing whitespace
                # runs to single spaces so word boundaries survive line breaks
                # (stripping newlines outright fused words across lines).
                text = re.sub(
                    r"\s+", " ", BeautifulSoup(html, "lxml").get_text()
                ).strip()
                robloxed = len(cpl.findall(text))
                if data["domain"] not in fout:
                    fout[data["domain"]] = 0
                fout[data["domain"]] += robloxed
                processed += 1
                # Commit every 10k pages so a crash loses at most one batch.
                if processed % 10_000 == 0:
                    fout.commit()
fout.commit()
with open(txt_file, "wb") as f:
for domain, roblox_count in fout.iteritems():
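                # Two or more mentions flags the whole domain as a Roblox wiki.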
if roblox_count >= 2:
print(domain)
f.write(domain.encode() + b"\n")


def err_cb(e):
    # Surface worker exceptions; task.wait() alone would swallow them.
    print(e)


@app.command()
def index(folder: pathlib.Path, index_folder: pathlib.Path):
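    """Pass 1: tally Roblox mentions per domain across every chunk."""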
with multiprocessing.Pool(processes=64) as pool:
fn = []
        for file in folder.iterdir():  # typer already hands us a Path
fn.append(
pool.apply_async(
roblox_worker,
args=(
pathlib.Path("v2.5-chunks") / f"{file.name}",
index_folder / f"{file.stem}.sqlite",
index_folder / f"{file.name}.domains.txt",
),
error_callback=err_cb,
)
)
for task in fn:
task.wait()
pool.close()
pool.join()


@app.command()
def main(folder: pathlib.Path, output_folder: pathlib.Path, domains_txt: pathlib.Path):
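    """Pass 2: drop blocklisted domains from every chunk in `folder`."""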
with multiprocessing.Pool(processes=64) as pool:
fn = []
for file in folder.iterdir():
fn.append(
pool.apply_async(
filter_worker,
args=(
file,
output_folder / f"{file.name}.jsonl",
                        domains_txt,
),
error_callback=err_cb,
)
)
for task in fn:
task.wait()
pool.close()
pool.join()


if __name__ == "__main__":
    # The guard matters: worker processes re-import this module under spawn.
    app()
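
# Expected invocation (typer derives the subcommands from the function names;
# the paths below are illustrative, not taken from the original script):
#   python RobloxWikiFilter.py index v2.5-chunks/ indexes/
#   python RobloxWikiFilter.py main v2.5-chunks/ filtered/ roblox.domains.txt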