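# FastAPI service exposing a topic-clustering endpoint for news/social documents.
# Responses for large requests are cached on disk under log/ and indexed in log_run/log.txt.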
import hashlib
import json
import time

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from entity import Docs, Cluster, Preprocess, SummaryInput
from function import topic_clustering_social as tc
# from function import topic_clustering_v2 as tc
from get_config import config_params
from iclibs.ic_rabbit import ICRabbitMQ

app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
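

# Build a deterministic cache key from the request's document URLs and clustering parameters.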
def get_hash_id(item: Docs):
    str_hash = ""
    for it in item.response["docs"]:
        str_hash += it["url"]
    str_hash += str(item.top_cluster)
    str_hash += str(item.top_sentence)
    str_hash += str(item.topn_summary)
    str_hash += str(item.top_doc)
    str_hash += str(item.threshold)
    if item.sorted_field.strip():
        str_hash += str(item.sorted_field)
    if item.delete_message:
        str_hash += str(item.delete_message)
    return hashlib.sha224(str_hash.encode("utf-8")).hexdigest()
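

# Load the on-disk cache index (request hash -> path of the cached response); start with an
# empty index if the file does not exist or cannot be parsed.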
try:
    with open("log_run/log.txt") as f:
        data_dict = json.load(f)
except Exception as ve:
    print(ve)
    data_dict = {}


@app.post("/newsanalysis/topic_clustering")
async def topic_clustering(item: Docs):
    docs = item.response["docs"]
    # threshold = item.threshold
    print("start ")
    print("len doc: ", len(docs))
    st_time = time.time()
    top_cluster = item.top_cluster
    top_sentence = item.top_sentence
    topn_summary = item.topn_summary
    sorted_field = item.sorted_field
    max_doc_per_cluster = item.max_doc_per_cluster
    hash_str = get_hash_id(item)
    # threshold = 0.1
    # item.threshold = threshold
    # with open("log/input_{0}.txt".format(st_time), "w+") as f:
    #     f.write(json.dumps({"docs": docs, "key": item.keyword}))
    print(hash_str)
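
    # For requests with more than 200 documents, reuse a cached result for an
    # identical request (same hash) if one is available on disk.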
    if len(docs) > 200:
        try:
            if hash_str in data_dict:
                path_res = data_dict[hash_str]["response_path"]
                with open(path_res) as ff:
                    results = json.load(ff)
                print("time analysis (cache): ", time.time() - st_time)
                return results
        except Exception as vee:
            print(vee)
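
    # Cache miss (or a small request): run the clustering pipeline.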
    results = tc.topic_clustering(docs, item.threshold, top_cluster=top_cluster, top_sentence=top_sentence,
                                  topn_summary=topn_summary, sorted_field=sorted_field,
                                  max_doc_per_cluster=max_doc_per_cluster, delete_message=item.delete_message,
                                  is_check_spam=item.is_check_spam)
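
    # Persist the new result, index it by hash, and drop cache entries older than 30 days.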
    path_res = "log/result_{0}.txt".format(hash_str)
    with open(path_res, "w+") as ff:
        ff.write(json.dumps(results))
    data_dict[hash_str] = {"time": st_time, "response_path": path_res}
    lst_rm = []
    for dt in data_dict:
        if time.time() - data_dict[dt]["time"] > 30 * 24 * 3600:
            lst_rm.append(dt)
    for dt in lst_rm:
        del data_dict[dt]
    with open("log_run/log.txt", "w+") as ff:
        ff.write(json.dumps(data_dict))

    print("time analysis: ", time.time() - st_time)
    return results
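

# A minimal way to run the service locally. The host/port below are assumptions, not values
# taken from this file; the real deployment may read them from config_params or a process manager.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)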