from entity import InputCheckReview
from fastapi import FastAPI
import time
import hashlib
import json
from fastapi.middleware.cors import CORSMiddleware
import os
# Application object shared by all route decorators below.
app = FastAPI()

# Fully permissive CORS so browser frontends on any origin can call the
# analysis endpoints.
# NOTE(review): wildcard origins together with allow_credentials=True is
# very permissive — confirm this is intended for production.
app.add_middleware(
    CORSMiddleware,
    allow_credentials=True,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
def get_hash_id(item: "InputCheckReview") -> str:
    """Return a SHA-224 hex digest identifying a request by topic and time.

    Args:
        item: request payload; only ``id_topic`` and ``start_time`` are read.

    Returns:
        Hex digest of ``id_topic`` concatenated with ``start_time``, or
        ``""`` when ``id_topic`` is falsy (nothing to identify).
    """
    if not item.id_topic:
        return ""
    # f-string coerces both fields to str, so a non-string start_time
    # (e.g. None or an int) no longer raises TypeError on concatenation;
    # all-string inputs hash exactly as before.
    key = f"{item.id_topic}{item.start_time}"
    return hashlib.sha224(key.encode("utf-8")).hexdigest()
@app.post("/newsanalysis/review/topic_clustering")
async def topic_clustering(item: InputCheckReview):
hash_str = get_hash_id(item)
print("item.id_topic: ", item.id_topic)
print("item.start_time: ", item.start_time)
print("item.end_time: ", item.end_time)
print(hash_str)
# if len(docs) > 200:
results = {"status": 1, "res": {}}
if hash_str and os.path.exists("log/result_{0}.txt".format(hash_str)):
path_res = "log/result_{0}.txt".format(hash_str)
with open(path_res, encoding="utf-8") as ff:
res_clus = json.load(ff)
return {"status": 1, "res": res_clus}
return results
@app.post("/newsanalysis/update/topic_clustering")
async def topic_clustering(item: InputCheckReview):
hash_str = get_hash_id(item)
print("item.id_topic: ", item.id_topic)
print("item.start_time: ", item.start_time)
print("item.end_time: ", item.end_time)
print(hash_str)
# if len(docs) > 200:
results = {"status": 0}
if hash_str and os.path.exists("log/result_{0}.txt".format(hash_str)):
path_res = "log/result_{0}.txt".format(hash_str)
with open(path_res, "w+", encoding="utf-8") as ff:
ff.write(json.dumps(item.data))
return {"status": 1}
return results