from flask import Flask, render_template, request, redirect, url_for
from joblib import load
from sklearn.preprocessing import LabelEncoder  # used in requestResults; may also be re-exported by customFunctions
import pandas as pd
import re
from customFunctions import *
import json
import datetime

pd.set_option('display.max_colwidth', 1000)
PIPELINES = [
    {
        'id': 1,
        'name': 'Baseline',
        'pipeline': load("pipeline_ex1_s1.joblib")
    },
    {
        'id': 2,
        'name': 'Trained on a FeedForward NN',
        'pipeline': load("pipeline_ex1_s2.joblib")
    },
    {
        'id': 3,
        'name': 'Trained on a CRF',
        'pipeline': load("pipeline_ex1_s3.joblib")
    },
    #{
    #    'id': 4,
    #    'name': 'Trained on a small dataset',
    #    'pipeline': load("pipeline_ex2_s1.joblib")
    #},
    #{
    #    'id': 5,
    #    'name': 'Trained on a large dataset',
    #    'pipeline': load("pipeline_ex2_s2.joblib")
    #},
    #{
    #    'id': 6,
    #    'name': 'Embedded using TFIDF',
    #    'pipeline': load("pipeline_ex3_s1.joblib")
    #},
    #{
    #    'id': 7,
    #    'name': 'Embedded using ?',
    #    'pipeline': load("pipeline_ex3_s2.joblib")
    #},
]
pipeline_metadata = [{'id': p['id'], 'name': p['name']} for p in PIPELINES]

def get_pipeline_by_id(pipelines, pipeline_id):
    return next((p['pipeline'] for p in pipelines if p['id'] == pipeline_id), None)

def get_name_by_id(pipelines, pipeline_id):
    return next((p['name'] for p in pipelines if p['id'] == pipeline_id), None)
def requestResults(text, pipeline):
    # Predict tags for the tokenised input; depending on the pipeline the
    # output is either a flat sequence or nested one-list-per-sentence.
    labels = pipeline.predict(text)
    print(labels.ndim)  # debug output
    if labels.ndim != 1:
        # Flatten sentence-level predictions into a single sequence of tags.
        flattened_predictions = []
        for sentence in labels:
            for tag in sentence:
                flattened_predictions.append(tag)
        labels = flattened_predictions
    print(labels)  # debug output
    labels = [int(label) for label in labels]
    # Decode integer class ids back into BIO tags.
    tag_encoder = LabelEncoder()
    tag_encoder.fit(['B-AC', 'O', 'B-LF', 'I-LF'])
    decoded_labels = tag_encoder.inverse_transform(labels)
    return decoded_labels
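# Usage sketch (hypothetical input; the actual tags depend on the selected model):
# tokens = pd.Series([pd.Series(["Arterial", "blood", "gas", "(", "ABG", ")"])])
# tags = requestResults(tokens, PIPELINES[0]['pipeline'])
# -> one label per token, drawn from {'B-AC', 'B-LF', 'I-LF', 'O'}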
LOG_FILE = "usage_log.jsonl"  # each line is a JSON object

def log_interaction(user_input, model_name, predictions):
    log_entry = {
        "timestamp": datetime.datetime.utcnow().isoformat(),
        "user_input": user_input,
        "model": model_name,
        "predictions": predictions
    }
    with open(LOG_FILE, "a") as f:
        f.write(json.dumps(log_entry) + "\n")
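# Reading the log back (sketch, not used by the app itself): every line is a
# standalone JSON object, so the full history can be loaded with:
# with open(LOG_FILE) as f:
#     entries = [json.loads(line) for line in f]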
app = Flask(__name__)

# Route decorator restored; '/' is assumed here for the landing page.
@app.route('/')
def index():
    return render_template('index.html', pipelines=pipeline_metadata)
# The form posts back to the same page; the POST route path is assumed.
@app.route('/', methods=['POST'])
def get_data():
    if request.method == 'POST':
        text = request.form['search']
        # Split the input into word and punctuation tokens.
        tokens = re.findall(r"\w+|[^\w\s]", text)
        tokens_formatted = pd.Series([pd.Series(tokens)])
        pipeline_id = int(request.form['pipeline_select'])
        pipeline = get_pipeline_by_id(PIPELINES, pipeline_id)
        name = get_name_by_id(PIPELINES, pipeline_id)
        labels = requestResults(tokens_formatted, pipeline)
        results = dict(zip(tokens, labels))
        log_interaction(text, name, results)
        return render_template('index.html', results=results, name=name, pipelines=pipeline_metadata)
if __name__ == '__main__':
    app.run(host="0.0.0.0", port=7860)
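# Client sketch (assumes the POST route above; form field names match get_data):
# import requests
# requests.post("http://localhost:7860/",
#               data={"search": "ABG stands for arterial blood gas", "pipeline_select": 1})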