Spaces:
Runtime error
Runtime error
Update interface.py
Browse files- interface.py +42 -14
interface.py
CHANGED
@@ -1,25 +1,22 @@
|
|
1 |
-
# interface.py
|
2 |
-
# (Gradio UI definition and sample data generator)
|
3 |
-
|
4 |
import gradio as gr
|
5 |
import spaces
|
6 |
import json
|
|
|
|
|
7 |
from datetime import datetime, timedelta
|
8 |
import numpy as np
|
9 |
from analyzer import TetherProAnalyzer
|
10 |
|
|
|
11 |
def create_sample_data():
|
12 |
"""Generate sample conversation data for testing"""
|
13 |
sample_data = []
|
14 |
base_date = datetime.now() - timedelta(days=30)
|
15 |
-
|
16 |
for i in range(15):
|
17 |
base_score = 25 + (i * 3.5)
|
18 |
if i % 5 == 0:
|
19 |
base_score += 20
|
20 |
-
|
21 |
abuse_score = min(95, max(5, base_score + np.random.normal(0, 8)))
|
22 |
-
|
23 |
sample_data.append({
|
24 |
'timestamp': (base_date + timedelta(days=i*2)).isoformat(),
|
25 |
'abuse_score': round(abuse_score, 1),
|
@@ -29,32 +26,63 @@ def create_sample_data():
|
|
29 |
'emotional_tone': 'menacing_calm' if abuse_score > 70 else 'neutral',
|
30 |
'risk_level': 'high' if abuse_score > 70 else 'moderate' if abuse_score > 40 else 'low'
|
31 |
})
|
32 |
-
|
33 |
return json.dumps(sample_data, indent=2)
|
34 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
35 |
def create_tether_pro_interface():
|
36 |
css = """
|
37 |
.gradio-container { max-width: 1200px !important; margin: 0 auto !important; }
|
38 |
-
/* …your other CSS… */
|
39 |
"""
|
40 |
with gr.Blocks(css=css) as demo:
|
41 |
gr.Markdown("# Tether Pro Conversation Analyzer")
|
42 |
with gr.Row():
|
43 |
-
|
|
|
|
|
|
|
44 |
run_button = gr.Button("Analyze")
|
45 |
output = gr.JSON(label="Analysis Results")
|
46 |
run_button.click(
|
47 |
-
fn=
|
48 |
-
inputs=
|
49 |
outputs=output
|
50 |
)
|
51 |
-
|
52 |
gr.Markdown("## Sample Data")
|
53 |
sample_button = gr.Button("Load Sample")
|
54 |
sample_output = gr.Code(label="Sample JSON")
|
55 |
sample_button.click(fn=create_sample_data, inputs=None, outputs=sample_output)
|
56 |
-
|
57 |
return demo
|
58 |
|
59 |
-
|
60 |
demo = create_tether_pro_interface()
|
|
|
|
|
|
|
|
|
1 |
import gradio as gr
|
2 |
import spaces
|
3 |
import json
|
4 |
+
import os
|
5 |
+
import pandas as pd
|
6 |
from datetime import datetime, timedelta
|
7 |
import numpy as np
|
8 |
from analyzer import TetherProAnalyzer
|
9 |
|
10 |
+
|
11 |
def create_sample_data():
|
12 |
"""Generate sample conversation data for testing"""
|
13 |
sample_data = []
|
14 |
base_date = datetime.now() - timedelta(days=30)
|
|
|
15 |
for i in range(15):
|
16 |
base_score = 25 + (i * 3.5)
|
17 |
if i % 5 == 0:
|
18 |
base_score += 20
|
|
|
19 |
abuse_score = min(95, max(5, base_score + np.random.normal(0, 8)))
|
|
|
20 |
sample_data.append({
|
21 |
'timestamp': (base_date + timedelta(days=i*2)).isoformat(),
|
22 |
'abuse_score': round(abuse_score, 1),
|
|
|
26 |
'emotional_tone': 'menacing_calm' if abuse_score > 70 else 'neutral',
|
27 |
'risk_level': 'high' if abuse_score > 70 else 'moderate' if abuse_score > 40 else 'low'
|
28 |
})
|
|
|
29 |
return json.dumps(sample_data, indent=2)
|
30 |
|
31 |
+
|
32 |
+
def load_messages_from_file(file_obj):
    """
    Read an uploaded CSV or Excel file and convert it to a JSON string of
    message dicts (one dict per row, keyed by column name).

    Parameters:
        file_obj: Gradio file object exposing a ``.name`` path, or None when
            the user clicked Analyze without uploading anything.

    Returns:
        str: JSON-encoded list of row records on success.
        dict: ``{'error': message}`` on any failure — callers must check the
            type before treating the result as JSON text.
    """
    # Guard against a missing upload: gr.File passes None through, and
    # file_obj.name would otherwise raise AttributeError at runtime.
    if file_obj is None:
        return {'error': 'No file uploaded. Please provide a .csv or .xlsx file.'}

    ext = os.path.splitext(file_obj.name)[1].lower()
    try:
        if ext == '.csv':
            df = pd.read_csv(file_obj.name)
        elif ext in ['.xls', '.xlsx']:
            df = pd.read_excel(file_obj.name)
        else:
            return {'error': 'Unsupported file type. Please upload a .csv or .xlsx file.'}
    except Exception as e:
        return {'error': f'Failed to read file: {e}'}

    messages_data = df.to_dict(orient='records')
    # default=str: Excel/CSV parsing can yield pandas Timestamps and other
    # non-JSON-native values, which json.dumps would otherwise reject with
    # a TypeError.
    return json.dumps(messages_data, default=str)
|
49 |
+
|
50 |
+
|
51 |
+
def analyze_uploaded_file(file_obj):
    """
    Load messages from an uploaded file and run them through the
    TetherProAnalyzer, returning the analysis result for the UI.
    """
    loaded = load_messages_from_file(file_obj)
    # A dict here is an error payload from the loader; surface it to the
    # UI unchanged instead of attempting analysis.
    if isinstance(loaded, dict):
        return loaded
    analyzer = TetherProAnalyzer()
    return analyzer.analyze_conversation_history(loaded)
|
59 |
+
|
60 |
+
|
61 |
def create_tether_pro_interface():
    """
    Build and return the Gradio Blocks app for the Tether Pro analyzer.

    Layout: an upload row feeding the analyzer via the Analyze button,
    followed by a sample-data section that renders generated JSON.
    """
    css = """
    .gradio-container { max-width: 1200px !important; margin: 0 auto !important; }
    """
    with gr.Blocks(css=css) as demo:
        gr.Markdown("# Tether Pro Conversation Analyzer")

        # Upload + analysis wiring.
        with gr.Row():
            upload_box = gr.File(
                label="Upload conversation file (.csv or .xlsx)",
                file_types=[".csv", ".xlsx"],
            )
        analyze_btn = gr.Button("Analyze")
        results_panel = gr.JSON(label="Analysis Results")
        analyze_btn.click(
            fn=analyze_uploaded_file,
            inputs=upload_box,
            outputs=results_panel,
        )

        # Sample-data section for users who have no file handy.
        gr.Markdown("## Sample Data")
        sample_btn = gr.Button("Load Sample")
        sample_panel = gr.Code(label="Sample JSON")
        sample_btn.click(fn=create_sample_data, inputs=None, outputs=sample_panel)

    return demo
|
86 |
|
87 |
+
|
88 |
# Module-level app instance: the Spaces runtime / `gradio` launcher looks
# for a top-level `demo` object to serve.
demo = create_tether_pro_interface()
|