import os
import time
from datetime import datetime
import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# -- SETUP --
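# Use the pure-Python protobuf implementation (works around tokenizer/protobuf binary conflicts).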
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"

@st.cache_resource
def load_respondent():
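    """Load the text-generation pipeline once; st.cache_resource reuses it across reruns."""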
    model_id = "microsoft/phi-2"  # switched to a coherent, safe small model
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(model_id)
    return pipeline("text-generation", model=model, tokenizer=tokenizer)

generator = load_respondent()

if "history" not in st.session_state:
    st.session_state.history = []
    st.session_state.summary = ""

# -- STYLING --
st.markdown("""
    <style>
    body {
        background-color: #111827;
        color: #f3f4f6;
    }
    .stTextInput > div > div > input {
        color: #f3f4f6;
    }
    </style>
""", unsafe_allow_html=True)

# -- HEADER --
st.title("🧠 TARS.help")
st.markdown("### A minimal AI that listens, reflects, and replies.")
st.markdown(f"πŸ—“οΈ {datetime.now().strftime('%B %d, %Y')} | {len(st.session_state.history)//2} exchanges")

# -- HIGH-RISK PHRASE FILTER --
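# Simple substring check; a match bypasses the model and returns a fixed supportive message.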
TRIGGER_PHRASES = ["kill myself", "end it all", "suicide", "not worth living", "can't go on"]

def is_high_risk(text):
    return any(phrase in text.lower() for phrase in TRIGGER_PHRASES)

# -- INPUT --
user_input = st.text_input("How are you feeling today?", placeholder="Start typing...")

# -- REPLY FUNCTION --
def generate_reply(user_input, context):
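    """Build a short prompt from recent context and return only the model's reply text."""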
    prompt = f"""You are a calm, helpful AI assistant who supports users emotionally. Be kind and thoughtful in your reply.

{context}
User: {user_input}
AI:"""
    # Sampling must be enabled for the temperature setting to take effect.
    response = generator(prompt, max_new_tokens=80, do_sample=True, temperature=0.7)[0]['generated_text']
    # Keep only the assistant's reply, trimming any extra turns the model may append.
    return response.split("AI:")[-1].split("User:")[0].strip()

# -- CONVERSATION FLOW --
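# On each submission: build context from the last four history entries (two exchanges),
# run the risk filter, then store either the canned response or a generated reply with a timestamp.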
if user_input:
    context = "\n".join([f"{s}: {m}" for s, m, _ in st.session_state.history[-4:]])
    with st.spinner("TARS is reflecting..."):
        time.sleep(0.5)
        if is_high_risk(user_input):
            response = "I'm really sorry you're feeling this way. You're not alone β€” please talk to someone you trust or a mental health professional. πŸ’™"
        else:
            response = generate_reply(user_input, context)
        timestamp = datetime.now().strftime("%H:%M")
        st.session_state.history.append(("🧍 You", user_input, timestamp))
        st.session_state.history.append(("🤖 TARS", response, timestamp))

# -- DISPLAY HISTORY --
st.markdown("## πŸ—¨οΈ Session")
for speaker, msg, ts in st.session_state.history:
    st.markdown(f"**{speaker} [{ts}]:** {msg}")

# -- SESSION SUMMARY --
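# Re-prompt the model over the full transcript to produce a short reflective note.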
if st.button("🧾 Generate Session Summary"):
    convo = "\n".join([f"{s}: {m}" for s, m, _ in st.session_state.history])
    prompt = f"""You are summarizing a thoughtful conversation between a user and an AI assistant. Write a kind, reflective note based on this interaction.

Conversation:
{convo}

Summary:"""
    summary = generator(prompt, max_new_tokens=100, do_sample=True, temperature=0.5)[0]['generated_text']
    st.session_state.summary = summary.split("Summary:")[-1].strip()

if st.session_state.summary:
    st.markdown("### 🧠 Session Note")
    st.markdown(st.session_state.summary)
    st.download_button("📥 Download Summary", st.session_state.summary, file_name="tars_session.txt")

# -- FOOTER --
st.markdown("---")
st.caption("TARS is not a therapist but a friend. Just a quiet assistant that reflects with you.")