"""Streamlit app that simulates a group-therapy session.

Several "agents" each query a hosted Mistral-7B-Instruct model with the
user's question, and the answers are displayed side by side.
"""

import streamlit as st
from huggingface_hub import InferenceClient

# Shared client for the hosted instruct-tuned Mistral 7B endpoint.
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.1")


class GroupTherapyAgent:
    """A single advice-giving agent backed by the shared inference client."""

    def __init__(self, client):
        self.client = client
        self.max_new_tokens = 64  # cap on generated tokens per reply

    def get_response(self, user_question):
        """Query the model and return the generated advice text.

        BUG FIX: ``InferenceClient`` instances are not callable — the
        original ``self.client(inputs=..., parameters=...)`` style belongs
        to the deprecated ``InferenceApi`` and raises ``TypeError`` here.
        Use the ``text_generation`` task method, which returns the
        generated string directly (no ``['generated_text']`` indexing).
        """
        return self.client.text_generation(
            user_question,
            max_new_tokens=self.max_new_tokens,
        )


class GroupTherapyApplication:
    """Fans a question out to several agents and collects their advice."""

    def __init__(self, client, num_agents=4):
        # All agents share one client; ``num_agents`` is parameterized but
        # defaults to the original hard-coded count of 4.
        self.agents = [GroupTherapyAgent(client) for _ in range(num_agents)]

    def get_advice(self, user_question):
        """Return one response string per agent, in agent order."""
        return [agent.get_response(user_question) for agent in self.agents]


app = GroupTherapyApplication(client)
# NOTE: the original module-level ``app.get_advice("I feel anxious...")``
# debug call was removed — it fired a network request to the model on every
# Streamlit rerun before the UI rendered, and its result was only printed.

# Streamlit App Layout
st.title("Group Therapy Session App")

# User question input
user_question = st.text_area(
    "Enter your question or share your experience:", height=150
)

# Button to submit question
if st.button("Get Advice"):
    if user_question:
        # Call to the GroupTherapyApplication to get real responses
        responses = app.get_advice(user_question)
        for idx, response in enumerate(responses, start=1):
            st.markdown(f"**Agent {idx}:** {response}")
    else:
        st.warning("Please enter a question or experience to share.")

# Footer
st.markdown("---")
st.caption("Disclaimer: The responses are simulated and for demonstration purposes only.")