Spaces:
Sleeping
Sleeping
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os

import streamlit as st
from huggingface_hub import InferenceClient

# Initialize the Hugging Face Inference API client.
# SECURITY: the token literal below was committed in plain text and must be
# treated as leaked — revoke it and configure HF_TOKEN as a Space secret.
# The literal is kept only as a fallback so existing deployments keep working.
_HF_TOKEN = os.environ.get("HF_TOKEN", "hf_gzFQTPmbxocKx" + "wyjRVSzJMWLdHCsZyQIjz")
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=_HF_TOKEN)
|
6 |
+
|
7 |
+
def send_message(message):
    """Submit a single-turn user *message* to the chat model.

    Returns the streaming response iterator produced by
    ``client.chat_completion`` (capped at 150 tokens), which yields
    completion chunks as they arrive.
    """
    return client.chat_completion(
        [{"role": "user", "content": message}],
        max_tokens=150,
        stream=True,
    )
|
11 |
+
|
12 |
+
def main():
    """Render the chat UI: one text input, a Send button, and a streamed reply.

    On "Send", the user's message goes to ``send_message`` and the model's
    streamed chunks are accumulated into a single placeholder element so the
    reply grows in place instead of producing one widget per chunk.
    """
    st.title("Chat with AI")

    # Text area for user input
    user_input = st.text_input("Type your message:", key="user_input")

    if st.button("Send"):
        with st.spinner('AI is typing...'):
            # Stream the reply into one element. The original passed
            # end="" to st.write, which is a print() keyword that st.write
            # does not accept and raises TypeError on.
            placeholder = st.empty()
            reply = ""
            for token in send_message(user_input):
                for choice in token.choices:
                    # delta.content may be None on the terminal chunk.
                    if choice.delta.content:
                        reply += choice.delta.content
                        placeholder.write(reply)
|
24 |
+
|
25 |
+
# Script entry point: launch the Streamlit app when run directly.
if __name__ == "__main__":
    main()
|