badr-mardi committed on
Commit
600b6c0
·
verified ·
1 Parent(s): 78216e3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -40
app.py CHANGED
@@ -1,51 +1,44 @@
 
1
  import streamlit as st
2
- from huggingface_hub import InferenceClient, InferenceTimeoutError
3
- import asyncio
4
 
5
import os

# Initialize the Inference Client.
# NOTE(review): "gpt-3.5-turbo" is an OpenAI model name, not a Hugging Face
# model id — replace with a model actually hosted on the HF Inference API
# (e.g. "mistralai/Mistral-7B-Instruct-v0.2") — confirm with the deployment.
model_id = "gpt-3.5-turbo"

# SECURITY: the API token was previously hard-coded in this file (and is
# therefore compromised — revoke it). Read it from the environment instead;
# never commit credentials to source control.
api_token = os.environ.get("HF_API_TOKEN", "")

client = InferenceClient(model=model_id, token=api_token)
 
 
 
10
 
 
 
 
11
 
12
# Function to get a model response synchronously.
def get_response_sync(prompt):
    """Return the model's completion for *prompt*, or an error string on timeout.

    Uses the module-level ``client`` (huggingface_hub.InferenceClient).
    """
    try:
        # BUG FIX: with default arguments, InferenceClient.text_generation
        # returns the generated text as a plain string — the original code
        # indexed it as response[0]['generated_text'], which raises at runtime.
        return client.text_generation(prompt=prompt)
    except InferenceTimeoutError as e:
        return f"Request timed out: {e}"
19
 
20
# Function to get a model response asynchronously.
async def get_response_async(prompt):
    """Async variant of get_response_sync.

    BUG FIX: InferenceClient has no ``text_generation_async`` method (the
    async API lives on ``AsyncInferenceClient``), so the original ``await``
    raised AttributeError. Run the blocking call in a worker thread instead.
    """
    try:
        # text_generation returns the generated text as a plain string.
        return await asyncio.to_thread(client.text_generation, prompt=prompt)
    except InferenceTimeoutError as e:
        return f"Request timed out: {e}"
27
 
28
# Streamlit App
st.title("Hugging Face Chatbot with Streamlit")
st.write("This is a chatbot powered by Hugging Face's Inference API.")

# Input and buttons.
user_input = st.text_input("You: ", "")


async def _async_interaction(prompt):
    # Thin async wrapper so the async path mirrors the sync one.
    response = await get_response_async(prompt)
    st.write(f"Bot: {response}")


# BUG FIX: the async button was nested inside the sync button's `if` branch.
# Streamlit re-runs the whole script on every interaction, so a button that
# only renders when another button reads True can never be observed as
# clicked — the async path was dead code. The buttons must be siblings.
if st.button("Send") and user_input:
    # Synchronous response.
    response = get_response_sync(user_input)
    st.write(f"Bot: {response}")

if st.button("Send (Async)") and user_input:
    # Asynchronous interaction using asyncio.
    asyncio.run(_async_interaction(user_input))
49
 
 
 
 
50
 
 
51
 
 
 
 
 
 
 
 
 
 
 
 
1
+ `
2
  import streamlit as st
3
+ import Cohere
4
+ from streamlit_chat import message
5
 
 
 
 
6
 
7
def api_calling(question):
    """Send *question* to the Cohere LLM and return its text response.

    SECURITY: the API key was previously hard-coded in this file (and is
    therefore compromised — revoke it). Read it from the environment instead.
    NOTE(review): `import Cohere` at the top of the file looks wrong — the
    class is normally `from langchain_community.llms import Cohere`; confirm
    against the deployment's requirements.
    """
    import os

    model = Cohere(cohere_api_key=os.environ.get("COHERE_API_KEY", ""))
    response = model.invoke(question)
    return response
11
 
12
st.title("COHERE CHATBOT BADR")

# Conversation history, kept across Streamlit re-runs: one list for the
# user's messages, one for the model's replies.
for _history_key in ("user_input", "cohere_response"):
    if _history_key not in st.session_state:
        st.session_state[_history_key] = []
 
 
 
 
 
18
 
19
def get_text():
    """Render the chat input box and return whatever the user typed."""
    return st.text_input("write here", key="input")
 
 
 
 
22
 
23
user_input = get_text()

if user_input:
    # Query the model and trim the leading blank lines the model tends to emit.
    output = api_calling(user_input)
    output = output.lstrip("\n")

    # Store the turn.
    # BUG FIX: the original appended the question to `cohere_response` and
    # the answer to `user_input` (swapped), and then read a never-initialised
    # `openai_response` key below — a KeyError on the very first message.
    st.session_state.user_input.append(user_input)
    st.session_state.cohere_response.append(output)

message_history = st.empty()

if st.session_state['user_input']:
    # Render newest turn first.
    for i in range(len(st.session_state['user_input']) - 1, -1, -1):
        # This function displays the user's message.
        message(st.session_state["user_input"][i],
                avatar_style="miniavs", is_user=True,
                key=str(i) + 'data_by_user')
        # This function displays the Cohere response.
        message(st.session_state['cohere_response'][i],
                key=str(i), avatar_style="icons")