berito committed on
Commit ddeed03 · verified · 1 Parent(s): 3b8e387

Upload app.py


Fixed: response was not displayed.

Files changed (1)
  1. app.py +17 -54
app.py CHANGED
@@ -1,5 +1,5 @@
 import streamlit as st
-from transformers import AutoTokenizer, AutoModelForCausalLM, TextIteratorStreamer, pipeline
+from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 from threading import Thread
 
 # Model Initialization
@@ -28,38 +28,13 @@ def load_model():
 tokenizer, llama_pipeline = load_model()
 
 # Generate text
-def generate_response(prompt, chat_history, max_new_tokens):
-    history = []
-
-    # Build chat history
-    for sent, received in chat_history:
-        history.append({"role": "user", "content": sent})
-        history.append({"role": "assistant", "content": received})
-
-    history.append({"role": "user", "content": prompt})
-
-    if len(tokenizer.apply_chat_template(history)) > 512:
-        return "Chat history is too long."
-    else:
-        streamer = TextIteratorStreamer(
-            tokenizer=tokenizer,
-            skip_prompt=True,
-            skip_special_tokens=True,
-            timeout=300.0
-        )
-        thread = Thread(target=llama_pipeline, kwargs={
-            "text_inputs": history,
-            "max_new_tokens": max_new_tokens,
-            "repetition_penalty": 1.15,
-            "streamer": streamer
-        })
-        thread.start()
-
-        generated_text = ""
-        for word in streamer:
-            generated_text += word
-            response = generated_text.strip()
-            yield response
+def generate_response(prompt, max_new_tokens):
+    response = llama_pipeline(
+        prompt,
+        max_new_tokens=max_new_tokens,
+        repetition_penalty=1.15
+    )
+    return response[0]['generated_text']
 
 # Sidebar: Configuration
 st.sidebar.header("Chatbot Configuration")
@@ -73,18 +48,13 @@ examples = [
     "የአማርኛ ግጥም ፃፍልኝ",
     "ተረት ንገረኝ\n\nጅብና አንበሳ",
     "አንድ አስቂኝ ቀልድ ንገረኝ",
-    "የተሰጠው ጽሑፍ አስተያየት ምን አይነት ነው? 'አዎንታዊ'፣ 'አሉታዊ' ወይም 'ገለልተኛ' የሚል ምላሽ ስጥ። 'አሪፍ ፊልም ነበር'",
     "የፈረንሳይ ዋና ከተማ ስም ምንድን ነው?",
     "አሁን የአሜሪካ ፕሬዚዳንት ማን ነው?",
-    "ሶስት የአፍሪካ ሀገራት ጥቀስልኝ",
-    "3 የአሜሪካ መሪዎችን ስም ጥቀስ",
-    "5 የአሜሪካ ከተማዎችን ጥቀስ",
-    "አምስት የአውሮፓ ሀገሮችን ጥቀስልኝ",
-    "በዓለም ላይ ያሉትን 7 አህጉራት ንገረኝ"
 ]
 
 st.subheader("Chat with the Amharic Chatbot")
-chat_history = st.session_state.get("chat_history", [])
+if "chat_history" not in st.session_state:
+    st.session_state.chat_history = []
 
 # Example selector
 example = st.selectbox("Choose an example:", ["Type your own message"] + examples)
@@ -94,21 +64,14 @@ user_input = st.text_input("Your message:", value=example if example != "Type yo
 
 if st.button("Send"):
     if user_input:
-        st.session_state.chat_history = st.session_state.get("chat_history", [])
-        st.session_state.chat_history.append((user_input, ""))
-        responses = generate_response(user_input, st.session_state.chat_history, max_tokens)
-
-        # Stream output
+        # Generate response
        with st.spinner("Generating response..."):
-            final_response = ""
-            for response in responses:
-                final_response = response
-            st.session_state.chat_history[-1] = (user_input, final_response)
-        st.rerun()
+            response = generate_response(user_input, max_tokens)
+            st.session_state.chat_history.append((user_input, response))
 
 # Display Chat History
-if "chat_history" in st.session_state:
-    for i, (user_msg, bot_response) in enumerate(st.session_state.chat_history):
-        st.write(f"**User {i+1}:** {user_msg}")
-        st.write(f"**Bot:** {bot_response}")
+st.write("### Chat History")
+for i, (user_msg, bot_response) in enumerate(st.session_state.chat_history):
+    st.write(f"**User {i+1}:** {user_msg}")
+    st.write(f"**Bot:** {bot_response}")
77