Rohit Rajpoot committed
Commit 460b935 · 1 Parent(s): b156062

Scaffold DeepSeek-R1 Streamlit demo
Files changed (2)
  1. app.py +38 -5
  2. requirements.txt +1 -0
app.py CHANGED
@@ -1,19 +1,52 @@
 import streamlit as st
+
+# Your existing demos
 from assist.chat import chat as embed_chat
 from assist.bayes_chat import bayes_chat
 from assist.transformer_demo import transformer_next
 
-st.title("RepoSage Chatbot Demo")
+# DeepSeek imports
+from transformers import AutoModelForCausalLM, AutoTokenizer, TextGenerationPipeline
+
+st.set_page_config(page_title="RepoSage All-in-One Demo", layout="wide")
+st.title("🤖 RepoSage Unified Demo")
+
+# Cache and load DeepSeek-R1
+@st.cache_resource
+def load_deepseek():
+    model_name = "deepseek-ai/DeepSeek-Coder-1.3B-base"
+    tokenizer = AutoTokenizer.from_pretrained(model_name)
+    model = AutoModelForCausalLM.from_pretrained(model_name)
+    return TextGenerationPipeline(model=model, tokenizer=tokenizer)
+
+deepseek_gen = load_deepseek()
 
-question = st.text_input("Enter your question below:")
+# User input
+question = st.text_input("Enter your question or prompt below:")
+
+# Four buttons side by side, with DeepSeek first
+col1, col2, col3, col4 = st.columns(4)
 
-col1, col2, col3 = st.columns(3)
 with col1:
+    if st.button("DeepSeek-R1 Demo"):
+        if not question.strip():
+            st.warning("Please enter a prompt first.")
+        else:
+            with st.spinner("Generating with DeepSeek…"):
+                out = deepseek_gen(question, max_new_tokens=100, do_sample=True)
+            st.code(out[0]["generated_text"], language="text")
+
+with col2:
     if st.button("Embedding Q&A"):
         st.write(embed_chat(question))
-with col2:
+
+with col3:
     if st.button("Bayesian Q&A"):
         st.write(bayes_chat(question))
-with col3:
+
+with col4:
     if st.button("Transformer Demo"):
         st.write(transformer_next(question))
+
+st.markdown("---")
+st.caption("DeepSeek-R1, Embedding, Bayesian & Transformer demos all in one place ✅")
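
For quick local verification, the DeepSeek loader added in app.py can be smoke-tested outside Streamlit. The sketch below mirrors the committed calls (same checkpoint and generation arguments); the prompt string is only an illustrative example, not part of the commit.

# Standalone smoke test for the DeepSeek pipeline used in app.py (illustrative sketch).
from transformers import AutoModelForCausalLM, AutoTokenizer, TextGenerationPipeline

model_name = "deepseek-ai/DeepSeek-Coder-1.3B-base"  # same checkpoint as app.py
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
generator = TextGenerationPipeline(model=model, tokenizer=tokenizer)

# Generation arguments match the Streamlit button handler; the prompt is an arbitrary example.
out = generator("def fibonacci(n):", max_new_tokens=100, do_sample=True)
print(out[0]["generated_text"])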
requirements.txt CHANGED
@@ -5,3 +5,4 @@ streamlit==1.46.0
 typer==0.16.0
 rich==14.0.0
 torch==2.7.1
+transformers
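
Note that transformers is added without a version pin, unlike the other dependencies. A quick environment check (a sketch, not part of the commit) can confirm which versions actually resolve when the Space builds:

# Print the versions installed in the running environment (illustrative sketch).
import streamlit, torch, transformers

print("streamlit   ", streamlit.__version__)     # pinned to 1.46.0 in requirements.txt
print("torch       ", torch.__version__)         # pinned to 2.7.1
print("transformers", transformers.__version__)  # unpinned in this commit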