adarshajay committed on
Commit e57fd8a · verified · 1 Parent(s): 20df812

Upload 2 files

Files changed (2)
  1. main.py +45 -0
  2. requirements.txt +67 -0
main.py ADDED
@@ -0,0 +1,45 @@
+ import streamlit as st
+ from transformers import pipeline
+
+
+ @st.cache_resource
+ def load_model():
+     # Load the model once and cache it
+     return pipeline("text-generation", model="deepseek-ai/deepseek-coder-1.3b-instruct")
+
+
+ # App UI
+ st.title("🤖 DeepSeek Coder Chat")
+ st.write("Ask questions to the DeepSeek Coder AI model!")
+
+ # User input
+ user_input = st.text_input("Enter your question:", value="Who are you?")
+
+ if st.button("Generate Response"):
+     # Format messages in chat format
+     messages = [{"role": "user", "content": user_input}]
+
+     # Load cached model
+     pipe = load_model()
+
+     # Generate response with loading indicator
+     with st.spinner("Generating response..."):
+         try:
+             response = pipe(messages, max_new_tokens=256)  # cap reply length; the default can truncate output
+
+             # Display the assistant's reply (generated_text holds the whole chat, reply last)
+             st.subheader("Response:")
+             st.write(response[0]['generated_text'][-1]['content'])
+
+         except Exception as e:
+             st.error(f"An error occurred: {str(e)}")
+
+ # Sidebar with info
+ with st.sidebar:
+     st.markdown("### Model Information")
+     st.write("This app uses the deepseek-ai/deepseek-coder-1.3b-instruct model")
+     st.markdown("### System Requirements")
+     st.write("⚠️ Note: This model requires significant computational resources:")
+     st.write("- ~3GB RAM minimum")
+     st.write("- ~5GB disk space for model weights")
+     st.write("- May take 30-60 seconds to load initially")
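When the transformers text-generation pipeline is given a list of chat messages, as main.py does, the 'generated_text' field of each result holds the whole conversation (the input messages with the model's reply appended at the end) rather than a plain string, which is why the reply is read from the last entry. A minimal standalone sketch of that behaviour, assuming the pinned transformers 4.48 chat-input handling; the prompt and max_new_tokens value are only illustrative:

from transformers import pipeline

pipe = pipeline("text-generation", model="deepseek-ai/deepseek-coder-1.3b-instruct")
messages = [{"role": "user", "content": "Who are you?"}]

# The output is a list with one entry per input; 'generated_text' contains the
# full chat history, with the assistant's new message appended at the end.
out = pipe(messages, max_new_tokens=64)
assistant_reply = out[0]["generated_text"][-1]["content"]
print(assistant_reply)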
requirements.txt ADDED
@@ -0,0 +1,67 @@
+ altair==5.5.0
+ attrs==24.3.0
+ blinker==1.9.0
+ cachetools==5.5.1
+ certifi==2024.12.14
+ charset-normalizer==3.4.1
+ click==8.1.8
+ filelock==3.17.0
+ fsspec==2024.12.0
+ gitdb==4.0.12
+ GitPython==3.1.44
+ huggingface-hub==0.27.1
+ idna==3.10
+ Jinja2==3.1.5
+ jsonschema==4.23.0
+ jsonschema-specifications==2024.10.1
+ markdown-it-py==3.0.0
+ MarkupSafe==3.0.2
+ mdurl==0.1.2
+ mpmath==1.3.0
+ narwhals==1.23.0
+ networkx==3.4.2
+ numpy==2.2.2
+ nvidia-cublas-cu12==12.4.5.8
+ nvidia-cuda-cupti-cu12==12.4.127
+ nvidia-cuda-nvrtc-cu12==12.4.127
+ nvidia-cuda-runtime-cu12==12.4.127
+ nvidia-cudnn-cu12==9.1.0.70
+ nvidia-cufft-cu12==11.2.1.3
+ nvidia-curand-cu12==10.3.5.147
+ nvidia-cusolver-cu12==11.6.1.9
+ nvidia-cusparse-cu12==12.3.1.170
+ nvidia-nccl-cu12==2.21.5
+ nvidia-nvjitlink-cu12==12.4.127
+ nvidia-nvtx-cu12==12.4.127
+ packaging==24.2
+ pandas==2.2.3
+ pillow==11.1.0
+ protobuf==5.29.3
+ pyarrow==19.0.0
+ pydeck==0.9.1
+ Pygments==2.19.1
+ python-dateutil==2.9.0.post0
+ pytz==2024.2
+ PyYAML==6.0.2
+ referencing==0.36.1
+ regex==2024.11.6
+ requests==2.32.3
+ rich==13.9.4
+ rpds-py==0.22.3
+ safetensors==0.5.2
+ six==1.17.0
+ smmap==5.0.2
+ streamlit==1.41.1
+ sympy==1.13.1
+ tenacity==9.0.0
+ tokenizers==0.21.0
+ toml==0.10.2
+ torch==2.5.1
+ tornado==6.4.2
+ tqdm==4.67.1
+ transformers==4.48.1
+ triton==3.1.0
+ typing_extensions==4.12.2
+ tzdata==2025.1
+ urllib3==2.3.0
+ watchdog==6.0.0