Komal01 commited on
Commit
89cc1d6
·
verified ·
1 Parent(s): b799443

Upload 6 files

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ deepseek_cyfuture/index.faiss filter=lfs diff=lfs merge=lfs -text
Dockerfile CHANGED
@@ -25,6 +25,6 @@ COPY --chown=user . .
25
  # Make the start script executable
26
  RUN chmod +x start.sh
27
  # Expose FastAPI & Streamlit ports
28
- EXPOSE 7860 8501
29
 
30
  CMD ["./start.sh"]
 
25
  # Make the start script executable
26
  RUN chmod +x start.sh
27
  # Expose FastAPI & Streamlit ports
28
+ EXPOSE 7860 8000
29
 
30
  CMD ["./start.sh"]
app.py CHANGED
@@ -94,6 +94,7 @@ def create_rag_chain(retriever):
94
  You are an AI question answering assistant specialized in answering user queries strictly from the provided context. Give a detailed answer to the user's question considering the context.
95
 
96
  STRICT RULES:
 
97
  - You *must not* answer any questions outside the provided context.
98
  - If the question is unrelated to billing, payments, customer, or meter reading, respond with exactly:
99
  **"This question is outside my specialized domain."**
@@ -220,7 +221,7 @@ def read_root():
220
  if __name__ == "__main__":
221
  # start my fastapi app
222
  import uvicorn
223
- uvicorn.run(app, host="127.0.0.1", port=7860)
224
 
225
 
226
  # questions=[ "Is the website accessible through mobile also? please tell the benefits of it","How do I register for a new connection?","how to make payments?",]
 
94
  You are an AI question answering assistant specialized in answering user queries strictly from the provided context. Give a detailed answer to the user's question considering the context.
95
 
96
  STRICT RULES:
97
+ For generic user queries like "hi", "hello", "how are you", etc., respond with a generic response like "Hello! How can I assist you today?"
98
  - You *must not* answer any questions outside the provided context.
99
  - If the question is unrelated to billing, payments, customer, or meter reading, respond with exactly:
100
  **"This question is outside my specialized domain."**
 
221
  if __name__ == "__main__":
222
  # start my fastapi app
223
  import uvicorn
224
+ uvicorn.run(app, host="127.0.0.1", port=8000)
225
 
226
 
227
  # questions=[ "Is the website accessible through mobile also? please tell the benefits of it","How do I register for a new connection?","how to make payments?",]
deepseek_cyfuture/index.faiss ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2d6fcd5929682d1b0742495a57b13b337dc6f61e16b6a6c121125cd385963c28
3
+ size 125997
deepseek_cyfuture/index.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d12e7c67bfd8614f8d86086c65c5fe4235288e2e8e9e3a081fb30369ce8ae1b9
3
+ size 40637
start.sh CHANGED
@@ -33,7 +33,7 @@ echo "Ollama is ready."
33
  echo "API is running on: http://0.0.0.0:7860"
34
 
35
  # Start FastAPI in the background
36
- uvicorn app:app --host 0.0.0.0 --port 7860 --workers 4 --limit-concurrency 20 &
37
 
38
  # Start Streamlit for UI
39
- streamlit run streamlit_app.py --server.port 8501 --server.address 0.0.0.0
 
33
  echo "API is running on: http://0.0.0.0:8000"
34
 
35
  # Start FastAPI in the background
36
+ uvicorn app:app --host 0.0.0.0 --port 8000 --workers 4 --limit-concurrency 20 &
37
 
38
  # Start Streamlit for UI
39
+ streamlit run streamlit_app.py --server.port 7860 --server.address 0.0.0.0
streamlit_app.py CHANGED
@@ -11,7 +11,7 @@ if "messages" not in st.session_state:
11
 
12
  # Function to query AI API and stream response
13
  def query_ai(question):
14
- url = "http://127.0.0.1:7860/query/"
15
  params = {"input_text": question}
16
 
17
  with requests.get(url, params=params, stream=True) as response:
 
11
 
12
  # Function to query AI API and stream response
13
  def query_ai(question):
14
+ url = "http://127.0.0.1:8000/query/"
15
  params = {"input_text": question}
16
 
17
  with requests.get(url, params=params, stream=True) as response: