Prajith04 commited on
Commit
75baf07
·
1 Parent(s): c3dd6f3
Files changed (4) hide show
  1. Dockerfile +33 -0
  2. README.md +10 -0
  3. app.py +61 -0
  4. requirements.txt +9 -0
Dockerfile ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
FROM python:3.10-slim

# Install system dependencies (git/curl for model + repo fetches)
RUN apt-get update && apt-get install -y --no-install-recommends \
    git curl && \
    rm -rf /var/lib/apt/lists/*

# Set working directory
WORKDIR /app

# Create cache directory for model downloads; world-writable because
# Spaces-style runtimes may run the container as a non-root user
RUN mkdir -p /app/cache && chmod -R 777 /app/cache

# Route all HuggingFace / sentence-transformers caches into /app/cache
ENV TRANSFORMERS_CACHE=/app/cache \
    HF_HOME=/app/cache \
    SENTENCE_TRANSFORMERS_HOME=/app/cache \
    PORT=7860 \
    PYTHONUNBUFFERED=1

# Install Python dependencies BEFORE copying the project so this (slow)
# layer is cached unless requirements.txt itself changes.
# NOTE: gradio is already listed in requirements.txt, so no extra install.
COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Copy the rest of the project files
COPY . /app

# Expose Gradio port
EXPOSE 7860

# Run the app
CMD ["python", "app.py"]
README.md CHANGED
@@ -1 +1,11 @@
 
 
 
 
 
 
 
 
 
 
1
  # TroubleGraph
 
1
+ ---
2
+ title: Customer Support RAG
3
+ emoji: 💻
4
+ colorFrom: blue
5
+ colorTo: yellow
6
+ sdk: gradio
7
+ sdk_version: 5.28.0
8
+ app_file: app.py
9
+ pinned: false
10
+ ---
11
  # TroubleGraph
app.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Standard library
import os
import re

# Third-party
import gradio as gr  # BUG FIX: original read "mport gradio as gr" (SyntaxError)
from datasets import load_dataset
from gliner import GLiNER
from langchain.memory import ChatMessageHistory
from langchain.schema import Document
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Qdrant
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableMap
from langchain_groq import ChatGroq
from langchain_qdrant import QdrantVectorStore
# Initialize models once at import time (downloads land in HF_HOME cache).
# NOTE(review): gliner_model is loaded but not referenced in this file —
# presumably used elsewhere or planned; left in place.
gliner_model = GLiNER.from_pretrained("urchade/gliner_medium-v2.1")
embedding_model = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Qdrant setup: attach to an already-populated collection; credentials come
# from the environment (QDRANT_URL / QDRANT_API_KEY).
doc_store = QdrantVectorStore.from_existing_collection(
    embedding=embedding_model,
    collection_name="customer_support_docsv1",
    url=os.getenv("QDRANT_URL"),
    api_key=os.getenv("QDRANT_API_KEY"),
)

# Top-1 similarity retrieval only (k=1).
retriever = doc_store.as_retriever(search_type="similarity", search_kwargs={"k": 1})
llm = ChatGroq(api_key=os.getenv("GROQ_API_KEY"), model="meta-llama/llama-4-scout-17b-16e-instruct")

# BUG FIX: the original prompt never contained a {context} placeholder, so
# the documents fetched by the retriever were computed and then silently
# dropped — the LLM answered without any retrieved context. The system
# message now interpolates {context} explicitly.
chat_prompt = ChatPromptTemplate.from_messages([
    ("system",
     "You are an intelligent assistant. Use context and chat history to answer.\n\n"
     "Context:\n{context}"),
    MessagesPlaceholder("chat_history"),
    ("human", "{query}")
])

# RAG pipeline: build the prompt inputs, render the prompt, call the LLM,
# and parse the response down to a plain string.
rag_chain = RunnableMap({
    "context": lambda x: retriever.invoke(x["query"]),
    "query": lambda x: x["query"],
    "chat_history": lambda x: x["chat_history"]
}) | chat_prompt | llm | StrOutputParser()

# Shared in-process memory — a single conversation shared by all visitors.
memory = ChatMessageHistory()
# Gradio handler: answer a single user turn with the RAG chain.
def chat_fn(message, history_list):
    """Run one chat turn through rag_chain and record it in shared memory.

    history_list is Gradio's own transcript and is intentionally unused;
    the LangChain-side memory below is what the model actually sees.
    """
    answer = rag_chain.invoke({"query": message, "chat_history": memory.messages})

    # Persist the exchange so subsequent turns include it as history.
    memory.add_user_message(message)
    memory.add_ai_message(answer)

    return answer
# Build the chat UI around chat_fn and start the Gradio server (blocking).
chatbot = gr.ChatInterface(fn=chat_fn, title="🛠️ Customer Support Chatbot")
chatbot.launch()
requirements.txt ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ gradio
2
+ gliner
3
+ qdrant-client
4
+ sentence-transformers
5
+ datasets
6
+ langchain
7
+ langchain-community
8
+ langchain-qdrant
9
+ langchain-groq