# main.py
from fastapi import FastAPI, Depends, HTTPException, BackgroundTasks
from fastapi.middleware.cors import CORSMiddleware
import gradio as gr
from services.chat_service import ChatService
from services.model_service import ModelService
from services.data_service import DataService
from services.pdf_service import PDFService  # used below but not imported originally; module path assumed to follow the other services
from services.faq_service import FAQService
from auth.auth_handler import get_api_key
from models.base_models import UserInput, SearchQuery
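# UserInput is expected to carry `user_input` and `chat_history`, and SearchQuery
# `query` and `top_k` (inferred from how they are used in the endpoints below).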
import logging
import asyncio

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('chatbot.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

# Initialize FastAPI app
app = FastAPI(title="Bofrost Chat API", version="2.0.0")

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
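# NOTE: allowing every origin together with credentials is very permissive;
# in production, restrict allow_origins to the known frontend hosts.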

# Initialize services
model_service = ModelService()
data_service = DataService(model_service)
pdf_service = PDFService(model_service)
faq_service = FAQService(model_service)
chat_service = ChatService(model_service, data_service, pdf_service, faq_service)
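# ChatService composes the model, data, PDF and FAQ services; all services are
# created once at import time and shared across requests.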

# API endpoints
@app.post("/api/chat")
async def chat_endpoint(
    background_tasks: BackgroundTasks,
    user_input: UserInput,
    api_key: str = Depends(get_api_key)
):
    try:
        response, updated_history, search_results = await chat_service.chat(
            user_input.user_input,
            user_input.chat_history
        )
        return {
            "status": "success",
            "response": response,
            "chat_history": updated_history,
            "search_results": search_results
        }
    except Exception as e:
        logger.error(f"Error in chat endpoint: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/api/search")
async def search_endpoint(
    query: SearchQuery,
    api_key: str = Depends(get_api_key)
):
    try:
        results = await data_service.search(query.query, query.top_k)
        return {"results": results}
    except Exception as e:
        logger.error(f"Error in search endpoint: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/api/faq/search")
async def faq_search_endpoint(
    query: SearchQuery,
    api_key: str = Depends(get_api_key)
):
    try:
        results = await faq_service.search_faqs(query.query, query.top_k)
        return {"results": results}
    except Exception as e:
        logger.error(f"Error in FAQ search endpoint: {e}")
        raise HTTPException(status_code=500, detail=str(e))

# Gradio interface
def create_gradio_interface():
    with gr.Blocks(theme=gr.themes.Soft()) as demo:
        gr.Markdown("# 🦙 Bofrost Chat Assistant\nFragen Sie nach Produkten, Rezepten und mehr!")
        
        with gr.Row():
            with gr.Column(scale=4):
                chat_display = gr.Chatbot(label="Chat-Verlauf", height=400)
                user_input = gr.Textbox(
                    label="Ihre Nachricht",
                    placeholder="Stellen Sie Ihre Frage...",
                    lines=2
                )
                
            with gr.Column(scale=2):
                with gr.Accordion("Zusätzliche Informationen", open=False):
                    product_info = gr.JSON(label="Produktdetails")
        
        with gr.Row():
            submit_btn = gr.Button("Senden", variant="primary")
            clear_btn = gr.Button("Chat löschen")
            
        chat_history = gr.State([])
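        # gr.State keeps the chat history per browser session rather than globally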
        
        async def respond(message, history):
            response, updated_history, search_results = await chat_service.chat(message, history)
            # gr.Chatbot renders the full conversation, so return the updated
            # history (not just the latest response) for the chat display
            return updated_history, updated_history, search_results
        
        submit_btn.click(
            respond,
            inputs=[user_input, chat_history],
            outputs=[chat_display, chat_history, product_info]
        )
        
        clear_btn.click(
            lambda: ([], [], None),
            outputs=[chat_display, chat_history, product_info]
        )
        
        demo.queue()  # enable the request queue so the async handler can serve concurrent users
        return demo

if __name__ == "__main__":
    import uvicorn

    # Launch the Gradio interface without blocking (prevent_thread_lock=True)
    # so the FastAPI server below can also start
    demo = create_gradio_interface()
    demo.launch(server_name="0.0.0.0", server_port=7860, prevent_thread_lock=True)

    # Start FastAPI server (blocks until shutdown)
    uvicorn.run(app, host="0.0.0.0", port=8000)
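
# Example request once the API server is running. The API-key header name depends
# on the get_api_key dependency in auth/auth_handler.py (not shown here);
# "X-API-Key" below is only an assumption.
#
#   curl -X POST http://localhost:8000/api/chat \
#        -H "Content-Type: application/json" \
#        -H "X-API-Key: <your-key>" \
#        -d '{"user_input": "Hallo", "chat_history": []}'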