Commit · 3c75fe4
Parent(s): e78cc7e
added files
Files changed:
- app.py +13 -0
- chat_fast_api.py +60 -0
- gradio_app.py +27 -0
- requirements.txt +0 -0
app.py
ADDED
@@ -0,0 +1,13 @@
+import uvicorn
+import threading
+import chat_fast_api
+import gradio_app
+
+
+def start_fast():
+    uvicorn.run("chat_fast_api:app", host="0.0.0.0", port=8000)
+
+fastapi_thread = threading.Thread(target=start_fast)
+fastapi_thread.start()
+
+gradio_app.start_gradio()
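A note on app.py: the Gradio UI talks to the FastAPI backend over HTTP, so the uvicorn thread needs to be accepting connections before the first chat request arrives. A minimal variant sketch (not part of this commit) marks the thread as a daemon and polls FastAPI's auto-generated /docs endpoint before launching the UI; the retry count and sleep interval here are arbitrary:

import threading
import time

import requests
import uvicorn

import chat_fast_api
import gradio_app


def start_fast():
    # Serve the FastAPI app defined in chat_fast_api.py on port 8000.
    uvicorn.run("chat_fast_api:app", host="0.0.0.0", port=8000)


# daemon=True lets the whole process exit when the Gradio UI stops.
fastapi_thread = threading.Thread(target=start_fast, daemon=True)
fastapi_thread.start()

# Poll FastAPI's built-in /docs page until the server answers (up to ~10 s).
for _ in range(20):
    try:
        requests.get("http://127.0.0.1:8000/docs", timeout=1)
        break
    except requests.exceptions.RequestException:
        time.sleep(0.5)

gradio_app.start_gradio()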
chat_fast_api.py
ADDED
@@ -0,0 +1,60 @@
+from fastapi import FastAPI
+from pydantic import BaseModel
+from groq import Groq
+import os
+from dotenv import load_dotenv
+load_dotenv()
+
+# Initialize the FastAPI app
+app = FastAPI()
+
+# Get the API key from an environment variable
+groq_api_key = os.getenv("GROQ_API_KEY")
+if not groq_api_key:
+    raise ValueError("GROQ_API_KEY environment variable is not set")
+
+# Initialize Groq client
+client = Groq(api_key=groq_api_key)
+
+# Define the system message
+System_msg = '''-act as an experienced blockchain developer, working for 15 years.
+-help me understand some concepts, assume I am a complete beginner.
+-If the user asks anything not related to blockchain, just say you don't know about it.'''
+
+# Request model for FastAPI
+class ChatRequest(BaseModel):
+    message: str
+    history: list
+
+# FastAPI chat endpoint
+@app.post("/chat")
+def chat(request: ChatRequest):
+    message = request.message
+    history = request.history
+
+    # Create the history_list to send to the Groq API
+    history_list = [{"role": "system", "content": System_msg}]
+    for human, ai in history:
+        history_list.append({"role": "user", "content": human})
+        history_list.append({"role": "assistant", "content": ai})
+
+    # Append the new user message to the history
+    history_list.append({"role": "user", "content": message})
+
+    # Try to get the response from the Groq API
+    try:
+        response = client.chat.completions.create(
+            model="llama-3.1-70b-versatile",  # Ensure the correct model name
+            messages=history_list,
+            temperature=1.0,
+            max_tokens=4000,
+            stream=False  # Streaming disabled; return the full response at once
+        )
+
+        final_message = response.choices[0].message.content
+
+        # Return the final AI-generated message
+        return {"response": final_message}
+
+    except Exception as e:
+        return {"response": f"Error: {str(e)}"}
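For reference, /chat expects a JSON body matching ChatRequest: a message string plus a history given as [user, assistant] pairs, and it replies with {"response": "..."}. A quick manual test against a locally running backend (the question and history text below are illustrative):

import requests

payload = {
    "message": "What is a smart contract?",
    "history": [
        ["What is a blockchain?", "A blockchain is a shared, append-only ledger..."],
    ],
}

resp = requests.post("http://127.0.0.1:8000/chat", json=payload, timeout=60)
resp.raise_for_status()
print(resp.json()["response"])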
gradio_app.py
ADDED
@@ -0,0 +1,27 @@
+import gradio as gr
+import requests
+
+def predict(message, history):
+    # API request to FastAPI backend
+    response = requests.post("http://127.0.0.1:8000/chat", json={"message": message, "history": history})
+
+    if response.status_code == 200:
+        response_data = response.json()
+        return response_data['response']
+    else:
+        return "Error: Unable to connect to the FastAPI backend."
+
+demo = gr.ChatInterface(
+    predict,
+    title="Blockchain Teacher",
+    theme=gr.themes.Soft(),
+    chatbot=gr.Chatbot(label="Learn about blockchain technology"),
+    textbox=gr.Textbox(
+        placeholder="Ask me anything about blockchain",
+        scale=7,
+        max_lines=2,
+    ),
+)
+
+def start_gradio():
+    demo.launch(server_name="0.0.0.0", server_port=7860)
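gr.ChatInterface hands predict the conversation history as a list of [user, assistant] pairs (Gradio's tuple-style history), which is the shape the backend unpacks with for human, ai in history. A small smoke test for predict, assuming the FastAPI backend is already running on port 8000:

# Hypothetical smoke test; not part of the commit.
from gradio_app import predict

history = [["What is a blockchain?", "A blockchain is a shared, append-only ledger..."]]
print(predict("How do smart contracts work?", history))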
requirements.txt
ADDED
File without changes
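requirements.txt is added empty in this commit. Judging from the imports above, a plausible dependency list would look like the following; the exact package set and any version pins are an assumption, not part of the commit:

fastapi
uvicorn
gradio
groq
python-dotenv
requests
pydantic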