File size: 1,249 Bytes
aad58a2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ba1ca3f
aad58a2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ba1ca3f
 
aad58a2
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
import gradio as gr
import os
from openai import OpenAI
import time

# Initialize an OpenAI-compatible client pointed at a RunPod serverless
# vLLM endpoint. The key comes from the RUNPOD_API_KEY environment variable;
# if it is unset, requests will fail with an auth error at call time, not here.
client = OpenAI(
    api_key=os.environ.get("RUNPOD_API_KEY"),
    base_url="https://api.runpod.ai/v2/vllm-k0g4c60zor9xuu/openai/v1",
)

def runpod_chat(question, history):
    """Stream a chat reply from the RunPod endpoint.

    Generator suitable for Gradio streaming: yields the accumulated reply
    text ("RunPod: " + everything received so far) after each chunk, so the
    UI shows a single growing message instead of isolated token fragments.

    Args:
        question: The user's new message.
        history: Mutable list of OpenAI-style {"role", "content"} dicts,
            or a falsy value on the first turn.

    Yields:
        str: The partial (then complete) assistant reply, prefixed "RunPod: ".
    """
    if not history:
        history = []
    history.append({"role": "user", "content": question})

    response_stream = client.chat.completions.create(
        model="ambrosfitz/llama-3-history",
        messages=history,
        temperature=0,
        max_tokens=150,
        stream=True,
    )

    # Accumulate the streamed deltas. The first chunk of an OpenAI stream
    # carries only the role and the final chunk carries no content, so
    # delta.content can be None — skip those instead of concatenating them.
    full_reply = ""
    for chunk in response_stream:
        delta = chunk.choices[0].delta.content
        if not delta:
            continue
        full_reply += delta
        yield "RunPod: " + full_reply

    # Record the assistant's turn ONCE, as a single message, so the
    # history passed on the next turn remains a well-formed conversation.
    history.append({"role": "assistant", "content": full_reply})

# Set up the Gradio interface. gr.ChatInterface natively drives a
# (message, history) generator function and streams its yields into a chat
# window — the previous gr.Interface config was invalid (gr.State accepts
# no `label` kwarg, and "chat" is not a recognized output component string).
iface = gr.ChatInterface(
    fn=runpod_chat,
    title="RunPod Chat",
    description="This app interfaces with RunPod's API to provide responses to your queries.",
)

iface.launch()