import os

import gradio as gr
from groq import Groq

# Read the API key from the environment rather than hardcoding it in the source.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))

def chatbot(input_text, chat_history):
    # Start every request with the persona-setting system prompt.
    messages = [{"role": "system", "content": "Answer every input as if you are a tech-savvy computer science student who spends countless hours coding, building apps, and keeping up with the latest tech trends. You enjoy discussing programming languages, AI, and gadgets and are always ready to troubleshoot tech-related problems."}]
    # Replay the prior (user, assistant) turns so the model has conversational context.
    for user_msg, assistant_msg in chat_history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    # Append the new user message and request a completion.
    messages.append({"role": "user", "content": input_text})
    completion = client.chat.completions.create(
        model="llama3-8b-8192",
        messages=messages,
        temperature=1,
        max_tokens=1024,
        top_p=1,
        stream=False,
        stop=None,
    )
    assistant_reply = completion.choices[0].message.content
    # Record the new turn and return the history twice: once for the Chatbot
    # component and once for the session state.
    chat_history.append((input_text, assistant_reply))
    return chat_history, chat_history

with gr.Blocks() as demo:
    gr.Markdown("## Chat with Tech-Savvy Bot")
    chatbot_interface = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type your message here...")
    clear_btn = gr.Button("Clear Chat")
    # Per-session conversation history, kept as a list of (user, assistant) tuples.
    chat_history = gr.State([])

    # Submitting the textbox sends the message; the clear button resets both
    # the chat display and the stored history.
    msg.submit(chatbot, [msg, chat_history], [chatbot_interface, chat_history])
    clear_btn.click(lambda: ([], []), None, [chatbot_interface, chat_history])

if __name__ == "__main__":
    demo.launch(share=True)