rmayormartins commited on
Commit
9a3ed0e
1 Parent(s): 233b1aa

Subindo arquivos

Browse files
Files changed (3) hide show
  1. README.md +25 -7
  2. app.py +47 -0
  3. requirements.txt +4 -0
README.md CHANGED
@@ -1,13 +1,31 @@
1
  ---
2
- title: My Llama3 Groq
3
- emoji: 🏢
4
- colorFrom: pink
5
- colorTo: indigo
6
  sdk: gradio
7
- sdk_version: 4.36.1
8
  app_file: app.py
9
  pinned: false
10
- license: ecl-2.0
11
  ---
12
 
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
+ title: My-Llama3-Groq
3
+ emoji: 🦙🤖
4
+ colorFrom: blue
5
+ colorTo: green
6
  sdk: gradio
7
+ sdk_version: "4.12.0"
8
  app_file: app.py
9
  pinned: false
 
10
  ---
11
 
12
+ # My Chatbot
13
+
14
+ My Groq Llama 3 chatbot.
15
+
16
+
17
+ # This project
18
+
19
+ This project uses the Groq cloud API
20
+ with the Llama3-70b-8192 model.
21
+
22
+ ## More information
23
+
24
+ Developed by Ramon Mayor Martins (2024)
25
26
+ Homepage: https://rmayormartins.github.io/
27
+ Twitter: @rmayormartins
28
+ GitHub: https://github.com/rmayormartins
29
+
30
+ ## Special thanks
31
+ Groq https://groq.com/
app.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
from groq import Groq
import gradio as gr

# Read the API key from the environment (set GROQ_API_KEY as a secret).
api_key = os.getenv('GROQ_API_KEY')

# Groq API client used by chat_groq below.
client = Groq(api_key=api_key)

# System message prepended to every conversation sent to the model.
system_prompt = {
    "role": "system",
    "content": "You are a useful assistant. You reply with efficient answers."
}
14
+
15
async def chat_groq(message, history):
    """Stream a chat completion from Groq's llama3-70b-8192 model.

    Args:
        message: The latest user message (coerced to ``str``).
        history: List of ``(user, assistant)`` message pairs supplied by
            Gradio's ChatInterface.

    Yields:
        The accumulated response text so far, so Gradio renders the reply
        incrementally as tokens arrive.
    """
    # Rebuild the full conversation: system prompt, prior turns, new message.
    messages = [system_prompt]
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": str(user_msg)})
        messages.append({"role": "assistant", "content": str(assistant_msg)})
    messages.append({"role": "user", "content": str(message)})

    stream = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=messages,
        max_tokens=1024,
        temperature=1.3,
        stream=True
    )

    response_content = ''
    for chunk in stream:
        content = chunk.choices[0].delta.content
        # Final/keep-alive chunks carry no content; skip them.
        if content:
            # Fix: reuse the already-extracted `content` instead of
            # re-reading chunk.choices[0].delta.content a second time.
            response_content += content
            yield response_content
39
+
40
# Build the UI: a full-height chat interface backed by the streaming handler.
with gr.Blocks(theme=gr.themes.Monochrome(), fill_height=True) as demo:
    gr.ChatInterface(chat_groq,
                     clear_btn=None,   # hide the "clear" button
                     undo_btn=None,    # hide the "undo" button
                     retry_btn=None)   # hide the "retry" button

# queue() is required for generator (streaming) responses; launch() starts
# the web server.
demo.queue()
demo.launch()
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ groq
2
+ gradio
3
+ transformers
4
+ torch