harshSethi committed on
Commit
f0eca0c
·
verified ·
1 Parent(s): 0810275

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +27 -18
app.py CHANGED
@@ -3,6 +3,7 @@ import gradio as gr
3
  import datetime as dt
4
  import pytz
5
  from groq import Groq
 
6
 
7
  # Get the API key from an environment variable
8
  groq_api_key = os.getenv("GROQ_API_KEY")
@@ -33,24 +34,32 @@ def predict(message,history):
33
  history_list.append({"role": "assistant", "content": ai})
34
  history_list.append({"role": "user", "content": message})
35
 
36
- response = client.chat.completions.create(
37
- model = "llama-3.1-70b-versatile",
38
- messages = history_list,
39
- temperature = 1.0,
40
- max_tokens=4000,
41
- stream = True
42
- )
43
-
44
- partialMessage = ""
45
- chunkCount = 0
46
- for chunk in response:
47
- chunkContent = chunk.choices[0].delta.content
48
- if chunkContent:
49
- chunkCount+=1
50
- partialMessage= partialMessage + chunkContent
51
- yield partialMessage
52
-
53
- pprint(f"[tokens = {chunkCount}] {message}")
 
 
 
 
 
 
 
 
54
 
55
  demo = gr.ChatInterface(
56
  predict,
 
3
  import datetime as dt
4
  import pytz
5
  from groq import Groq
6
+ import logging
7
 
8
  # Get the API key from an environment variable
9
  groq_api_key = os.getenv("GROQ_API_KEY")
 
34
  history_list.append({"role": "assistant", "content": ai})
35
  history_list.append({"role": "user", "content": message})
36
 
37
+ try:
38
+ response = client.chat.completions.create(
39
+ model="llama-3.1-70b-versatile", # Ensure the correct model name
40
+ messages=history_list,
41
+ temperature=1.0,
42
+ max_tokens=4000,
43
+ stream=True # Use streaming
44
+ )
45
+
46
+ partial_message = ""
47
+ chunk_count = 0
48
+
49
+ # Stream the response in chunks
50
+ for chunk in response:
51
+ chunk_content = chunk.choices[0].delta.content
52
+ if chunk_content:
53
+ chunk_count += 1
54
+ partial_message += chunk_content
55
+ yield partial_message # Send partial message to Gradio
56
+
57
+ pprint(f"[tokens = {chunk_count}] {message}")
58
+
59
+ except Exception as e:
60
+ logging.error(f"API request failed: {e}")
61
+ yield "Error: Unable to connect to Groq API."
62
+
63
 
64
  demo = gr.ChatInterface(
65
  predict,