Artin2009 committed on
Commit
3c3e4d8
1 Parent(s): 9fa8e9b

Update chain_app.py

Browse files
Files changed (1) hide show
  1. chain_app.py +30 -0
chain_app.py CHANGED
@@ -672,6 +672,36 @@ async def main(message: cl.Message):
672
  # Send the concatenated content as a message
673
  await cl.Message(content=complete_content).send()
674
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
675
  elif chat_profile == 'Llama-3-70B':
676
  completion = groq_client.chat.completions.create(
677
  model="llama3-70b-8192",
 
672
  # Send the concatenated content as a message
673
  await cl.Message(content=complete_content).send()
674
 
675
+ elif chat_profile == 'Llama-3.1-70B':
676
+ completion = groq_client.chat.completions.create(
677
+ model="llama-3.1-70b-versatile",
678
+ messages=[
679
+ {
680
+ "role": "user",
681
+ "content": message.content
682
+ }
683
+ ],
684
+ temperature=1,
685
+ max_tokens=1024,
686
+ top_p=1,
687
+ stream=True,
688
+ stop=None,
689
+ )
690
+
691
+ complete_content = ""
692
+
693
+ # Iterate over each chunk
694
+ for chunk in completion:
695
+ # Retrieve the content from the current chunk
696
+ content = chunk.choices[0].delta.content
697
+
698
+ # Check if the content is not None before concatenating it
699
+ if content is not None:
700
+ complete_content += content
701
+
702
+ # Send the concatenated content as a message
703
+ await cl.Message(content=complete_content).send()
704
+
705
  elif chat_profile == 'Llama-3-70B':
706
  completion = groq_client.chat.completions.create(
707
  model="llama3-70b-8192",