Artin2009 committed on
Commit
cf2c337
1 Parent(s): facdb08

Persist changes from Dev Mode

Browse files
__pycache__/chain_app.cpython-39.pyc CHANGED
Binary files a/__pycache__/chain_app.cpython-39.pyc and b/__pycache__/chain_app.cpython-39.pyc differ
 
chain_app.py CHANGED
@@ -15,7 +15,7 @@ hf_token_llama_3_1 = os.environ.get('HF_TOKEN_FOR_31')
15
  openai_api_key = os.environ.get('OPENAI_API_KEY')
16
  groq_api_key = os.environ.get('GROQ_API_KEY')
17
  cohere_api_key = os.environ.get('COHERE_API_KEY')
18
-
19
 
20
  hf_text_client = Client("Artin2009/text-generation", hf_token=hf_token)
21
  # hf_image_client = Client('Artin2009/image-generation')
@@ -2198,16 +2198,30 @@ async def main(message: cl.Message):
2198
  # ).send()
2199
 
2200
  elif chat_profile == 'Llama-3.1-405B':
2201
- client = Fireworks(api_key="O9D2HQsat357QRQ2wEAC3buShjL8Ea4K7ndDqwkAsAdsDulz")
2202
- response = client.chat.completions.create(
2203
- model="accounts/fireworks/models/llama-v3p1-405b-instruct",
2204
- messages=[{
2205
- "role": "user",
2206
- "content": message.content,
2207
- }],
2208
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2209
  await cl.Message(
2210
- content=response.choices[0].message.content,
2211
  ).send()
2212
 
2213
 
 
15
  openai_api_key = os.environ.get('OPENAI_API_KEY')
16
  groq_api_key = os.environ.get('GROQ_API_KEY')
17
  cohere_api_key = os.environ.get('COHERE_API_KEY')
18
+ fireworks_api_key = os.environ.get('FIREWORKS_API_KEY')
19
 
20
  hf_text_client = Client("Artin2009/text-generation", hf_token=hf_token)
21
  # hf_image_client = Client('Artin2009/image-generation')
 
2198
  # ).send()
2199
 
2200
  elif chat_profile == 'Llama-3.1-405B':
2201
+ # client = Fireworks(api_key=f'{fireworks_api_key}')
2202
+ # response = client.chat.completions.create(
2203
+ # model="accounts/fireworks/models/llama-v3p1-405b-instruct",
2204
+ # messages=[{
2205
+ # "role": "user",
2206
+ # "content": message.content,
2207
+ # }],
2208
+ # )
2209
+ # await cl.Message(
2210
+ # content=response.choices[0].message.content,
2211
+ # ).send()
2212
+
2213
+ API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3.1-405B"
2214
+ headers = {"Authorization": "Bearer hf_token_llama_3_1"}
2215
+ def query(payload):
2216
+ response = requests.post(API_URL, headers=headers, json=payload)
2217
+ return response.json()
2218
+
2219
+ output = query({
2220
+ "inputs": message.content,
2221
+ })
2222
+
2223
  await cl.Message(
2224
+ content=output
2225
  ).send()
2226
 
2227