from loguru import logger
from openai import OpenAI
import os

# Read the Together AI key from the environment and point the OpenAI client
# at Together's OpenAI-compatible endpoint.
TOGETHER_API_KEY = os.environ.get("TOGETHER_API_KEY")

client = OpenAI(
    api_key=TOGETHER_API_KEY,
    base_url='https://api.together.xyz/v1',
)


def models():
    # Chat models offered for selection; IDs follow Together AI naming.
    return [
        'teknium/OpenHermes-2p5-Mistral-7B',
        'meta-llama/Llama-3-8b-chat-hf',
        'microsoft/WizardLM-2-8x22B',
        'WizardLM/WizardLM-13B-V1.2',
        'Qwen/Qwen1.5-14B-Chat',
        'togethercomputer/StripedHyena-Nous-7B',
        'meta-llama/Llama-2-13b-chat-hf',
        'meta-llama/Llama-2-70b-chat-hf',
        'Open-Orca/Mistral-7B-OpenOrca',
        'zero-one-ai/Yi-34B-Chat',
    ]


def ask(model, system_prompt, pre_prompt, question, temperature=0.7):
    # Merge the system prompt and pre-prompt into one system message,
    # then send the user's question as a chat completion request.
    messages = [
        {'role': 'system', 'content': f"{system_prompt} {pre_prompt}"},
        {'role': 'user', 'content': f"{question}"},
    ]
    logger.debug(f"<< {model} << {question}")
    chat_completion = client.chat.completions.create(
        messages=messages, model=model, temperature=temperature
    )
    response = chat_completion.choices[0]
    answer = response.message.content
    logger.debug(f">> {model} >> {answer}")
    return answer
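

# --- Usage sketch (not part of the original file) ---
# A minimal example of how ask() might be called with one of the model IDs
# returned by models(). The prompts and question below are illustrative
# placeholders, not values taken from the app; a valid TOGETHER_API_KEY is
# assumed to be set in the environment.
if __name__ == '__main__':
    model = models()[0]
    print(ask(
        model,
        system_prompt="You are a helpful assistant.",
        pre_prompt="Answer concisely.",
        question="What is the capital of France?",
    ))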