Update lib/@randydev/together/llama.js
lib/@randydev/together/llama.js (changed)
@@ -1,6 +1,6 @@
 import got from 'got';
 
-async function NvidiaTogether(message) {
+async function NvidiaTogether(message, { system_prompt = "" } = {}) {
     const response = await got.post('https://api.together.xyz/v1/chat/completions', {
         headers: {
             'Authorization': 'Bearer ' + process.env['TOGETHER_API_KEY'],
@@ -9,7 +9,8 @@ async function NvidiaTogether(message) {
         json: {
             'model': 'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF',
             'messages': [
-                {"role": "system", "content":
+                {"role": "system", "content": `${system_prompt}`},
+
                 {'role': 'user', 'content': `${message}`}
             ],
             'max_tokens': null,