Create together/llama.js
lib/@randydev/together/llama.js
ADDED
@@ -0,0 +1,21 @@
import got from 'got';

// Query Together AI's chat completions endpoint with NVIDIA's
// Llama-3.1-Nemotron-70B-Instruct-HF model. The API key is read from
// the TOGETHER_API_KEY environment variable.
async function NvidiaTogether(message) {
    const response = await got.post('https://api.together.xyz/v1/chat/completions', {
        headers: {
            'Authorization': 'Bearer ' + process.env['TOGETHER_API_KEY'],
            'Content-Type': 'application/json'
        },
        json: {
            'model': 'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF',
            'messages': [{'role': 'user', 'content': `${message}`}],
            'max_tokens': null,
            'temperature': 0.7,
            'top_p': 0.7,
            'top_k': 50,
            'repetition_penalty': 1,
            'stream': false
        },
        // got response objects have no .json() method; ask got to parse
        // the JSON body and return the parsed payload instead.
        responseType: 'json'
    });
    return response.body;
}
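
For reference, a minimal usage sketch, assuming the helper is exported from this module (e.g. via export { NvidiaTogether }, which this file does not yet do) and that TOGETHER_API_KEY is set; the response shape is Together's OpenAI-compatible chat completions payload:

import { NvidiaTogether } from './lib/@randydev/together/llama.js';

const result = await NvidiaTogether('Explain what Nemotron 70B is in one sentence.');
// OpenAI-compatible payload: the reply text lives under choices[0].message.content.
console.log(result.choices[0].message.content);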