Sarah Ciston committed on
Commit
a6cd989
·
1 Parent(s): cc195e8

try with basic model and inputs param instead

Browse files
Files changed (2) hide show
  1. README.md +2 -2
  2. sketch.js +20 -11
README.md CHANGED
@@ -12,9 +12,9 @@ hf_oauth_scopes:
12
  - inference-api
13
  models:
14
  # - gpt-3.5-turbo
15
- # - Xenova/distilgpt2
16
  # - HuggingFaceH4/zephyr-7b-gemma-v0.1
17
- - HuggingFaceH4/zephyr-7b-beta
18
  # - openai-community/gpt2
19
  # - meta-llama/Meta-Llama-3-70B-Instruct
20
  # - Xenova/detr-resnet-50
 
12
  - inference-api
13
  models:
14
  # - gpt-3.5-turbo
15
+ - Xenova/distilgpt2
16
  # - HuggingFaceH4/zephyr-7b-gemma-v0.1
17
+ # - HuggingFaceH4/zephyr-7b-beta
18
  # - openai-community/gpt2
19
  # - meta-llama/Meta-Llama-3-70B-Instruct
20
  # - Xenova/detr-resnet-50
sketch.js CHANGED
@@ -174,21 +174,30 @@ new p5(function (p5) {
174
  async function runModel(PREPROMPT, PROMPT){
175
  // inference API version
176
 
177
- let MODELNAME = "HuggingFaceH4/zephyr-7b-beta"
 
 
178
  // let MODELNAME = "openai-community/gpt2"
179
  // let MODELNAME = 'mistral_inference'
180
-
181
- let out = await inference.textGeneration({
 
182
  model: MODELNAME,
183
- messages: [{
184
- role: "system",
185
- content: PREPROMPT
186
- },{
187
- role: "user",
188
- content: PROMPT
189
- }],
190
  max_new_tokens: 128
191
- });
 
 
 
 
 
 
 
 
 
 
 
 
192
 
193
  console.log(out)
194
 
 
174
  async function runModel(PREPROMPT, PROMPT){
175
  // inference API version
176
 
177
+ let INPUT = PREPROMPT + PROMPT
178
+
179
+ // let MODELNAME = "HuggingFaceH4/zephyr-7b-beta"
180
  // let MODELNAME = "openai-community/gpt2"
181
  // let MODELNAME = 'mistral_inference'
182
+ let MODELNAME = 'Xenova/distilgpt2'
183
+
184
+ let out = await inference.textGeneration({
185
  model: MODELNAME,
186
+ inputs: INPUT,
 
 
 
 
 
 
187
  max_new_tokens: 128
188
+ })
189
+
190
+ // let out = await inference.textGeneration({
191
+ // model: MODELNAME,
192
+ // messages: [{
193
+ // role: "system",
194
+ // content: PREPROMPT
195
+ // },{
196
+ // role: "user",
197
+ // content: PROMPT
198
+ // }],
199
+ // max_new_tokens: 128
200
+ // });
201
 
202
  console.log(out)
203