ldhldh committed · verified
Commit 4d3fbc3 · 1 Parent(s): 93f3730

Update app.py

Files changed (1): app.py (+1 -1)
app.py CHANGED
@@ -6,7 +6,7 @@ generator = pipeline("text-generation", model="foryui/Llama-3.2-1B-Instruct-GRPO
 
 def my_inference_function(text):
     prompt = f"Summary the context below\n\n{text}"
-    output = generator([{"role": "user", "content": question}], max_new_tokens=1024, return_full_text=False)[0]
+    output = generator([{"role": "user", "content": prompt}], max_new_tokens=1024, return_full_text=False)[0]
     output = output["generated_text"]
 
     return output[len(prompt):]
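
The one-line change replaces the undefined name `question` with `prompt`, so the chat-formatted request sent to the pipeline now carries the text the function actually built (the old line would raise a NameError at call time). A minimal usage sketch, assuming app.py is importable, that the truncated model id in the hunk header loads successfully, and with an illustrative sample string:

# Minimal sketch of exercising the corrected function; `my_inference_function`
# comes from the app.py shown in the diff above.
from app import my_inference_function

sample = (
    "Transformers pipelines wrap model loading, tokenization, and generation "
    "behind a single callable, so the app only has to format a prompt."
)
# Returns the generated text produced for the chat-formatted prompt.
print(my_inference_function(sample))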