abdfajar707 committed
Commit ad0cc09 · verified · 1 Parent(s): b514c5e

Update app.py

Files changed (1)
  1. app.py +9 -2
app.py CHANGED
@@ -1,3 +1,6 @@
+alpaca_prompt.format(
+    "", # input
+    "", outputs
 from unsloth import FastLanguageModel
 import torch
 import gradio as gr
@@ -27,10 +30,14 @@ if True:
 
 
 # Function to generate a response
-def generate_response(prompt, max_length=4096):
+def generate_response(prompt, max_length=1024):
     inputs = tokenizer(
     [
-        prompt, # instruction
+        alpaca_prompt.format(
+            prompt, # instruction
+            "", # input
+            "" outputs
+        )
     ], return_tensors = "pt").to("cuda")
     outputs = model.generate(**inputs, max_length=max_length, pad_token_id=tokenizer.eos_token_id)
     response = tokenizer.decode(outputs[0], skip_special_tokens=True)
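As committed, the added code has two apparent problems: the three-line alpaca_prompt.format( fragment placed above the imports (a leftover that would raise a SyntaxError when app.py runs), and the missing comma in the "" outputs argument inside generate_response. Below is a minimal sketch of what the function presumably intends, assuming the usual three-slot Alpaca template for alpaca_prompt and that model and tokenizer were already loaded via FastLanguageModel.from_pretrained; neither is shown in this diff, so both are assumptions, not the committed code.

# Minimal sketch, not the committed code. Assumes:
#   - `model` and `tokenizer` were loaded earlier, e.g. via
#     FastLanguageModel.from_pretrained(...), elsewhere in app.py
#   - `alpaca_prompt` is the usual three-slot Alpaca template
#     (instruction / input / response); the exact wording may differ.
alpaca_prompt = """Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.

### Instruction:
{}

### Input:
{}

### Response:
{}"""

def generate_response(prompt, max_length=1024):
    # Build a single Alpaca-formatted prompt; the response slot is left
    # empty so the model generates it.
    inputs = tokenizer(
        [
            alpaca_prompt.format(
                prompt,  # instruction
                "",      # input (unused here)
                "",      # response, left empty for generation
            )
        ],
        return_tensors="pt",
    ).to("cuda")
    outputs = model.generate(
        **inputs,
        max_length=max_length,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Decode the generated tokens back into text
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return response

Under this reading, the three stray lines added above the imports would simply be removed, and the prompt formatting lives entirely inside generate_response.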