mamkkl committed · verified
Commit 374d11f · 1 Parent(s): 456ee4b

Update app.py

Files changed (1):
  1. app.py +4 -1

app.py CHANGED
@@ -69,6 +69,9 @@ def generate_prompt(instruction, input=None):
     return PROMPT_DICT["prompt_no_input"].format(instruction=instruction)
 
 @spaces.GPU
+def getIds(inputs):
+    return inputs["input_ids"].cuda()
+
 def generator(input_ids, generation_config, max_new_tokens):
     # Without streaming
     with torch.no_grad():
@@ -91,7 +94,7 @@ def respond(
 ):
     ins_f = generate_prompt(message,None)
     inputs = tokenizer(ins_f, return_tensors="pt")
-    input_ids = inputs["input_ids"].cuda()
+    input_ids = getIds(inputs)
     max_new_tokens = 512
     generation_config = GenerationConfig(
         temperature=0.1,
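
The commit moves the .cuda() call out of respond and into a getIds helper that sits under the existing @spaces.GPU decorator, which matches the usual pattern on ZeroGPU Spaces: a GPU is only attached while a @spaces.GPU-decorated function is running, so tensors should be moved to CUDA inside such a function rather than at request scope. Below is a minimal, self-contained sketch of that pattern; the model name and function names are illustrative and not taken from this Space.

# Illustrative sketch of the @spaces.GPU pattern this commit appears to follow.
# Model ID and helper names are placeholders, not the Space's actual code.
import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

MODEL_ID = "gpt2"  # placeholder model
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)

@spaces.GPU
def run_generation(prompt: str, max_new_tokens: int = 64) -> str:
    # A GPU is available inside this decorated function, so .cuda() is safe here.
    input_ids = tokenizer(prompt, return_tensors="pt")["input_ids"].cuda()
    model.cuda()
    config = GenerationConfig(temperature=0.1, do_sample=True)
    with torch.no_grad():
        output = model.generate(
            input_ids,
            generation_config=config,
            max_new_tokens=max_new_tokens,
        )
    return tokenizer.decode(output[0], skip_special_tokens=True)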