Kevin Fink
committed on
Commit
·
15a4335
1
Parent(s):
2ba9bb8
dev
Browse files
app.py
CHANGED
@@ -256,10 +256,11 @@ def predict(text):
|
|
256 |
if torch.cuda.is_available():
|
257 |
model = model.to('cuda')
|
258 |
inputs = {key: value.to('cuda') for key, value in inputs.items()}
|
259 |
-
|
|
|
260 |
# Generate outputs
|
261 |
with torch.no_grad(): # Disable gradient calculation for inference
|
262 |
-
outputs = model.generate(inputs)
|
263 |
|
264 |
## Decode the generated output
|
265 |
#predictions = tokenizer.decode(outputs[0], skip_special_tokens=True)
|
|
|
256 |
if torch.cuda.is_available():
|
257 |
model = model.to('cuda')
|
258 |
inputs = {key: value.to('cuda') for key, value in inputs.items()}
|
259 |
+
print('xxxxxxxxxxxxxxxxxxxxxxx')
|
260 |
+
print(inputs)
|
261 |
# Generate outputs
|
262 |
with torch.no_grad(): # Disable gradient calculation for inference
|
263 |
+
outputs = model.generate(inputs['input_ids'][0])
|
264 |
|
265 |
## Decode the generated output
|
266 |
#predictions = tokenizer.decode(outputs[0], skip_special_tokens=True)
|