Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -4,8 +4,8 @@
 #os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

 import gradio as gr
-
-
+import tensorflow as tf
+import torch

 import sentencepiece
 from transformers import MBartTokenizer, MBartForConditionalGeneration
@@ -22,7 +22,7 @@ def run_model(input_text,
     #encode input to vector
     input_text = str(input_text)
     input_text = ' '.join(input_text.split()) # hapus white space
-    input_tokenized = mbart_tokenizer.encode(input_text, return_tensors='
+    input_tokenized = mbart_tokenizer.encode(input_text, return_tensors='pt')

     #generate input
     summary_ids = mbart_model.generate(input_tokenized,
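For context, here is a minimal sketch of how app.py plausibly wires these pieces together after this change. The checkpoint name, decoding parameters, and Gradio interface settings are assumptions for illustration; only the imports, the encode() call with return_tensors='pt', and the generate() call are confirmed by the diff above.

# Minimal sketch; checkpoint, decoding settings, and Gradio wiring are assumptions.
import gradio as gr
import torch
from transformers import MBartTokenizer, MBartForConditionalGeneration

MODEL_NAME = "facebook/mbart-large-cc25"  # placeholder checkpoint, not taken from the diff

mbart_tokenizer = MBartTokenizer.from_pretrained(MODEL_NAME)
mbart_model = MBartForConditionalGeneration.from_pretrained(MODEL_NAME)

def run_model(input_text):
    # Normalize the input and collapse extra whitespace.
    input_text = " ".join(str(input_text).split())

    # return_tensors='pt' makes encode() return a PyTorch tensor,
    # which is what MBartForConditionalGeneration.generate expects.
    input_tokenized = mbart_tokenizer.encode(input_text, return_tensors="pt")

    # Decoding parameters below are placeholders.
    summary_ids = mbart_model.generate(
        input_tokenized,
        num_beams=4,
        max_length=128,
        early_stopping=True,
    )
    return mbart_tokenizer.decode(summary_ids[0], skip_special_tokens=True)

gr.Interface(fn=run_model, inputs="textbox", outputs="textbox").launch()

With return_tensors='pt', encode() hands generate() the PyTorch tensor it expects, which is the core fix in the second hunk; the import tensorflow as tf added in the first hunk is presumably used elsewhere in app.py outside the lines shown here.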