Update app.py
app.py CHANGED
@@ -19,14 +19,14 @@ st.subheader("Using a Hugging Face GPT-2 model")
 
 # Input text box
 src_text = st.text_area("Enter Japanese text for translation:")
-print(src_text)
+# print(src_text)
 if st.button("Translate"):
     if src_text.strip():
         with st.spinner("Translating..."):
             # Prepare the input for the model
             prompt = f"Translate the following Japanese sentence to English:\n\nJapanese:{src_text}\nEnglish:"
             input_ids = tokenizer.encode(prompt, return_tensors='pt')[:,:-1].to(device)
-            print(tokenizer.batch_decode(input_ids))
+            # print(tokenizer.batch_decode(input_ids))
             # Generate translation
             output_ids = model.generate(input_ids, max_length=128)
             translation = tokenizer.batch_decode(output_ids[:, input_ids.size(-1):-1])[0]
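Note: this hunk only comments out two debug print calls; it relies on `tokenizer`, `model`, and `device` being defined earlier in app.py, which is not shown here. Below is a minimal sketch of the kind of setup the snippet assumes, using the transformers AutoTokenizer/AutoModelForCausalLM API. The checkpoint name and the st.cache_resource caching are assumptions for illustration, not taken from the actual app.py.

import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

device = "cuda" if torch.cuda.is_available() else "cpu"

@st.cache_resource  # cache the model across Streamlit reruns (assumed, not in the shown hunk)
def load_model(name: str = "gpt2"):  # hypothetical checkpoint name
    tokenizer = AutoTokenizer.from_pretrained(name)
    model = AutoModelForCausalLM.from_pretrained(name).to(device)
    return tokenizer, model

tokenizer, model = load_model()

st.subheader("Using a Hugging Face GPT-2 model")

With a setup like this, the code in the diff builds a translation prompt, encodes it, generates up to 128 tokens with model.generate, and decodes only the tokens produced after the prompt (output_ids[:, input_ids.size(-1):-1]) as the translation.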