Update app.py
app.py
CHANGED
@@ -30,6 +30,7 @@ def translate_text(file, text_input):
 
     repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
 
+    # Initialize the HuggingFace endpoint
     llm = HuggingFaceEndpoint(
         repo_id=repo_id,
         max_length=128,
@@ -37,37 +38,47 @@ def translate_text(file, text_input):
         huggingfacehub_api_token=api_token,
     )
 
-
-
-
-
+    # Read the input file based on its type
+    if file.name.endswith('.txt'):
+        with open(file.name, 'r', encoding='utf-8') as f:
+            file_text = f.read()
+    elif file.name.endswith('.rtf'):
+        import pyth.plugins.rtf15.reader as rtf15_reader
+        import pyth.plugins.plaintext.writer as plaintext_writer
+        doc = rtf15_reader.read(f)
+        file_text = plaintext_writer.write(doc).getvalue()
+    else:
+        raise ValueError("Unsupported file type. Please upload a .txt or .rtf file.")
+
+    # Define the prompt template and create the LLM chain
     prompt = PromptTemplate.from_template(system_prompt_1)
-
     llm_chain = prompt | llm
     file_translation = llm_chain.invoke({"text": file_text})
-
-
-
+
+    # Determine the output file path and type
+    if file.name.endswith('.txt'):
+        output_file_path = "translated_file.txt"
+    elif file.name.endswith('.rtf'):
+        output_file_path = "translated_file.rtf"
+
+    # Write the translated text to the output file
     with open(output_file_path, 'w', encoding='utf-8') as f:
         f.write(file_translation)
+
+    return file_translation, output_file_path
 
-
-
+# Create the Gradio interface
 iface = gr.Interface(
     fn=translate_text,
-    inputs=[
-        gr.File(label="Upload Text File")
-
-    ],
+    inputs=[gr.File(label="Upload Text File")],
     outputs=[
-        gr.Textbox(label="
-        gr.File(label="Download Translated File
-
+        gr.Textbox(label="Transcribed Content"),
+        gr.File(label="Download Translated File")
     ],
-    title="Text
-    description="Upload a
+    title="Text Transcriber",
+    description="Upload a .txt or .rtf file to translate its content using LangChain and Mistral-7B-Instruct-v0.2 model with predefined system prompts.",
     allow_flagging="never"
-
 )
 
 iface.launch(debug=True)
+
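A note on the new file-reading block: in the .rtf branch, rtf15_reader.read(f) references f, which is only bound inside the with open(...) block of the .txt branch, so the RTF path would fail with a NameError as written. Below is a minimal sketch of that step with the file opened first; it keeps the diff's pyth imports and calls unchanged, and the helper name read_uploaded_file is illustrative rather than part of app.py.

# Sketch only: read a Gradio-uploaded .txt or .rtf file into plain text.
# The pyth calls are copied from the diff as-is; read_uploaded_file is a hypothetical helper.
def read_uploaded_file(file):
    if file.name.endswith('.txt'):
        with open(file.name, 'r', encoding='utf-8') as f:
            return f.read()
    elif file.name.endswith('.rtf'):
        import pyth.plugins.rtf15.reader as rtf15_reader
        import pyth.plugins.plaintext.writer as plaintext_writer
        with open(file.name, 'rb') as f:  # open the RTF file before handing it to the reader
            doc = rtf15_reader.read(f)
        return plaintext_writer.write(doc).getvalue()
    else:
        raise ValueError("Unsupported file type. Please upload a .txt or .rtf file.")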
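For reference, the pieces added here compose a PromptTemplate with the HuggingFaceEndpoint LLM through LangChain's pipe operator. The standalone sketch below shows that wiring under stated assumptions: the real app.py defines system_prompt_1 and api_token elsewhere, so the placeholder template and the HF_TOKEN environment variable are illustrative, and the HuggingFaceEndpoint import path may be langchain_huggingface or langchain_community.llms depending on the installed LangChain version.

import os
from langchain_core.prompts import PromptTemplate
from langchain_huggingface import HuggingFaceEndpoint  # assumed import path

# Hypothetical stand-ins for values defined elsewhere in app.py
api_token = os.environ["HF_TOKEN"]
system_prompt_1 = "Translate the following text into English:\n\n{text}"

llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.2",
    max_length=128,
    huggingfacehub_api_token=api_token,
)

prompt = PromptTemplate.from_template(system_prompt_1)  # template must contain a {text} placeholder
llm_chain = prompt | llm                                # the rendered prompt feeds the endpoint
print(llm_chain.invoke({"text": "Bonjour tout le monde."}))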
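One more detail worth flagging: translate_text still takes (file, text_input), while the interface now passes only the single gr.File component, so Gradio would call the function with one argument. If text_input is no longer used, giving it a default keeps the signature compatible; a one-line sketch of that assumption:

def translate_text(file, text_input=None):  # text_input kept for compatibility but unused after this change
    ...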