elariz committed (verified)
Commit d4fda7c · Parent: 48d293f

Delete app.py

Files changed (1)
  1. app.py +0 -49
app.py DELETED
@@ -1,49 +0,0 @@
-import gradio as gr
-from transformers import pipeline
-import re
-from unidecode import unidecode
-import sentencepiece
-import nltk
-nltk.data.path.append('./nltk_data')
-
-# Preprocessing function: lowercasing and removing special characters
-def preprocess_text(text):
-    text = text.lower()
-    text = unidecode(text)
-    text = re.sub(r'\W+', ' ', text)  # Remove non-word characters
-    tokens = nltk.word_tokenize(text)
-    return ' '.join(tokens)  # Return a single string rather than a token list for the context
-
-# Load the multilingual model for question answering
-qa_model = pipeline("question-answering", model="deepset/xlm-roberta-large-squad2")
-
-# Generate an answer for the question based on the uploaded context
-def answer_question(question, context):
-    try:
-        preprocessed_context = preprocess_text(context)
-        result = qa_model(question=question, context=preprocessed_context)
-        return result['answer']
-    except Exception as e:
-        return f"Error: {str(e)}"
-
-# Gradio handler: read the uploaded text file and answer the question
-def qa_app(text_file, question):
-    try:
-        with open(text_file.name, 'r') as file:
-            context = file.read()
-        return answer_question(question, context)
-    except Exception as e:
-        return f"Error reading file: {str(e)}"
-
-# Create the Gradio interface
-iface = gr.Interface(
-    fn=qa_app,  # The function that processes input
-    inputs=[gr.File(label="Upload your text file"), gr.Textbox(label="Enter your question")],
-    outputs="text",
-    title="Multilingual Question Answering",
-    description="Upload a text file and ask a question based on its content."
-)
-
-# Launch the Gradio app
-iface.launch()
-