# Captured from a Hugging Face Spaces page (status banner: "Runtime error");
# kept here as comments so the file parses as Python.
from transformers import MBartForConditionalGeneration, MBart50Tokenizer

import dat  # local module; presumably provides the QA context text in `dat.data` — TODO confirm

# Load the mBART-50 QA model and its tokenizer.
# src_lang/tgt_lang select the tokenizer's language codes: English in, Azerbaijani out.
model_name = "LocalDoc/mbart_large_qa_azerbaijan"
tokenizer = MBart50Tokenizer.from_pretrained(model_name, src_lang="en_XX", tgt_lang="az_AZ")
model = MBartForConditionalGeneration.from_pretrained(model_name)
def answer_question(context, question):
    """Generate an answer to *question* given *context* with the mBART QA model.

    Parameters
    ----------
    context : str
        Passage of text to answer from.
    question : str
        The question to ask about the passage.

    Returns
    -------
    str
        The decoded answer, special tokens stripped.
    """
    # Build the single-string prompt format the model was trained on.
    input_text = f"context: {context} question: {question}"
    # Tokenize; truncate/pad to a fixed 512-token input.
    inputs = tokenizer(
        input_text,
        return_tensors="pt",
        max_length=512,
        truncation=True,
        padding="max_length",
    )
    # Beam search (5 beams), answer capped at 128 tokens.
    outputs = model.generate(
        input_ids=inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_length=128,
        num_beams=5,
        early_stopping=True,
    )
    # Decode the best beam back into plain text.
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Example usage: ask a question against the context shipped in `dat.data`.
# Guarded so importing this module does not trigger inference.
if __name__ == "__main__":
    context = dat.data
    question = "Vətəndaşın icazəsi olmadan videosunu çəkmək qadağandır?"
    answer = answer_question(context, question)
    print(answer)