import gradio as gr
from transformers import pipeline
import requests

def greet(file):
    """Transcribe an uploaded audio file with the hosted Whisper model on the Hugging Face Inference API."""
    API_URL = "https://api-inference.huggingface.co/models/openai/whisper-large-v3-turbo"
    headers = {"Authorization": "Bearer hf_api_key"}  # replace hf_api_key with your Hugging Face access token

    def query(file):
        # Send the raw audio bytes to the Inference API and decode the JSON reply.
        with open(file, "rb") as f:
            data = f.read()
        response = requests.post(API_URL, headers=headers, data=data)
        return response.json()

    my_text = query(file)
    # The response is JSON such as {"text": "..."}; return just the transcript, or the
    # raw response (e.g. an error message) if no "text" field is present.
    # The commented-out English-to-Hungarian translation step is reworked as
    # greet_translated below.
    return my_text.get("text", str(my_text))
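

# A minimal sketch of the commented-out translation step, assuming the transcript is
# English and that splitting on "." is an acceptable sentence boundary. greet_translated
# is a hypothetical alternative handler, not part of the original app; pass
# fn=greet_translated to gr.Interface below to get Hungarian output instead.
def greet_translated(file):
    transcript = greet(file)
    # Helsinki-NLP/opus-mt-en-hu translates English to Hungarian; loading the pipeline
    # on every request is slow, so consider moving this to module level for reuse.
    translation = pipeline("translation", model="Helsinki-NLP/opus-mt-en-hu")
    sentences = [s.strip() for s in transcript.split(".") if s.strip()]
    # Each pipeline call returns a list like [{"translation_text": "..."}].
    translated = [translation(sentence) for sentence in sentences]
    return " ".join(item["translation_text"] for batch in translated for item in batch)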


demo = gr.Interface(fn=greet, inputs="file", outputs="text")
demo.launch()
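# Running this script starts a local Gradio server (http://127.0.0.1:7860 by default);
# pass share=True to demo.launch() to get a temporary public link.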