Update app.py
Soft Deleting the Space

app.py CHANGED
@@ -10,8 +10,8 @@ from transformers import WhisperProcessor, WhisperForConditionalGeneration
 processor1 = WhisperProcessor.from_pretrained("openai/whisper-large-v2")
 model1 = WhisperForConditionalGeneration.from_pretrained("openai/whisper-large-v2")
 
-tokenizer1 = AutoTokenizer.from_pretrained("microsoft/BioGPT-Large-PubMedQA", add_special_tokens=False)
-model = AutoModelForCausalLM.from_pretrained("microsoft/BioGPT-Large-PubMedQA")#.to('cuda:0')
+#tokenizer1 = AutoTokenizer.from_pretrained("microsoft/BioGPT-Large-PubMedQA", add_special_tokens=False)
+#model = AutoModelForCausalLM.from_pretrained("microsoft/BioGPT-Large-PubMedQA")#.to('cuda:0')
 
 
 def text_to_speech(text_input):
@@ -119,7 +119,7 @@ examples = [
 
 app = gr.Blocks()
 with app:
-    gr.Markdown("# **<h4 align='center'>Voice based Medical Informational Bot<h4>**")
+    gr.Markdown("# **<h4 align='center'>Voice based Medical Informational Bot - Won't Work due to Resource Constraints<h4>**")
 
     with gr.Row():
        with gr.Column():