import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline

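# Load the fine-tuned sentiment model from the Hugging Face Hub as an inference pipeline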
username = "runaksh"
repo_name = "finetuned-sentiment-model"
repo_path = username + '/' + repo_name
model_1 = pipeline(model=repo_path)

# Symptom-to-disease classifier: load the model and tokenizer, then build a text-classification pipeline
model_2 = AutoModelForSequenceClassification.from_pretrained("runaksh/Symptom-2-disease_distilBERT")
tokenizer_2 = AutoTokenizer.from_pretrained("runaksh/Symptom-2-disease_distilBERT")
classifier = pipeline("text-classification", model=model_2, tokenizer=tokenizer_2)

# Sentiment prediction: labels ending in '0' are treated as negative, anything else as positive
def predict_sentiment(text):
    result = model_1(text)
    if result[0]['label'].endswith('0'):
        return 'Negative'
    else:
        return 'Positive'

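# Predict a disease label from free-text symptoms using the DistilBERT classifier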
def predict(sample):
    return classifier(sample)[0]['label']

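# Build the two-tab Gradio Blocks UI and wire each button to its prediction function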
def make_block(dem):
    with dem:
        gr.Markdown("Practicing for Capstone")
        with gr.Tabs():
            with gr.TabItem("Sentiment Classification"):
                with gr.Row():
                    in_prompt_1 = gr.Textbox(lines=10, label='Enter review text')
                    out_response_1 = gr.Textbox(label='Sentiment')
                b1 = gr.Button("Enter")

            with gr.TabItem("Symptoms and Disease Classification"):
                with gr.Row():
                    in_prompt_2 = gr.Textbox(lines=2, label='Enter the Symptoms')
                    out_response_2 = gr.Textbox(label='Disease')
                b2 = gr.Button("Enter")
            b1.click(predict_sentiment, inputs=in_prompt_1, outputs=out_response_1)
            b2.click(predict, inputs=in_prompt_2, outputs=out_response_2)

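# Build and launch the Gradio app when the script is run directly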
if __name__ == '__main__':
    demo = gr.Blocks()
    make_block(demo)
    demo.launch()