owaiskha9654 committed
Commit ab38aba • 1 Parent(s): 9ea6fb2
Update app.py
app.py CHANGED
@@ -38,8 +38,8 @@ def Multi_Label_Classification_of_Pubmed_Articles(model_input: str) -> Dict[str,
     return ret
 
 
-model_input = gr.Textbox("Input text here (Note: This is trained to classify Medical Articles)", show_label=False)
-model_output = gr.Label("Multi Label MeSH Result", num_top_classes=
+model_input = gr.Textbox("Input text here (Note: This model is trained to classify Medical Articles (still in progress))", show_label=False)
+model_output = gr.Label("Multi Label MeSH (Medical Subject Headings) Result", num_top_classes=6, show_label=True, label="MeSH (Medical Subject Headings) labels assigned to this article")
 
 
 examples = [
@@ -65,12 +65,13 @@ examples = [
     )
 ]
 
-title = "
+title = "Multi Label Classification of Pubmed Articles (Thoucentric)"
 description = "Traditional machine learning models struggle when we do not have sufficient labeled data for the specific task or domain we care about to train a reliable model. Transfer learning allows us to deal with these scenarios by leveraging labeled data that already exists for a related task or domain: we store the knowledge gained in solving the source task in the source domain and apply it to our problem of interest. In this work, I have used transfer learning with a BertForSequenceClassification model fine-tuned on the PubMed multi-label classification dataset."
 article = (
-    "Author: Owais Ahmad \n"
+    "Author: Owais Ahmad, Data Scientist at Thoucentric <a href=\"https://www.linkedin.com/in/owaiskhan9654/\">Link</a> \n"
     "Model Trained Kaggle on <a href=\"https://www.kaggle.com/code/owaiskhan9654/multi-label-classification-of-pubmed-articles\">Link</a> \n "
-    "
+    "My Kaggle Profile <a href=\"https://www.kaggle.com/owaiskhan9654\">Link</a> \n"
+    "HuggingFace Model Deployed Repository <a href=\"https://huggingface.co/owaiskha9654/Multi-Label-Classification-of-PubMed-Articles\">Link</a> \n"
 )
 
 
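For orientation, here is a minimal sketch of the prediction function named in the hunk header above. Only its "return ret" line is visible in the diff, so everything else is an assumption: the checkpoint id is taken from the HuggingFace repository linked in the article string, the label names are assumed to come from that checkpoint's id2label config, and per-label sigmoid scores (rather than a softmax) produce the Dict[str, float] that gr.Label expects.

from typing import Dict

import torch
from transformers import AutoTokenizer, BertForSequenceClassification

# Assumption: the app loads the checkpoint linked in the article string above.
MODEL_ID = "owaiskha9654/Multi-Label-Classification-of-PubMed-Articles"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = BertForSequenceClassification.from_pretrained(MODEL_ID)
model.eval()


def Multi_Label_Classification_of_Pubmed_Articles(model_input: str) -> Dict[str, float]:
    # Tokenize the article text, truncating to BERT's 512-token limit.
    inputs = tokenizer(model_input, truncation=True, max_length=512, return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits[0]
    # Multi-label setup: an independent sigmoid per MeSH label, not a softmax.
    probs = torch.sigmoid(logits)
    # Map each probability to its label name from the checkpoint config.
    ret = {model.config.id2label[i]: float(p) for i, p in enumerate(probs)}
    return ret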
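The closing lines of app.py are not part of this diff, so the following is only a plausible sketch of how the pieces edited in this commit fit together: the Textbox feeds the classification function, the Label shows the top six MeSH probabilities (num_top_classes=6), and title, description, article, and examples are passed to gr.Interface.

import gradio as gr

# Assumed wiring: Textbox -> classification function -> Label, with the metadata
# strings and examples list from the diff passed straight to gr.Interface.
demo = gr.Interface(
    fn=Multi_Label_Classification_of_Pubmed_Articles,
    inputs=model_input,
    outputs=model_output,
    title=title,
    description=description,
    article=article,
    examples=examples,
)

demo.launch()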